author    Bert Belder <bertbelder@gmail.com>  2012-06-13 15:34:45 +0200
committer Bert Belder <bertbelder@gmail.com>  2012-06-14 01:37:13 +0200
commit    50464cd4f49e40f4fe792ff46a81052319a222e9 (patch)
tree      1fe524b2e6c0eb3c459142cd27539f88e1a3f63c /deps
parent    09be360a0fee2c7619bae8c4248f9ed3d79d1b30 (diff)
v8: upgrade to v3.11.10
Diffstat (limited to 'deps')
-rw-r--r--deps/v8/AUTHORS1
-rw-r--r--deps/v8/ChangeLog311
-rw-r--r--deps/v8/DEPS27
-rw-r--r--deps/v8/Makefile69
-rw-r--r--deps/v8/SConstruct4
-rw-r--r--deps/v8/build/armu.gypi36
-rw-r--r--deps/v8/build/common.gypi268
-rwxr-xr-xdeps/v8/build/gyp_v82
-rw-r--r--deps/v8/build/mipsu.gypi33
-rw-r--r--deps/v8/build/standalone.gypi12
-rw-r--r--deps/v8/include/v8-profiler.h85
-rw-r--r--deps/v8/include/v8.h251
-rw-r--r--deps/v8/samples/lineprocessor.cc6
-rw-r--r--deps/v8/samples/samples.gyp8
-rw-r--r--deps/v8/samples/shell.cc25
-rwxr-xr-xdeps/v8/src/SConscript1
-rw-r--r--deps/v8/src/allocation-inl.h4
-rw-r--r--deps/v8/src/allocation.h14
-rw-r--r--deps/v8/src/api.cc175
-rw-r--r--deps/v8/src/api.h11
-rw-r--r--deps/v8/src/apiutils.h9
-rw-r--r--deps/v8/src/arguments.h13
-rw-r--r--deps/v8/src/arm/builtins-arm.cc9
-rw-r--r--deps/v8/src/arm/code-stubs-arm.cc146
-rw-r--r--deps/v8/src/arm/codegen-arm.cc4
-rw-r--r--deps/v8/src/arm/debug-arm.cc4
-rw-r--r--deps/v8/src/arm/full-codegen-arm.cc293
-rw-r--r--deps/v8/src/arm/ic-arm.cc61
-rw-r--r--deps/v8/src/arm/lithium-arm.cc210
-rw-r--r--deps/v8/src/arm/lithium-arm.h116
-rw-r--r--deps/v8/src/arm/lithium-codegen-arm.cc548
-rw-r--r--deps/v8/src/arm/lithium-codegen-arm.h41
-rw-r--r--deps/v8/src/arm/lithium-gap-resolver-arm.cc4
-rw-r--r--deps/v8/src/arm/macro-assembler-arm.cc127
-rw-r--r--deps/v8/src/arm/macro-assembler-arm.h27
-rw-r--r--deps/v8/src/arm/regexp-macro-assembler-arm.cc244
-rw-r--r--deps/v8/src/arm/regexp-macro-assembler-arm.h24
-rw-r--r--deps/v8/src/arm/simulator-arm.h12
-rw-r--r--deps/v8/src/arm/stub-cache-arm.cc342
-rw-r--r--deps/v8/src/array.js166
-rw-r--r--deps/v8/src/assembler.cc95
-rw-r--r--deps/v8/src/assembler.h7
-rw-r--r--deps/v8/src/ast.cc269
-rw-r--r--deps/v8/src/ast.h81
-rw-r--r--deps/v8/src/bootstrapper.cc38
-rw-r--r--deps/v8/src/builtins.cc153
-rw-r--r--deps/v8/src/builtins.h1
-rw-r--r--deps/v8/src/bytecodes-irregexp.h35
-rw-r--r--deps/v8/src/code-stubs.cc51
-rw-r--r--deps/v8/src/code-stubs.h1
-rw-r--r--deps/v8/src/codegen.h6
-rw-r--r--deps/v8/src/compiler-intrinsics.h17
-rw-r--r--deps/v8/src/compiler.cc21
-rw-r--r--deps/v8/src/contexts.h22
-rw-r--r--deps/v8/src/conversions-inl.h4
-rw-r--r--deps/v8/src/d8.cc245
-rw-r--r--deps/v8/src/d8.h6
-rw-r--r--deps/v8/src/d8.js2
-rw-r--r--deps/v8/src/dateparser-inl.h3
-rw-r--r--deps/v8/src/debug-agent.cc38
-rw-r--r--deps/v8/src/debug-debugger.js57
-rw-r--r--deps/v8/src/debug.cc86
-rw-r--r--deps/v8/src/debug.h55
-rw-r--r--deps/v8/src/deoptimizer.cc68
-rw-r--r--deps/v8/src/deoptimizer.h24
-rw-r--r--deps/v8/src/double.h6
-rw-r--r--deps/v8/src/elements-kind.cc134
-rw-r--r--deps/v8/src/elements-kind.h221
-rw-r--r--deps/v8/src/elements.cc681
-rw-r--r--deps/v8/src/elements.h65
-rw-r--r--deps/v8/src/extensions/externalize-string-extension.cc7
-rw-r--r--deps/v8/src/extensions/gc-extension.cc5
-rw-r--r--deps/v8/src/factory.cc80
-rw-r--r--deps/v8/src/factory.h30
-rw-r--r--deps/v8/src/flag-definitions.h16
-rw-r--r--deps/v8/src/frames.cc54
-rw-r--r--deps/v8/src/frames.h12
-rw-r--r--deps/v8/src/full-codegen.cc141
-rw-r--r--deps/v8/src/full-codegen.h54
-rw-r--r--deps/v8/src/func-name-inferrer.cc15
-rw-r--r--deps/v8/src/func-name-inferrer.h10
-rw-r--r--deps/v8/src/handles.cc6
-rw-r--r--deps/v8/src/hashmap.h102
-rw-r--r--deps/v8/src/heap-inl.h27
-rw-r--r--deps/v8/src/heap-profiler.cc50
-rw-r--r--deps/v8/src/heap-profiler.h13
-rw-r--r--deps/v8/src/heap.cc325
-rw-r--r--deps/v8/src/heap.h59
-rw-r--r--deps/v8/src/hydrogen-instructions.cc249
-rw-r--r--deps/v8/src/hydrogen-instructions.h466
-rw-r--r--deps/v8/src/hydrogen.cc2015
-rw-r--r--deps/v8/src/hydrogen.h147
-rw-r--r--deps/v8/src/ia32/assembler-ia32.h3
-rw-r--r--deps/v8/src/ia32/builtins-ia32.cc20
-rw-r--r--deps/v8/src/ia32/code-stubs-ia32.cc150
-rw-r--r--deps/v8/src/ia32/codegen-ia32.cc22
-rw-r--r--deps/v8/src/ia32/debug-ia32.cc39
-rw-r--r--deps/v8/src/ia32/deoptimizer-ia32.cc98
-rw-r--r--deps/v8/src/ia32/frames-ia32.h6
-rw-r--r--deps/v8/src/ia32/full-codegen-ia32.cc307
-rw-r--r--deps/v8/src/ia32/ic-ia32.cc202
-rw-r--r--deps/v8/src/ia32/lithium-codegen-ia32.cc582
-rw-r--r--deps/v8/src/ia32/lithium-codegen-ia32.h33
-rw-r--r--deps/v8/src/ia32/lithium-gap-resolver-ia32.cc4
-rw-r--r--deps/v8/src/ia32/lithium-ia32.cc141
-rw-r--r--deps/v8/src/ia32/lithium-ia32.h106
-rw-r--r--deps/v8/src/ia32/macro-assembler-ia32.cc174
-rw-r--r--deps/v8/src/ia32/macro-assembler-ia32.h26
-rw-r--r--deps/v8/src/ia32/regexp-macro-assembler-ia32.cc237
-rw-r--r--deps/v8/src/ia32/regexp-macro-assembler-ia32.h24
-rw-r--r--deps/v8/src/ia32/simulator-ia32.h8
-rw-r--r--deps/v8/src/ia32/stub-cache-ia32.cc536
-rw-r--r--deps/v8/src/ic.cc251
-rw-r--r--deps/v8/src/ic.h28
-rw-r--r--deps/v8/src/incremental-marking-inl.h26
-rw-r--r--deps/v8/src/incremental-marking.cc56
-rw-r--r--deps/v8/src/incremental-marking.h15
-rw-r--r--deps/v8/src/interface.cc43
-rw-r--r--deps/v8/src/interface.h65
-rw-r--r--deps/v8/src/interpreter-irregexp.cc87
-rw-r--r--deps/v8/src/isolate.cc29
-rw-r--r--deps/v8/src/isolate.h49
-rw-r--r--deps/v8/src/json-parser.h16
-rw-r--r--deps/v8/src/jsregexp.cc2271
-rw-r--r--deps/v8/src/jsregexp.h590
-rw-r--r--deps/v8/src/lazy-instance.h10
-rw-r--r--deps/v8/src/list-inl.h73
-rw-r--r--deps/v8/src/list.h66
-rw-r--r--deps/v8/src/lithium-allocator.cc89
-rw-r--r--deps/v8/src/lithium-allocator.h1
-rw-r--r--deps/v8/src/lithium.cc49
-rw-r--r--deps/v8/src/lithium.h86
-rw-r--r--deps/v8/src/liveedit-debugger.js5
-rw-r--r--deps/v8/src/liveedit.cc105
-rw-r--r--deps/v8/src/liveedit.h4
-rw-r--r--deps/v8/src/log.cc15
-rw-r--r--deps/v8/src/log.h2
-rw-r--r--deps/v8/src/macros.py10
-rw-r--r--deps/v8/src/mark-compact-inl.h28
-rw-r--r--deps/v8/src/mark-compact.cc340
-rw-r--r--deps/v8/src/mark-compact.h66
-rw-r--r--deps/v8/src/math.js1
-rw-r--r--deps/v8/src/messages.js230
-rw-r--r--deps/v8/src/mips/builtins-mips.cc9
-rw-r--r--deps/v8/src/mips/code-stubs-mips.cc99
-rw-r--r--deps/v8/src/mips/codegen-mips.cc4
-rw-r--r--deps/v8/src/mips/constants-mips.h5
-rw-r--r--deps/v8/src/mips/debug-mips.cc4
-rw-r--r--deps/v8/src/mips/full-codegen-mips.cc297
-rw-r--r--deps/v8/src/mips/ic-mips.cc68
-rw-r--r--deps/v8/src/mips/lithium-codegen-mips.cc391
-rw-r--r--deps/v8/src/mips/lithium-codegen-mips.h30
-rw-r--r--deps/v8/src/mips/lithium-gap-resolver-mips.cc4
-rw-r--r--deps/v8/src/mips/lithium-mips.cc147
-rw-r--r--deps/v8/src/mips/lithium-mips.h96
-rw-r--r--deps/v8/src/mips/macro-assembler-mips.cc101
-rw-r--r--deps/v8/src/mips/macro-assembler-mips.h18
-rw-r--r--deps/v8/src/mips/regexp-macro-assembler-mips.cc275
-rw-r--r--deps/v8/src/mips/regexp-macro-assembler-mips.h22
-rw-r--r--deps/v8/src/mips/simulator-mips.cc9
-rw-r--r--deps/v8/src/mips/simulator-mips.h10
-rw-r--r--deps/v8/src/mips/stub-cache-mips.cc389
-rw-r--r--deps/v8/src/mirror-debugger.js88
-rw-r--r--deps/v8/src/mksnapshot.cc6
-rw-r--r--deps/v8/src/objects-debug.cc102
-rw-r--r--deps/v8/src/objects-inl.h600
-rw-r--r--deps/v8/src/objects-printer.cc48
-rw-r--r--deps/v8/src/objects-visiting-inl.h4
-rw-r--r--deps/v8/src/objects-visiting.cc1
-rw-r--r--deps/v8/src/objects-visiting.h17
-rw-r--r--deps/v8/src/objects.cc1776
-rw-r--r--deps/v8/src/objects.h549
-rw-r--r--deps/v8/src/parser.cc371
-rw-r--r--deps/v8/src/parser.h29
-rw-r--r--deps/v8/src/platform-cygwin.cc46
-rw-r--r--deps/v8/src/platform-freebsd.cc47
-rw-r--r--deps/v8/src/platform-linux.cc86
-rw-r--r--deps/v8/src/platform-macos.cc38
-rw-r--r--deps/v8/src/platform-nullos.cc5
-rw-r--r--deps/v8/src/platform-openbsd.cc38
-rw-r--r--deps/v8/src/platform-posix.cc65
-rw-r--r--deps/v8/src/platform-posix.h5
-rw-r--r--deps/v8/src/platform-solaris.cc46
-rw-r--r--deps/v8/src/platform-win32.cc128
-rw-r--r--deps/v8/src/platform.h6
-rw-r--r--deps/v8/src/preparser.cc10
-rw-r--r--deps/v8/src/preparser.h15
-rw-r--r--deps/v8/src/profile-generator-inl.h29
-rw-r--r--deps/v8/src/profile-generator.cc1976
-rw-r--r--deps/v8/src/profile-generator.h371
-rw-r--r--deps/v8/src/property-details.h5
-rw-r--r--deps/v8/src/property.cc7
-rw-r--r--deps/v8/src/property.h17
-rw-r--r--deps/v8/src/regexp-macro-assembler-irregexp-inl.h10
-rw-r--r--deps/v8/src/regexp-macro-assembler-irregexp.cc50
-rw-r--r--deps/v8/src/regexp-macro-assembler-irregexp.h13
-rw-r--r--deps/v8/src/regexp-macro-assembler-tracer.cc132
-rw-r--r--deps/v8/src/regexp-macro-assembler-tracer.h10
-rw-r--r--deps/v8/src/regexp-macro-assembler.cc15
-rw-r--r--deps/v8/src/regexp-macro-assembler.h43
-rw-r--r--deps/v8/src/regexp.js28
-rw-r--r--deps/v8/src/rewriter.cc4
-rw-r--r--deps/v8/src/runtime-profiler.cc66
-rw-r--r--deps/v8/src/runtime-profiler.h10
-rw-r--r--deps/v8/src/runtime.cc1183
-rw-r--r--deps/v8/src/runtime.h13
-rw-r--r--deps/v8/src/safepoint-table.cc19
-rw-r--r--deps/v8/src/safepoint-table.h19
-rwxr-xr-xdeps/v8/src/scanner.cc18
-rw-r--r--deps/v8/src/scanner.h9
-rw-r--r--deps/v8/src/scopeinfo.cc16
-rw-r--r--deps/v8/src/scopes.cc196
-rw-r--r--deps/v8/src/scopes.h53
-rw-r--r--deps/v8/src/serialize.cc2
-rw-r--r--deps/v8/src/small-pointer-list.h32
-rw-r--r--deps/v8/src/spaces-inl.h20
-rw-r--r--deps/v8/src/spaces.cc60
-rw-r--r--deps/v8/src/spaces.h21
-rw-r--r--deps/v8/src/splay-tree-inl.h15
-rw-r--r--deps/v8/src/splay-tree.h23
-rw-r--r--deps/v8/src/string-stream.cc6
-rw-r--r--deps/v8/src/string.js151
-rw-r--r--deps/v8/src/stub-cache.cc61
-rw-r--r--deps/v8/src/stub-cache.h44
-rw-r--r--deps/v8/src/type-info.cc19
-rw-r--r--deps/v8/src/type-info.h6
-rw-r--r--deps/v8/src/utils.cc15
-rw-r--r--deps/v8/src/utils.h26
-rw-r--r--deps/v8/src/v8.cc16
-rw-r--r--deps/v8/src/v8.h1
-rw-r--r--deps/v8/src/v8globals.h4
-rw-r--r--deps/v8/src/v8natives.js4
-rw-r--r--deps/v8/src/version.cc6
-rw-r--r--deps/v8/src/x64/assembler-x64.h3
-rw-r--r--deps/v8/src/x64/builtins-x64.cc9
-rw-r--r--deps/v8/src/x64/code-stubs-x64.cc105
-rw-r--r--deps/v8/src/x64/codegen-x64.cc4
-rw-r--r--deps/v8/src/x64/debug-x64.cc15
-rw-r--r--deps/v8/src/x64/deoptimizer-x64.cc42
-rw-r--r--deps/v8/src/x64/disasm-x64.cc8
-rw-r--r--deps/v8/src/x64/full-codegen-x64.cc442
-rw-r--r--deps/v8/src/x64/ic-x64.cc36
-rw-r--r--deps/v8/src/x64/lithium-codegen-x64.cc489
-rw-r--r--deps/v8/src/x64/lithium-codegen-x64.h33
-rw-r--r--deps/v8/src/x64/lithium-gap-resolver-x64.cc4
-rw-r--r--deps/v8/src/x64/lithium-x64.cc139
-rw-r--r--deps/v8/src/x64/lithium-x64.h102
-rw-r--r--deps/v8/src/x64/macro-assembler-x64.cc108
-rw-r--r--deps/v8/src/x64/macro-assembler-x64.h11
-rw-r--r--deps/v8/src/x64/regexp-macro-assembler-x64.cc249
-rw-r--r--deps/v8/src/x64/regexp-macro-assembler-x64.h38
-rw-r--r--deps/v8/src/x64/simulator-x64.h8
-rw-r--r--deps/v8/src/x64/stub-cache-x64.cc326
-rw-r--r--deps/v8/src/zone-inl.h21
-rw-r--r--deps/v8/src/zone.h71
-rw-r--r--deps/v8/test/cctest/cctest.status15
-rw-r--r--deps/v8/test/cctest/test-accessors.cc9
-rw-r--r--deps/v8/test/cctest/test-alloc.cc36
-rw-r--r--deps/v8/test/cctest/test-api.cc522
-rw-r--r--deps/v8/test/cctest/test-dataflow.cc2
-rw-r--r--deps/v8/test/cctest/test-debug.cc5
-rw-r--r--deps/v8/test/cctest/test-decls.cc16
-rw-r--r--deps/v8/test/cctest/test-disasm-arm.cc8
-rw-r--r--deps/v8/test/cctest/test-disasm-x64.cc1
-rw-r--r--deps/v8/test/cctest/test-double.cc15
-rw-r--r--deps/v8/test/cctest/test-func-name-inference.cc38
-rw-r--r--deps/v8/test/cctest/test-heap-profiler.cc562
-rw-r--r--deps/v8/test/cctest/test-heap.cc243
-rw-r--r--deps/v8/test/cctest/test-list.cc14
-rw-r--r--deps/v8/test/cctest/test-liveedit.cc3
-rw-r--r--deps/v8/test/cctest/test-mark-compact.cc10
-rwxr-xr-xdeps/v8/test/cctest/test-parsing.cc5
-rw-r--r--deps/v8/test/cctest/test-regexp.cc253
-rw-r--r--deps/v8/test/cctest/test-spaces.cc8
-rw-r--r--deps/v8/test/cctest/test-strings.cc118
-rw-r--r--deps/v8/test/cctest/test-thread-termination.cc4
-rw-r--r--deps/v8/test/cctest/test-weakmaps.cc80
-rw-r--r--deps/v8/test/message/message.status2
-rw-r--r--deps/v8/test/mjsunit/accessor-map-sharing.js176
-rw-r--r--deps/v8/test/mjsunit/array-bounds-check-removal.js145
-rw-r--r--deps/v8/test/mjsunit/array-construct-transition.js6
-rw-r--r--deps/v8/test/mjsunit/array-literal-transitions.js20
-rw-r--r--deps/v8/test/mjsunit/compiler/alloc-object-huge.js2
-rw-r--r--deps/v8/test/mjsunit/compiler/inline-arguments.js67
-rw-r--r--deps/v8/test/mjsunit/compiler/inline-construct.js6
-rw-r--r--deps/v8/test/mjsunit/compiler/literals.js24
-rw-r--r--deps/v8/test/mjsunit/compiler/optimize-bitnot.js42
-rw-r--r--deps/v8/test/mjsunit/date-parse.js3
-rw-r--r--deps/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js17
-rw-r--r--deps/v8/test/mjsunit/debug-evaluate-locals-optimized.js17
-rw-r--r--deps/v8/test/mjsunit/debug-function-scopes.js162
-rw-r--r--deps/v8/test/mjsunit/debug-liveedit-stack-padding.js88
-rw-r--r--deps/v8/test/mjsunit/debug-scripts-request.js6
-rw-r--r--deps/v8/test/mjsunit/debug-stepin-builtin-callback.js157
-rw-r--r--deps/v8/test/mjsunit/declare-locally.js6
-rw-r--r--deps/v8/test/mjsunit/elements-kind.js12
-rw-r--r--deps/v8/test/mjsunit/elements-transition-hoisting.js46
-rw-r--r--deps/v8/test/mjsunit/elements-transition.js10
-rw-r--r--deps/v8/test/mjsunit/error-constructors.js101
-rw-r--r--deps/v8/test/mjsunit/external-array.js80
-rw-r--r--deps/v8/test/mjsunit/fast-array-length.js37
-rw-r--r--deps/v8/test/mjsunit/fast-non-keyed.js113
-rw-r--r--deps/v8/test/mjsunit/fast-prototype.js113
-rw-r--r--deps/v8/test/mjsunit/harmony/debug-function-scopes.js115
-rw-r--r--deps/v8/test/mjsunit/harmony/module-linking.js121
-rw-r--r--deps/v8/test/mjsunit/harmony/module-parsing.js10
-rw-r--r--deps/v8/test/mjsunit/harmony/module-resolution.js2
-rw-r--r--deps/v8/test/mjsunit/harmony/proxies.js48
-rw-r--r--deps/v8/test/mjsunit/math-floor-of-div.js216
-rw-r--r--deps/v8/test/mjsunit/mjsunit.js2
-rw-r--r--deps/v8/test/mjsunit/mjsunit.status14
-rw-r--r--deps/v8/test/mjsunit/override-read-only-property.js10
-rw-r--r--deps/v8/test/mjsunit/packed-elements.js112
-rw-r--r--deps/v8/test/mjsunit/readonly.js228
-rw-r--r--deps/v8/test/mjsunit/regexp-capture-3.js191
-rwxr-xr-xdeps/v8/test/mjsunit/regexp-capture.js2
-rw-r--r--deps/v8/test/mjsunit/regexp-global.js141
-rw-r--r--deps/v8/test/mjsunit/regexp.js11
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1119.js12
-rw-r--r--deps/v8/test/mjsunit/regress/regress-115452.js19
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1170.js64
-rw-r--r--deps/v8/test/mjsunit/regress/regress-117409.js2
-rw-r--r--deps/v8/test/mjsunit/regress/regress-119609.js71
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1199637.js4
-rw-r--r--deps/v8/test/mjsunit/regress/regress-120099.js40
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1217.js2
-rw-r--r--deps/v8/test/mjsunit/regress/regress-123512.js78
-rw-r--r--deps/v8/test/mjsunit/regress/regress-123919.js47
-rw-r--r--deps/v8/test/mjsunit/regress/regress-126412.js33
-rw-r--r--deps/v8/test/mjsunit/regress/regress-128146.js33
-rw-r--r--deps/v8/test/mjsunit/regress/regress-131923.js30
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1639-2.js5
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1639.js22
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1849.js6
-rw-r--r--deps/v8/test/mjsunit/regress/regress-1878.js4
-rw-r--r--deps/v8/test/mjsunit/regress/regress-2030.js53
-rw-r--r--deps/v8/test/mjsunit/regress/regress-2032.js64
-rw-r--r--deps/v8/test/mjsunit/regress/regress-2034.js46
-rw-r--r--deps/v8/test/mjsunit/regress/regress-2054.js34
-rw-r--r--deps/v8/test/mjsunit/regress/regress-2055.js48
-rw-r--r--deps/v8/test/mjsunit/regress/regress-2058.js37
-rw-r--r--deps/v8/test/mjsunit/regress/regress-2110.js53
-rw-r--r--deps/v8/test/mjsunit/regress/regress-2153.js32
-rw-r--r--deps/v8/test/mjsunit/regress/regress-2163.js70
-rw-r--r--deps/v8/test/mjsunit/regress/regress-2170.js58
-rw-r--r--deps/v8/test/mjsunit/regress/regress-334.js2
-rw-r--r--deps/v8/test/mjsunit/regress/regress-crbug-122271.js8
-rw-r--r--deps/v8/test/mjsunit/regress/regress-deep-proto.js45
-rw-r--r--deps/v8/test/mjsunit/regress/regress-fast-literal-transition.js62
-rw-r--r--deps/v8/test/mjsunit/regress/regress-iteration-order.js42
-rw-r--r--deps/v8/test/mjsunit/regress/regress-smi-only-concat.js4
-rw-r--r--deps/v8/test/mjsunit/regress/regress-transcendental.js49
-rw-r--r--deps/v8/test/mjsunit/stack-traces.js14
-rw-r--r--deps/v8/test/mjsunit/try-finally-continue.js72
-rw-r--r--deps/v8/test/mjsunit/unbox-double-arrays.js7
-rw-r--r--deps/v8/test/mjsunit/unicodelctest-no-optimization.js4914
-rw-r--r--deps/v8/test/mjsunit/unicodelctest.js4912
-rw-r--r--deps/v8/test/mjsunit/with-readonly.js6
-rw-r--r--deps/v8/test/mozilla/mozilla.status28
-rw-r--r--deps/v8/test/mozilla/testcfg.py1
-rw-r--r--deps/v8/test/sputnik/sputnik.status40
-rw-r--r--deps/v8/test/test262/README4
-rw-r--r--deps/v8/test/test262/test262.status23
-rw-r--r--deps/v8/test/test262/testcfg.py51
-rw-r--r--deps/v8/tools/fuzz-harness.sh92
-rwxr-xr-xdeps/v8/tools/grokdump.py728
-rw-r--r--deps/v8/tools/gyp/v8.gyp63
-rw-r--r--deps/v8/tools/js2c.py6
-rw-r--r--deps/v8/tools/jsmin.py4
-rwxr-xr-xdeps/v8/tools/presubmit.py5
-rwxr-xr-xdeps/v8/tools/push-to-trunk.sh9
-rwxr-xr-xdeps/v8/tools/test-wrapper-gypbuild.py17
-rwxr-xr-xdeps/v8/tools/test.py9
373 files changed, 36026 insertions, 11559 deletions
diff --git a/deps/v8/AUTHORS b/deps/v8/AUTHORS
index dfefad129f..6e46b3d621 100644
--- a/deps/v8/AUTHORS
+++ b/deps/v8/AUTHORS
@@ -23,6 +23,7 @@ Daniel James <dnljms@gmail.com>
Dineel D Sule <dsule@codeaurora.org>
Erich Ocean <erich.ocean@me.com>
Fedor Indutny <fedor@indutny.com>
+Filipe David Manana <fdmanana@gmail.com>
Ioseb Dzmanashvili <ioseb.dzmanashvili@gmail.com>
Jan de Mooij <jandemooij@gmail.com>
Jay Freeman <saurik@saurik.com>
diff --git a/deps/v8/ChangeLog b/deps/v8/ChangeLog
index 2240ec0e68..fae15e58ee 100644
--- a/deps/v8/ChangeLog
+++ b/deps/v8/ChangeLog
@@ -1,3 +1,314 @@
+2012-06-13: Version 3.11.10
+
+ Implemented heap profiler memory usage reporting.
+
+ Preserved error message during finally block in try..finally.
+ (Chromium issue 129171)
+
+ Fixed EnsureCanContainElements to properly handle double values.
+ (issue 2170)
+
+ Improved heuristics to keep objects in fast mode with inherited
+ constructors.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-06-06: Version 3.11.9
+
+ Implemented ES5-conformant semantics for inherited setters and read-only
+ properties. Currently behind --es5_readonly flag, because it breaks
+ WebKit bindings.
+
+ Exposed last seen heap object id via v8 public api.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-05-31: Version 3.11.8
+
+ Avoid overdeep recursion in regexp where a guarded expression with a
+ minimum repetition count is inside another quantifier.
+ (Chromium issue 129926)
+
+ Fixed missing write barrier in store field stub.
+ (issues 2143, 1465, Chromium issue 129355)
+
+ Proxies: Fixed receiver for setters inherited from proxies.
+ Proxies: Fixed ToStringArray function so that it does not reject some
+ keys.
+ (issue 1543)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-05-29: Version 3.11.7
+
+ Get better function names in stack traces.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-05-24: Version 3.11.6
+
+ Fixed RegExp.prototype.toString for incompatible receivers
+ (issue 1981).
+
+ Performance and stability improvements on all platforms.
+
+
+2012-05-23: Version 3.11.5
+
+ Performance and stability improvements on all platforms.
+
+
+2012-05-22: Version 3.11.4
+
+ Some cleanup to common.gypi. This fixes some host/target combinations
+ that weren't working in the Make build on Mac.
+
+ Handle EINTR in socket functions and continue incomplete sends.
+ (issue 2098)
+
+ Fixed python deprecations. (issue 1391)
+
+ Made socket send and receive more robust and return 0 on failure.
+ (Chromium issue 15719)
+
+ Fixed GCC 4.7 (C++11) compilation. (issue 2136)
+
+ Set '-m32' option for host and target platforms
+
+ Performance and stability improvements on all platforms.
+
+
+2012-05-18: Version 3.11.3
+
+ Disable optimization for functions that have scopes that cannot be
+ reconstructed from the context chain. (issue 2071)
+
+ Define V8_EXPORT to nothing for clients of v8. (Chromium issue 90078)
+
+ Correctly check for native error objects. (Chromium issue 2138)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-05-16: Version 3.11.2
+
+ Revert r11496. (Chromium issue 128146)
+
+ Implement map collection for incremental marking. (issue 1465)
+
+ Add toString method to CallSite (which describes a frame of the
+ stack trace).
+
+
+2012-05-15: Version 3.11.1
+
+ Added a readbuffer function to d8 that reads a file into an ArrayBuffer.
+
+ Fix freebsd build. (V8 issue 2126)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-05-11: Version 3.11.0
+
+ Fixed compose-discard crasher from r11524 (issue 2123).
+
+ Activated new global semantics by default. Global variables can
+ now shadow properties of the global object (ES5.1 erratum).
+
+ Properly set ElementsKind of empty FAST_DOUBLE_ELEMENTS arrays when
+ transitioning (Chromium issue 117409).
+
+ Made Error.prototype.name writable again, as required by the spec and
+ the web (Chromium issue 69187).
+
+ Implemented map collection with incremental marking (issue 1465).
+
+ Regexp: Fixed overflow in min-match-length calculation
+ (Chromium issue 126412).
+
+ MIPS: Fixed illegal instruction use on Loongson in code for
+ Math.random() (issue 2115).
+
+ Fixed crash bug in VisitChoice (Chromium issue 126272).
+
+ Fixed unsigned-Smi check in MappedArgumentsLookup
+ (Chromium issue 126414).
+
+ Fixed LiveEdit for function with no locals (issue 825).
+
+ Fixed register clobbering in LoadIC for interceptors
+ (Chromium issue 125988).
+
+ Implemented clearing of CompareICs (issue 2102).
+
+ Performance and stability improvements on all platforms.
+
+
+2012-05-03: Version 3.10.8
+
+ Enabled MIPS cross-compilation.
+
+ Ensured reload of elements pointer in StoreFastDoubleElement stub.
+ (Chromium issue 125515)
+
+ Fixed corner cases in truncation behavior when storing to
+ TypedArrays. (issue 2110)
+
+ Fixed failure to properly recognize and report out-of-memory
+ conditions when allocating code space pages. (Chromium issue
+ 118625)
+
+ Fixed idle notifications to perform a round of incremental GCs
+ after context disposal. (issue 2107)
+
+ Fixed preparser for try statement. (issue 2109)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-04-30: Version 3.10.7
+
+ Performance and stability improvements on all platforms.
+
+
+2012-04-26: Version 3.10.6
+
+ Fixed some bugs in accessing details of the last regexp match.
+
+ Fixed source property of empty RegExp objects. (issue 1982)
+
+ Enabled inlining some V8 API functions.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-04-23: Version 3.10.5
+
+ Put new global var semantics behind a flag until WebKit tests are
+ cleaned up.
+
+ Enabled stepping into callback passed to builtins.
+ (Chromium issue 109564)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-04-19: Version 3.10.4
+
+ Fixed issues when stressing compaction with WeakMaps.
+
+ Fixed missing GVN flag for new-space promotion. (Chromium issue 123919)
+
+ Simplify invocation sequence at monomorphic function invocation sites.
+ (issue 2079)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-04-17: Version 3.10.3
+
+ Fixed several bugs in heap profiles (including issue 2078).
+
+ Throw syntax errors on illegal escape sequences.
+
+ Implemented rudimentary module linking (behind --harmony flag)
+
+ Implemented ES5 erratum: Global declarations should shadow
+ inherited properties.
+
+ Made handling of const more consistent when combined with 'eval'
+ and 'with'.
+
+ Fixed V8 on MinGW-x64 (issue 2026).
+
+ Performance and stability improvements on all platforms.
+
+
+2012-04-13: Version 3.10.2
+
+ Fixed native ARM build (issues 1744, 539)
+
+ Return LOOKUP variable instead of CONTEXT for non-context allocated
+ outer scope parameters (Chromium issue 119609).
+
+ Fixed regular and ElementsKind transitions interfering with each other
+ (Chromium issue 122271).
+
+ Improved performance of keyed loads/stores which have a HeapNumber
+ index (issues 1388, 1295).
+
+ Fixed WeakMap processing for evacuation candidates (issue 2060).
+
+ Bailout on possible direct eval calls (Chromium issue 122681).
+
+ Do not assume that names of function expressions are context-allocated
+ (issue 2051).
+
+ Performance and stability improvements on all platforms.
+
+
+2012-04-10: Version 3.10.1
+
+ Fixed bug with arguments object in inlined functions (issue 2045).
+
+ Fixed performance bug with lazy initialization (Chromium issue
+ 118686).
+
+ Added suppport for Mac OS X 64bit builds with GYP.
+ (Patch contributed by Filipe David Manana <fdmanana@gmail.com>)
+
+ Fixed bug with hidden properties (issue 2034).
+
+ Fixed a performance bug when reloading pages (Chromium issue 117767,
+ V8 issue 1902).
+
+ Fixed bug when optimizing throw in top-level code (issue 2054).
+
+ Fixed two bugs with array literals (issue 2055, Chromium issue 121407).
+
+ Fixed bug with Math.min/Math.max with NaN inputs (issue 2056).
+
+ Fixed a bug with the new runtime profiler (Chromium issue 121147).
+
+ Fixed compilation of V8 using uClibc.
+
+ Optimized boot-up memory use.
+
+ Optimized regular expressions.
+
+
+2012-03-30: Version 3.10.0
+
+ Fixed store IC writability check in strict mode
+ (Chromium issue 120099).
+
+ Resynchronize timers if the Windows system time was changed.
+ (Chromium issue 119815)
+
+ Removed "-mfloat-abi=hard" from host compiler cflags when building for
+ hardfp ARM
+ (https://code.google.com/p/chrome-os-partner/issues/detail?id=8539)
+
+ Fixed edge case for case independent regexp character classes
+ (issue 2032).
+
+ Reset function info counters after context disposal.
+ (Chromium issue 117767, V8 issue 1902)
+
+ Fixed missing write barrier in CopyObjectToObjectElements.
+ (Chromium issue 119926)
+
+ Fixed missing bounds check in HasElementImpl.
+ (Chromium issue 119925)
+
+ Performance and stability improvements on all platforms.
+
+
2012-03-23: Version 3.9.24
Activated count-based profiler for ARM.
diff --git a/deps/v8/DEPS b/deps/v8/DEPS
new file mode 100644
index 0000000000..e50d1d20f6
--- /dev/null
+++ b/deps/v8/DEPS
@@ -0,0 +1,27 @@
+# Note: The buildbots evaluate this file with CWD set to the parent
+# directory and assume that the root of the checkout is in ./v8/, so
+# all paths in here must match this assumption.
+
+deps = {
+ # Remember to keep the revision in sync with the Makefile.
+ "v8/build/gyp":
+ "http://gyp.googlecode.com/svn/trunk@1282",
+}
+
+deps_os = {
+ "win": {
+ "v8/third_party/cygwin":
+ "http://src.chromium.org/svn/trunk/deps/third_party/cygwin@66844",
+
+ "v8/third_party/python_26":
+ "http://src.chromium.org/svn/trunk/tools/third_party/python_26@89111",
+ }
+}
+
+hooks = [
+ {
+ # A change to a .gyp, .gypi, or to GYP itself should run the generator.
+ "pattern": ".",
+ "action": ["python", "v8/build/gyp_v8"],
+ },
+]
diff --git a/deps/v8/Makefile b/deps/v8/Makefile
index 2f86c512e4..0d825c0795 100644
--- a/deps/v8/Makefile
+++ b/deps/v8/Makefile
@@ -150,21 +150,21 @@ $(MODES): $(addsuffix .$$@,$(DEFAULT_ARCHES))
$(ARCHES): $(addprefix $$@.,$(MODES))
# Defines how to build a particular target (e.g. ia32.release).
-$(BUILDS): $(OUTDIR)/Makefile-$$(basename $$@)
- @$(MAKE) -C "$(OUTDIR)" -f Makefile-$(basename $@) \
+$(BUILDS): $(OUTDIR)/Makefile.$$(basename $$@)
+ @$(MAKE) -C "$(OUTDIR)" -f Makefile.$(basename $@) \
CXX="$(CXX)" LINK="$(LINK)" \
BUILDTYPE=$(shell echo $(subst .,,$(suffix $@)) | \
python -c "print raw_input().capitalize()") \
builddir="$(shell pwd)/$(OUTDIR)/$@"
-native: $(OUTDIR)/Makefile-native
- @$(MAKE) -C "$(OUTDIR)" -f Makefile-native \
+native: $(OUTDIR)/Makefile.native
+ @$(MAKE) -C "$(OUTDIR)" -f Makefile.native \
CXX="$(CXX)" LINK="$(LINK)" BUILDTYPE=Release \
builddir="$(shell pwd)/$(OUTDIR)/$@"
# TODO(jkummerow): add "android.debug" when we need it.
-android android.release: $(OUTDIR)/Makefile-android
- @$(MAKE) -C "$(OUTDIR)" -f Makefile-android \
+android android.release: $(OUTDIR)/Makefile.android
+ @$(MAKE) -C "$(OUTDIR)" -f Makefile.android \
CXX="$(ANDROID_TOOL_PREFIX)-g++" \
AR="$(ANDROID_TOOL_PREFIX)-ar" \
RANLIB="$(ANDROID_TOOL_PREFIX)-ranlib" \
@@ -197,55 +197,41 @@ native.check: native
--arch-and-mode=. $(TESTFLAGS)
# Clean targets. You can clean each architecture individually, or everything.
-$(addsuffix .clean,$(ARCHES)):
- rm -f $(OUTDIR)/Makefile-$(basename $@)
+$(addsuffix .clean,$(ARCHES)) android.clean:
+ rm -f $(OUTDIR)/Makefile.$(basename $@)
rm -rf $(OUTDIR)/$(basename $@).release
rm -rf $(OUTDIR)/$(basename $@).debug
- find $(OUTDIR) -regex '.*\(host\|target\)-$(basename $@)\.mk' -delete
+ find $(OUTDIR) -regex '.*\(host\|target\).$(basename $@)\.mk' -delete
native.clean:
- rm -f $(OUTDIR)/Makefile-native
+ rm -f $(OUTDIR)/Makefile.native
rm -rf $(OUTDIR)/native
- find $(OUTDIR) -regex '.*\(host\|target\)-native\.mk' -delete
+ find $(OUTDIR) -regex '.*\(host\|target\).native\.mk' -delete
-android.clean:
- rm -f $(OUTDIR)/Makefile-android
- rm -rf $(OUTDIR)/android.release
- find $(OUTDIR) -regex '.*\(host\|target\)-android\.mk' -delete
-
-clean: $(addsuffix .clean,$(ARCHES)) native.clean
+clean: $(addsuffix .clean,$(ARCHES)) native.clean android.clean
# GYP file generation targets.
-$(OUTDIR)/Makefile-ia32: $(GYPFILES) $(ENVFILE)
- build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \
- -Ibuild/standalone.gypi --depth=. -Dtarget_arch=ia32 \
- -S-ia32 $(GYPFLAGS)
-
-$(OUTDIR)/Makefile-x64: $(GYPFILES) $(ENVFILE)
- build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \
- -Ibuild/standalone.gypi --depth=. -Dtarget_arch=x64 \
- -S-x64 $(GYPFLAGS)
-
-$(OUTDIR)/Makefile-arm: $(GYPFILES) $(ENVFILE) build/armu.gypi
- build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \
- -Ibuild/standalone.gypi --depth=. -Ibuild/armu.gypi \
- -S-arm $(GYPFLAGS)
-
-$(OUTDIR)/Makefile-mips: $(GYPFILES) $(ENVFILE) build/mipsu.gypi
+MAKEFILES = $(addprefix $(OUTDIR)/Makefile.,$(ARCHES))
+$(MAKEFILES): $(GYPFILES) $(ENVFILE)
+ GYP_GENERATORS=make \
build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \
- -Ibuild/standalone.gypi --depth=. -Ibuild/mipsu.gypi \
- -S-mips $(GYPFLAGS)
+ -Ibuild/standalone.gypi --depth=. \
+ -Dv8_target_arch=$(subst .,,$(suffix $@)) \
+ -S.$(subst .,,$(suffix $@)) $(GYPFLAGS)
-$(OUTDIR)/Makefile-native: $(GYPFILES) $(ENVFILE)
+$(OUTDIR)/Makefile.native: $(GYPFILES) $(ENVFILE)
+ GYP_GENERATORS=make \
build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \
- -Ibuild/standalone.gypi --depth=. -S-native $(GYPFLAGS)
+ -Ibuild/standalone.gypi --depth=. -S.native $(GYPFLAGS)
-$(OUTDIR)/Makefile-android: $(GYPFILES) $(ENVFILE) build/android.gypi \
+$(OUTDIR)/Makefile.android: $(GYPFILES) $(ENVFILE) build/android.gypi \
must-set-ANDROID_NDK_ROOT
+ GYP_GENERATORS=make \
CC="${ANDROID_TOOL_PREFIX}-gcc" \
+ CXX="${ANDROID_TOOL_PREFIX}-g++" \
build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \
-Ibuild/standalone.gypi --depth=. -Ibuild/android.gypi \
- -S-android $(GYPFLAGS)
+ -S.android $(GYPFLAGS)
must-set-ANDROID_NDK_ROOT:
ifndef ANDROID_NDK_ROOT
@@ -261,9 +247,10 @@ $(ENVFILE): $(ENVFILE).new
# Stores current GYPFLAGS in a file.
$(ENVFILE).new:
- @mkdir -p $(OUTDIR); echo "GYPFLAGS=$(GYPFLAGS)" > $(ENVFILE).new;
+ @mkdir -p $(OUTDIR); echo "GYPFLAGS=$(GYPFLAGS)" > $(ENVFILE).new; \
+ echo "CXX=$(CXX)" >> $(ENVFILE).new
# Dependencies.
dependencies:
svn checkout --force http://gyp.googlecode.com/svn/trunk build/gyp \
- --revision 1026
+ --revision 1282
diff --git a/deps/v8/SConstruct b/deps/v8/SConstruct
index b0d1344700..ebce7ff892 100644
--- a/deps/v8/SConstruct
+++ b/deps/v8/SConstruct
@@ -101,14 +101,14 @@ LIBRARY_FLAGS = {
'os:linux': {
'CCFLAGS': ['-ansi'] + GCC_EXTRA_CCFLAGS,
'library:shared': {
- 'CPPDEFINES': ['V8_SHARED'],
+ 'CPPDEFINES': ['V8_SHARED', 'BUILDING_V8_SHARED'],
'LIBS': ['pthread']
}
},
'os:macos': {
'CCFLAGS': ['-ansi', '-mmacosx-version-min=10.4'],
'library:shared': {
- 'CPPDEFINES': ['V8_SHARED']
+ 'CPPDEFINES': ['V8_SHARED', 'BUILDING_V8_SHARED'],
}
},
'os:freebsd': {
diff --git a/deps/v8/build/armu.gypi b/deps/v8/build/armu.gypi
deleted file mode 100644
index d15b8ab705..0000000000
--- a/deps/v8/build/armu.gypi
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2011 the V8 project authors. All rights reserved.
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following
-# disclaimer in the documentation and/or other materials provided
-# with the distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived
-# from this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-{
- 'variables': {
- 'target_arch': 'ia32',
- 'v8_target_arch': 'arm',
- 'armv7': 1,
- 'arm_neon': 0,
- 'arm_fpu': 'vfpv3',
- },
-}
diff --git a/deps/v8/build/common.gypi b/deps/v8/build/common.gypi
index 5fa109b4d1..7f084b8c1d 100644
--- a/deps/v8/build/common.gypi
+++ b/deps/v8/build/common.gypi
@@ -110,133 +110,117 @@
['v8_enable_gdbjit==1', {
'defines': ['ENABLE_GDB_JIT_INTERFACE',],
}],
- ['OS!="mac"', {
- # TODO(mark): The OS!="mac" conditional is temporary. It can be
- # removed once the Mac Chromium build stops setting target_arch to
- # ia32 and instead sets it to mac. Other checks in this file for
- # OS=="mac" can be removed at that time as well. This can be cleaned
- # up once http://crbug.com/44205 is fixed.
+ ['v8_target_arch=="arm"', {
+ 'defines': [
+ 'V8_TARGET_ARCH_ARM',
+ ],
'conditions': [
- ['v8_target_arch=="arm"', {
+ [ 'v8_can_use_unaligned_accesses=="true"', {
'defines': [
- 'V8_TARGET_ARCH_ARM',
+ 'CAN_USE_UNALIGNED_ACCESSES=1',
],
- 'conditions': [
- [ 'v8_can_use_unaligned_accesses=="true"', {
- 'defines': [
- 'CAN_USE_UNALIGNED_ACCESSES=1',
- ],
- }],
- [ 'v8_can_use_unaligned_accesses=="false"', {
- 'defines': [
- 'CAN_USE_UNALIGNED_ACCESSES=0',
- ],
- }],
- [ 'v8_can_use_vfp_instructions=="true"', {
- 'defines': [
- 'CAN_USE_VFP_INSTRUCTIONS',
- ],
- }],
- [ 'v8_use_arm_eabi_hardfloat=="true"', {
- 'defines': [
- 'USE_EABI_HARDFLOAT=1',
- 'CAN_USE_VFP_INSTRUCTIONS',
- ],
- 'cflags': [
- '-mfloat-abi=hard',
- ],
- }, {
- 'defines': [
- 'USE_EABI_HARDFLOAT=0',
- ],
- }],
- # The ARM assembler assumes the host is 32 bits,
- # so force building 32-bit host tools.
- ['host_arch=="x64" or OS=="android"', {
- 'target_conditions': [
- ['_toolset=="host"', {
- 'cflags': ['-m32'],
- 'ldflags': ['-m32'],
- }],
- ],
- }],
+ }],
+ [ 'v8_can_use_unaligned_accesses=="false"', {
+ 'defines': [
+ 'CAN_USE_UNALIGNED_ACCESSES=0',
],
}],
- ['v8_target_arch=="ia32"', {
+ [ 'v8_can_use_vfp_instructions=="true"', {
'defines': [
- 'V8_TARGET_ARCH_IA32',
+ 'CAN_USE_VFP_INSTRUCTIONS',
],
}],
- ['v8_target_arch=="mips"', {
+ [ 'v8_use_arm_eabi_hardfloat=="true"', {
'defines': [
- 'V8_TARGET_ARCH_MIPS',
+ 'USE_EABI_HARDFLOAT=1',
+ 'CAN_USE_VFP_INSTRUCTIONS',
],
- 'conditions': [
- [ 'target_arch=="mips"', {
- 'target_conditions': [
- ['_toolset=="target"', {
- 'cflags': ['-EL'],
- 'ldflags': ['-EL'],
- 'conditions': [
- [ 'v8_use_mips_abi_hardfloat=="true"', {
- 'cflags': ['-mhard-float'],
- 'ldflags': ['-mhard-float'],
- }, {
- 'cflags': ['-msoft-float'],
- 'ldflags': ['-msoft-float'],
- }],
- ['mips_arch_variant=="mips32r2"', {
- 'cflags': ['-mips32r2', '-Wa,-mips32r2'],
- }],
- ['mips_arch_variant=="loongson"', {
- 'cflags': ['-mips3', '-Wa,-mips3'],
- }, {
- 'cflags': ['-mips32', '-Wa,-mips32'],
- }],
- ],
- }],
- ],
- }],
- [ 'v8_can_use_fpu_instructions=="true"', {
- 'defines': [
- 'CAN_USE_FPU_INSTRUCTIONS',
- ],
+ 'target_conditions': [
+ ['_toolset=="target"', {
+ 'cflags': ['-mfloat-abi=hard',],
}],
- [ 'v8_use_mips_abi_hardfloat=="true"', {
- 'defines': [
- '__mips_hard_float=1',
- 'CAN_USE_FPU_INSTRUCTIONS',
- ],
- }, {
- 'defines': [
- '__mips_soft_float=1'
- ],
- }],
- ['mips_arch_variant=="mips32r2"', {
- 'defines': ['_MIPS_ARCH_MIPS32R2',],
- }],
- ['mips_arch_variant=="loongson"', {
- 'defines': ['_MIPS_ARCH_LOONGSON',],
- }],
- # The MIPS assembler assumes the host is 32 bits,
- # so force building 32-bit host tools.
- ['host_arch=="x64"', {
- 'target_conditions': [
- ['_toolset=="host"', {
- 'cflags': ['-m32'],
- 'ldflags': ['-m32'],
+ ],
+ }, {
+ 'defines': [
+ 'USE_EABI_HARDFLOAT=0',
+ ],
+ }],
+ ],
+ }], # v8_target_arch=="arm"
+ ['v8_target_arch=="ia32"', {
+ 'defines': [
+ 'V8_TARGET_ARCH_IA32',
+ ],
+ }], # v8_target_arch=="ia32"
+ ['v8_target_arch=="mips"', {
+ 'defines': [
+ 'V8_TARGET_ARCH_MIPS',
+ ],
+ 'variables': {
+ 'mipscompiler': '<!($(echo ${CXX:-$(which g++)}) -v 2>&1 | grep -q "^Target: mips-" && echo "yes" || echo "no")',
+ },
+ 'conditions': [
+ ['mipscompiler=="yes"', {
+ 'target_conditions': [
+ ['_toolset=="target"', {
+ 'cflags': ['-EL'],
+ 'ldflags': ['-EL'],
+ 'conditions': [
+ [ 'v8_use_mips_abi_hardfloat=="true"', {
+ 'cflags': ['-mhard-float'],
+ 'ldflags': ['-mhard-float'],
+ }, {
+ 'cflags': ['-msoft-float'],
+ 'ldflags': ['-msoft-float'],
+ }],
+ ['mips_arch_variant=="mips32r2"', {
+ 'cflags': ['-mips32r2', '-Wa,-mips32r2'],
+ }],
+ ['mips_arch_variant=="loongson"', {
+ 'cflags': ['-mips3', '-Wa,-mips3'],
+ }, {
+ 'cflags': ['-mips32', '-Wa,-mips32'],
}],
],
}],
],
}],
- ['v8_target_arch=="x64"', {
+ [ 'v8_can_use_fpu_instructions=="true"', {
'defines': [
- 'V8_TARGET_ARCH_X64',
+ 'CAN_USE_FPU_INSTRUCTIONS',
],
}],
+ [ 'v8_use_mips_abi_hardfloat=="true"', {
+ 'defines': [
+ '__mips_hard_float=1',
+ 'CAN_USE_FPU_INSTRUCTIONS',
+ ],
+ }, {
+ 'defines': [
+ '__mips_soft_float=1'
+ ],
+ }],
+ ['mips_arch_variant=="mips32r2"', {
+ 'defines': ['_MIPS_ARCH_MIPS32R2',],
+ }],
+ ['mips_arch_variant=="loongson"', {
+ 'defines': ['_MIPS_ARCH_LOONGSON',],
+ }],
],
- }],
+ }], # v8_target_arch=="mips"
+ ['v8_target_arch=="x64"', {
+ 'defines': [
+ 'V8_TARGET_ARCH_X64',
+ ],
+ 'xcode_settings': {
+ 'ARCHS': [ 'x86_64' ],
+ },
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'StackReserveSize': '2097152',
+ },
+ },
+ }], # v8_target_arch=="x64"
['v8_use_liveobjectlist=="true"', {
'defines': [
'ENABLE_DEBUGGER_SUPPORT',
@@ -254,6 +238,11 @@
'defines': [
'WIN32',
],
+ 'msvs_configuration_attributes': {
+ 'OutputDirectory': '<(DEPTH)\\build\\$(ConfigurationName)',
+ 'IntermediateDirectory': '$(OutDir)\\obj\\$(ProjectName)',
+ 'CharacterSet': '1',
+ },
}],
['OS=="win" and v8_enable_prof==1', {
'msvs_settings': {
@@ -262,20 +251,9 @@
},
},
}],
- ['OS=="win" and v8_target_arch=="x64"', {
- 'msvs_settings': {
- 'VCLinkerTool': {
- 'StackReserveSize': '2097152',
- },
- },
- }],
['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris" \
or OS=="netbsd"', {
'conditions': [
- [ 'target_arch=="ia32"', {
- 'cflags': [ '-m32' ],
- 'ldflags': [ '-m32' ],
- }],
[ 'v8_no_strict_aliasing==1', {
'cflags': [ '-fno-strict-aliasing' ],
}],
@@ -284,6 +262,41 @@
['OS=="solaris"', {
'defines': [ '__C99FEATURES__=1' ], # isinf() etc.
}],
+ ['(OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris" \
+ or OS=="netbsd" or OS=="mac" or OS=="android") and \
+ (v8_target_arch=="arm" or v8_target_arch=="ia32" or \
+ v8_target_arch=="mips")', {
+ # Check whether the host compiler and target compiler support the
+ # '-m32' option and set it if so.
+ 'target_conditions': [
+ ['_toolset=="host"', {
+ 'variables': {
+ 'm32flag': '<!((echo | $(echo ${CXX_host:-$(which g++)}) -m32 -E - > /dev/null 2>&1) && echo -n "-m32" || true)',
+ },
+ 'cflags': [ '<(m32flag)' ],
+ 'ldflags': [ '<(m32flag)' ],
+ 'xcode_settings': {
+ 'ARCHS': [ 'i386' ],
+ },
+ }],
+ ['_toolset=="target"', {
+ 'variables': {
+ 'm32flag': '<!((echo | $(echo ${CXX_target:-${CXX:-$(which g++)}}) -m32 -E - > /dev/null 2>&1) && echo -n "-m32" || true)',
+ },
+ 'cflags': [ '<(m32flag)' ],
+ 'ldflags': [ '<(m32flag)' ],
+ 'xcode_settings': {
+ 'ARCHS': [ 'i386' ],
+ },
+ }],
+ ],
+ }],
+ ['OS=="freebsd" or OS=="openbsd"', {
+ 'cflags': [ '-I/usr/local/include' ],
+ }],
+ ['OS=="netbsd"', {
+ 'cflags': [ '-I/usr/pkg/include' ],
+ }],
], # conditions
'configurations': {
'Debug': {
@@ -310,14 +323,8 @@
},
},
'conditions': [
- ['OS=="freebsd" or OS=="openbsd"', {
- 'cflags': [ '-I/usr/local/include' ],
- }],
- ['OS=="netbsd"', {
- 'cflags': [ '-I/usr/pkg/include' ],
- }],
['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="netbsd"', {
- 'cflags': [ '-Wno-unused-parameter',
+ 'cflags': [ '-Wall', '<(werror)', '-W', '-Wno-unused-parameter',
'-Wnon-virtual-dtor', '-Woverloaded-virtual' ],
}],
],
@@ -345,12 +352,6 @@
}],
],
}],
- ['OS=="freebsd" or OS=="openbsd"', {
- 'cflags': [ '-I/usr/local/include' ],
- }],
- ['OS=="netbsd"', {
- 'cflags': [ '-I/usr/pkg/include' ],
- }],
['OS=="mac"', {
'xcode_settings': {
'GCC_OPTIMIZATION_LEVEL': '3', # -O3
@@ -363,11 +364,6 @@
},
}], # OS=="mac"
['OS=="win"', {
- 'msvs_configuration_attributes': {
- 'OutputDirectory': '<(DEPTH)\\build\\$(ConfigurationName)',
- 'IntermediateDirectory': '$(OutDir)\\obj\\$(ProjectName)',
- 'CharacterSet': '1',
- },
'msvs_settings': {
'VCCLCompilerTool': {
'Optimization': '2',
diff --git a/deps/v8/build/gyp_v8 b/deps/v8/build/gyp_v8
index 6d5c126844..345f777d79 100755
--- a/deps/v8/build/gyp_v8
+++ b/deps/v8/build/gyp_v8
@@ -1,6 +1,6 @@
#!/usr/bin/python
#
-# Copyright 2010 the V8 project authors. All rights reserved.
+# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
diff --git a/deps/v8/build/mipsu.gypi b/deps/v8/build/mipsu.gypi
deleted file mode 100644
index 637ff841e4..0000000000
--- a/deps/v8/build/mipsu.gypi
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2012 the V8 project authors. All rights reserved.
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following
-# disclaimer in the documentation and/or other materials provided
-# with the distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived
-# from this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-{
- 'variables': {
- 'target_arch': 'ia32',
- 'v8_target_arch': 'mips',
- },
-}
diff --git a/deps/v8/build/standalone.gypi b/deps/v8/build/standalone.gypi
index b5707800f8..ebdf557230 100644
--- a/deps/v8/build/standalone.gypi
+++ b/deps/v8/build/standalone.gypi
@@ -37,8 +37,9 @@
'variables': {
'variables': {
'conditions': [
- ['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="netbsd"', {
- # This handles the Linux platforms we generally deal with.
+ ['OS=="linux" or OS=="freebsd" or OS=="openbsd" or \
+ OS=="netbsd" or OS=="mac"', {
+ # This handles the Unix platforms we generally deal with.
# Anything else gets passed through, which probably won't work
# very well; such hosts should pass an explicit target_arch
# to gyp.
@@ -46,7 +47,8 @@
'<!(uname -m | sed -e "s/i.86/ia32/;\
s/x86_64/x64/;s/amd64/x64/;s/arm.*/arm/;s/mips.*/mips/")',
}, {
- # OS!="linux" and OS!="freebsd" and OS!="openbsd" and OS!="netbsd"
+ # OS!="linux" and OS!="freebsd" and OS!="openbsd" and
+ # OS!="netbsd" and OS!="mac"
'host_arch%': 'ia32',
}],
],
@@ -71,6 +73,10 @@
'want_separate_host_toolset': 0,
}],
],
+ # Default ARM variable settings.
+ 'armv7%': 1,
+ 'arm_neon%': 0,
+ 'arm_fpu%': 'vfpv3',
},
'target_defaults': {
'default_configuration': 'Debug',
diff --git a/deps/v8/include/v8-profiler.h b/deps/v8/include/v8-profiler.h
index 2499bbf050..cda2463362 100644
--- a/deps/v8/include/v8-profiler.h
+++ b/deps/v8/include/v8-profiler.h
@@ -64,6 +64,7 @@
*/
namespace v8 {
+typedef uint32_t SnapshotObjectId;
/**
* CpuProfileNode represents a node in a call graph.
@@ -274,7 +275,7 @@ class V8EXPORT HeapGraphNode {
* Returns node id. For the same heap object, the id remains the same
* across all snapshots.
*/
- uint64_t GetId() const;
+ SnapshotObjectId GetId() const;
/** Returns node's own size, in bytes. */
int GetSelfSize() const;
@@ -338,7 +339,7 @@ class V8EXPORT HeapSnapshot {
const HeapGraphNode* GetRoot() const;
/** Returns a node by its id. */
- const HeapGraphNode* GetNodeById(uint64_t id) const;
+ const HeapGraphNode* GetNodeById(SnapshotObjectId id) const;
/** Returns total nodes count in the snapshot. */
int GetNodesCount() const;
@@ -346,6 +347,9 @@ class V8EXPORT HeapSnapshot {
/** Returns a node by index. */
const HeapGraphNode* GetNode(int index) const;
+ /** Returns a max seen JS object Id. */
+ SnapshotObjectId GetMaxSnapshotJSObjectId() const;
+
/**
* Deletes the snapshot and removes it from HeapProfiler's list.
* All pointers to nodes, edges and paths previously returned become
@@ -364,16 +368,20 @@ class V8EXPORT HeapSnapshot {
* with the following structure:
*
* {
- * snapshot: {title: "...", uid: nnn},
- * nodes: [
- * meta-info (JSON string),
- * nodes themselves
- * ],
- * strings: [strings]
+ * snapshot: {
+ * title: "...",
+ * uid: nnn,
+ * meta: { meta-info },
+ * node_count: nnn,
+ * edge_count: nnn
+ * },
+ * nodes: [nodes array],
+ * edges: [edges array],
+ * strings: [strings array]
* }
*
- * Outgoing node links are stored after each node. Nodes reference strings
- * and other nodes by their indexes in corresponding arrays.
+ * Nodes reference strings, other nodes, and edges by their indexes
+ * in corresponding arrays.
*/
void Serialize(OutputStream* stream, SerializationFormat format) const;
};
@@ -405,6 +413,19 @@ class V8EXPORT HeapProfiler {
static const HeapSnapshot* FindSnapshot(unsigned uid);
/**
+ * Returns SnapshotObjectId for a heap object referenced by |value| if
+ * it has been seen by the heap profiler, kUnknownObjectId otherwise.
+ */
+ static SnapshotObjectId GetSnapshotObjectId(Handle<Value> value);
+
+ /**
+ * A constant for invalid SnapshotObjectId. GetSnapshotObjectId will return
+ * it in case heap profiler cannot find id for the object passed as
+ * parameter. HeapSnapshot::GetNodeById will always return NULL for such id.
+ */
+ static const SnapshotObjectId kUnknownObjectId = 0;
+
+ /**
* Takes a heap snapshot and returns it. Title may be an empty string.
* See HeapSnapshot::Type for types description.
*/
@@ -414,6 +435,34 @@ class V8EXPORT HeapProfiler {
ActivityControl* control = NULL);
/**
+ * Starts tracking of heap objects population statistics. After calling
+ * this method, all heap objects relocations done by the garbage collector
+ * are being registered.
+ */
+ static void StartHeapObjectsTracking();
+
+ /**
+ * Adds a new time interval entry to the aggregated statistics array. The
+ * time interval entry contains information on the current heap objects
+ * population size. The method also updates aggregated statistics and
+ * reports updates for all previous time intervals via the OutputStream
+ * object. Updates on each time interval are provided as a stream of the
+ * HeapStatsUpdate structure instances.
+ * The return value of the function is the last seen heap object Id.
+ *
+ * StartHeapObjectsTracking must be called before the first call to this
+ * method.
+ */
+ static SnapshotObjectId PushHeapObjectsStats(OutputStream* stream);
+
+ /**
+ * Stops tracking of heap objects population statistics, cleans up all
+ * collected data. StartHeapObjectsTracking must be called again prior to
+ * calling PushHeapObjectsStats next time.
+ */
+ static void StopHeapObjectsTracking();
+
+ /**
* Deletes all snapshots taken. All previously returned pointers to
* snapshots and their contents become invalid after this call.
*/
@@ -433,6 +482,9 @@ class V8EXPORT HeapProfiler {
/** Returns the number of currently existing persistent handles. */
static int GetPersistentHandleCount();
+
+ /** Returns memory used for profiler internal data and snapshots. */
+ static size_t GetMemorySizeUsedByProfiler();
};
@@ -510,6 +562,19 @@ class V8EXPORT RetainedObjectInfo { // NOLINT
};
+/**
+ * A struct for exporting HeapStats data from V8, using "push" model.
+ * See HeapProfiler::PushHeapObjectsStats.
+ */
+struct HeapStatsUpdate {
+ HeapStatsUpdate(uint32_t index, uint32_t count, uint32_t size)
+ : index(index), count(count), size(size) { }
+ uint32_t index; // Index of the time interval that was changed.
+ uint32_t count; // New value of count field for the interval with this index.
+ uint32_t size; // New value of size field for the interval with this index.
+};
+
+
} // namespace v8
diff --git a/deps/v8/include/v8.h b/deps/v8/include/v8.h
index 33179f5bf0..77ffb385ab 100644
--- a/deps/v8/include/v8.h
+++ b/deps/v8/include/v8.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -62,11 +62,13 @@
#else // _WIN32
-// Setup for Linux shared library export. There is no need to distinguish
-// between building or using the V8 shared library, but we should not
-// export symbols when we are building a static library.
+// Setup for Linux shared library export.
#if defined(__GNUC__) && (__GNUC__ >= 4) && defined(V8_SHARED)
+#ifdef BUILDING_V8_SHARED
#define V8EXPORT __attribute__ ((visibility("default")))
+#else
+#define V8EXPORT
+#endif
#else // defined(__GNUC__) && (__GNUC__ >= 4)
#define V8EXPORT
#endif // defined(__GNUC__) && (__GNUC__ >= 4)
@@ -98,6 +100,7 @@ class Function;
class Date;
class ImplementationUtilities;
class Signature;
+class AccessorSignature;
template <class T> class Handle;
template <class T> class Local;
template <class T> class Persistent;
@@ -107,6 +110,7 @@ class Data;
class AccessorInfo;
class StackTrace;
class StackFrame;
+class Isolate;
namespace internal {
@@ -862,13 +866,13 @@ class Value : public Data {
* Returns true if this value is the undefined value. See ECMA-262
* 4.3.10.
*/
- V8EXPORT bool IsUndefined() const;
+ inline bool IsUndefined() const;
/**
* Returns true if this value is the null value. See ECMA-262
* 4.3.11.
*/
- V8EXPORT bool IsNull() const;
+ inline bool IsNull() const;
/**
* Returns true if this value is true.
@@ -982,7 +986,11 @@ class Value : public Data {
V8EXPORT bool StrictEquals(Handle<Value> that) const;
private:
+ inline bool QuickIsUndefined() const;
+ inline bool QuickIsNull() const;
inline bool QuickIsString() const;
+ V8EXPORT bool FullIsUndefined() const;
+ V8EXPORT bool FullIsNull() const;
V8EXPORT bool FullIsString() const;
};
@@ -1079,6 +1087,7 @@ class String : public Primitive {
* A zero length string.
*/
V8EXPORT static v8::Local<v8::String> Empty();
+ inline static v8::Local<v8::String> Empty(Isolate* isolate);
/**
* Returns true if the string is external
@@ -1236,8 +1245,7 @@ class String : public Primitive {
* this function should not otherwise delete or modify the resource. Neither
* should the underlying buffer be deallocated or modified except through the
* destructor of the external string resource.
- */
- V8EXPORT static Local<String> NewExternal(
+ */ V8EXPORT static Local<String> NewExternal(
ExternalAsciiStringResource* resource);
/**
@@ -1968,10 +1976,13 @@ class Arguments {
inline Local<Object> Holder() const;
inline bool IsConstructCall() const;
inline Local<Value> Data() const;
+ inline Isolate* GetIsolate() const;
+
private:
- static const int kDataIndex = 0;
- static const int kCalleeIndex = -1;
- static const int kHolderIndex = -2;
+ static const int kIsolateIndex = 0;
+ static const int kDataIndex = -1;
+ static const int kCalleeIndex = -2;
+ static const int kHolderIndex = -3;
friend class ImplementationUtilities;
inline Arguments(internal::Object** implicit_args,
@@ -1993,9 +2004,11 @@ class V8EXPORT AccessorInfo {
public:
inline AccessorInfo(internal::Object** args)
: args_(args) { }
+ inline Isolate* GetIsolate() const;
inline Local<Value> Data() const;
inline Local<Object> This() const;
inline Local<Object> Holder() const;
+
private:
internal::Object** args_;
};
@@ -2277,7 +2290,8 @@ class V8EXPORT FunctionTemplate : public Template {
AccessorSetter setter,
Handle<Value> data,
AccessControl settings,
- PropertyAttribute attributes);
+ PropertyAttribute attributes,
+ Handle<AccessorSignature> signature);
void SetNamedInstancePropertyHandler(NamedPropertyGetter getter,
NamedPropertySetter setter,
NamedPropertyQuery query,
@@ -2335,13 +2349,20 @@ class V8EXPORT ObjectTemplate : public Template {
* cross-context access.
* \param attribute The attributes of the property for which an accessor
* is added.
+ * \param signature The signature describes valid receivers for the accessor
+ * and is used to perform implicit instance checks against them. If the
+ * receiver is incompatible (i.e. is not an instance of the constructor as
+ * defined by FunctionTemplate::HasInstance()), an implicit TypeError is
+ * thrown and no callback is invoked.
*/
void SetAccessor(Handle<String> name,
AccessorGetter getter,
AccessorSetter setter = 0,
Handle<Value> data = Handle<Value>(),
AccessControl settings = DEFAULT,
- PropertyAttribute attribute = None);
+ PropertyAttribute attribute = None,
+ Handle<AccessorSignature> signature =
+ Handle<AccessorSignature>());
/**
* Sets a named property handler on the object template.
@@ -2445,8 +2466,8 @@ class V8EXPORT ObjectTemplate : public Template {
/**
- * A Signature specifies which receivers and arguments a function can
- * legally be called with.
+ * A Signature specifies which receivers and arguments are valid
+ * parameters to a function.
*/
class V8EXPORT Signature : public Data {
public:
@@ -2460,6 +2481,19 @@ class V8EXPORT Signature : public Data {
/**
+ * An AccessorSignature specifies which receivers are valid parameters
+ * to an accessor callback.
+ */
+class V8EXPORT AccessorSignature : public Data {
+ public:
+ static Local<AccessorSignature> New(Handle<FunctionTemplate> receiver =
+ Handle<FunctionTemplate>());
+ private:
+ AccessorSignature();
+};
+
+
+/**
* A utility for determining the type of objects based on the template
* they were constructed from.
*/
@@ -2552,6 +2586,11 @@ Handle<Primitive> V8EXPORT Null();
Handle<Boolean> V8EXPORT True();
Handle<Boolean> V8EXPORT False();
+inline Handle<Primitive> Undefined(Isolate* isolate);
+inline Handle<Primitive> Null(Isolate* isolate);
+inline Handle<Boolean> True(Isolate* isolate);
+inline Handle<Boolean> False(Isolate* isolate);
+
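
A small, hypothetical helper shows the intended use; with an isolate already in hand these overloads read the singleton straight from that isolate instead of looking up the current one:

#include <v8.h>

// Invented helper illustrating the isolate-aware singleton accessors.
static v8::Handle<v8::Value> BoolOrNull(v8::Isolate* isolate,
                                        bool have_value, bool value) {
  if (!have_value) return v8::Null(isolate);
  return value ? v8::True(isolate) : v8::False(isolate);
}
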
/**
* A set of constraints that specifies the limits of the runtime's memory use.
@@ -2802,13 +2841,13 @@ class V8EXPORT Isolate {
/**
* Associate embedder-specific data with the isolate
*/
- void SetData(void* data);
+ inline void SetData(void* data);
/**
- * Retrive embedder-specific data from the isolate.
+ * Retrieve embedder-specific data from the isolate.
* Returns NULL if SetData has never been called.
*/
- void* GetData();
+ inline void* GetData();
private:
Isolate();
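
Apart from becoming inline, the SetData/GetData contract is unchanged; a hypothetical embedder might keep per-isolate state like this (PerIsolateState and the helpers are invented names):

#include <v8.h>

struct PerIsolateState { int open_handles; };  // hypothetical embedder data

static void AttachState(v8::Isolate* isolate, PerIsolateState* state) {
  isolate->SetData(state);  // now an inline store into the isolate
}

static PerIsolateState* GetState(const v8::Arguments& args) {
  return static_cast<PerIsolateState*>(args.GetIsolate()->GetData());
}
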
@@ -3153,7 +3192,8 @@ class V8EXPORT V8 {
* that is kept alive by JavaScript objects.
* \returns the adjusted value.
*/
- static int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes);
+ static intptr_t AdjustAmountOfExternalAllocatedMemory(
+ intptr_t change_in_bytes);
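
The widening to intptr_t matters for embedders holding more than 2 GB of external memory on 64-bit hosts; a minimal sketch of the call (the helper names are invented):

#include <stdint.h>
#include <v8.h>

// Tell V8 about external memory kept alive by JS objects, so it can factor
// the amount into its GC heuristics. The return value is the adjusted total.
static void ReportExternalAllocation(intptr_t bytes) {
  v8::V8::AdjustAmountOfExternalAllocatedMemory(bytes);
}

static void ReportExternalFree(intptr_t bytes) {
  v8::V8::AdjustAmountOfExternalAllocatedMemory(-bytes);
}
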
/**
* Suspends recording of tick samples in the profiler.
@@ -3736,6 +3776,12 @@ class V8EXPORT Locker {
/**
+ * A struct for exporting HeapStats data from V8, using "push" model.
+ */
+struct HeapStatsUpdate;
+
+
+/**
* An interface for exporting data from V8, using "push" model.
*/
class V8EXPORT OutputStream { // NOLINT
@@ -3760,6 +3806,14 @@ class V8EXPORT OutputStream { // NOLINT
* will not be called in case writing was aborted.
*/
virtual WriteResult WriteAsciiChunk(char* data, int size) = 0;
+ /**
+ * Writes the next chunk of heap stats data into the stream. Writing
+ * can be stopped by returning kAbort as function result. EndOfStream
+ * will not be called in case writing was aborted.
+ */
+ virtual WriteResult WriteHeapStatsChunk(HeapStatsUpdate* data, int count) {
+ return kAbort;
+ };
};
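
Because WriteHeapStatsChunk has a default body returning kAbort, existing OutputStream subclasses keep compiling; only streams that opt into heap stats override it. A hypothetical subclass (StatsLogger is an invented name):

#include <v8.h>
#include <v8-profiler.h>
#include <cstdio>

// Illustrative stream that prints snapshot text and counts stats updates.
class StatsLogger : public v8::OutputStream {
 public:
  virtual void EndOfStream() { }
  virtual WriteResult WriteAsciiChunk(char* data, int size) {
    std::fwrite(data, 1, size, stdout);
    return kContinue;
  }
  virtual WriteResult WriteHeapStatsChunk(v8::HeapStatsUpdate* data,
                                          int count) {
    // HeapStatsUpdate is only forward-declared here; its members live in
    // v8-profiler.h. This sketch just reports how many updates arrived.
    std::printf("received %d heap stats updates\n", count);
    return kContinue;
  }
};
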
@@ -3848,18 +3902,6 @@ const uintptr_t kEncodablePointerMask =
PlatformSmiTagging::kEncodablePointerMask;
const int kPointerToSmiShift = PlatformSmiTagging::kPointerToSmiShift;
-template <size_t ptr_size> struct InternalConstants;
-
-// Internal constants for 32-bit systems.
-template <> struct InternalConstants<4> {
- static const int kStringResourceOffset = 3 * kApiPointerSize;
-};
-
-// Internal constants for 64-bit systems.
-template <> struct InternalConstants<8> {
- static const int kStringResourceOffset = 3 * kApiPointerSize;
-};
-
/**
* This class exports constants and functionality from within v8 that
* is necessary to implement inline functions in the v8 api. Don't
@@ -3871,18 +3913,31 @@ class Internals {
// the implementation of v8.
static const int kHeapObjectMapOffset = 0;
static const int kMapInstanceTypeOffset = 1 * kApiPointerSize + kApiIntSize;
- static const int kStringResourceOffset =
- InternalConstants<kApiPointerSize>::kStringResourceOffset;
+ static const int kStringResourceOffset = 3 * kApiPointerSize;
+ static const int kOddballKindOffset = 3 * kApiPointerSize;
static const int kForeignAddressOffset = kApiPointerSize;
static const int kJSObjectHeaderSize = 3 * kApiPointerSize;
static const int kFullStringRepresentationMask = 0x07;
static const int kExternalTwoByteRepresentationTag = 0x02;
+ static const int kIsolateStateOffset = 0;
+ static const int kIsolateEmbedderDataOffset = 1 * kApiPointerSize;
+ static const int kIsolateRootsOffset = 3 * kApiPointerSize;
+ static const int kUndefinedValueRootIndex = 5;
+ static const int kNullValueRootIndex = 7;
+ static const int kTrueValueRootIndex = 8;
+ static const int kFalseValueRootIndex = 9;
+ static const int kEmptySymbolRootIndex = 128;
+
static const int kJSObjectType = 0xaa;
static const int kFirstNonstringType = 0x80;
+ static const int kOddballType = 0x82;
static const int kForeignType = 0x85;
+ static const int kUndefinedOddballKind = 5;
+ static const int kNullOddballKind = 3;
+
static inline bool HasHeapObjectTag(internal::Object* value) {
return ((reinterpret_cast<intptr_t>(value) & kHeapObjectTagMask) ==
kHeapObjectTag);
@@ -3902,6 +3957,11 @@ class Internals {
return ReadField<uint8_t>(map, kMapInstanceTypeOffset);
}
+ static inline int GetOddballKind(internal::Object* obj) {
+ typedef internal::Object O;
+ return SmiValue(ReadField<O*>(obj, kOddballKindOffset));
+ }
+
static inline void* GetExternalPointerFromSmi(internal::Object* value) {
const uintptr_t address = reinterpret_cast<uintptr_t>(value);
return reinterpret_cast<void*>(address >> kPointerToSmiShift);
@@ -3922,6 +3982,28 @@ class Internals {
return representation == kExternalTwoByteRepresentationTag;
}
+ static inline bool IsInitialized(v8::Isolate* isolate) {
+ uint8_t* addr = reinterpret_cast<uint8_t*>(isolate) + kIsolateStateOffset;
+ return *reinterpret_cast<int*>(addr) == 1;
+ }
+
+ static inline void SetEmbedderData(v8::Isolate* isolate, void* data) {
+ uint8_t* addr = reinterpret_cast<uint8_t*>(isolate) +
+ kIsolateEmbedderDataOffset;
+ *reinterpret_cast<void**>(addr) = data;
+ }
+
+ static inline void* GetEmbedderData(v8::Isolate* isolate) {
+ uint8_t* addr = reinterpret_cast<uint8_t*>(isolate) +
+ kIsolateEmbedderDataOffset;
+ return *reinterpret_cast<void**>(addr);
+ }
+
+ static inline internal::Object** GetRoot(v8::Isolate* isolate, int index) {
+ uint8_t* addr = reinterpret_cast<uint8_t*>(isolate) + kIsolateRootsOffset;
+ return reinterpret_cast<internal::Object**>(addr + index * kApiPointerSize);
+ }
+
template <typename T>
static inline T ReadField(Object* ptr, int offset) {
uint8_t* addr = reinterpret_cast<uint8_t*>(ptr) + offset - kHeapObjectTag;
@@ -4048,6 +4130,11 @@ Local<Value> Arguments::Data() const {
}
+Isolate* Arguments::GetIsolate() const {
+ return *reinterpret_cast<Isolate**>(&implicit_args_[kIsolateIndex]);
+}
+
+
bool Arguments::IsConstructCall() const {
return is_construct_call_;
}
@@ -4160,6 +4247,15 @@ String* String::Cast(v8::Value* value) {
}
+Local<String> String::Empty(Isolate* isolate) {
+ typedef internal::Object* S;
+ typedef internal::Internals I;
+ if (!I::IsInitialized(isolate)) return Empty();
+ S* slot = I::GetRoot(isolate, I::kEmptySymbolRootIndex);
+ return Local<String>(reinterpret_cast<String*>(slot));
+}
+
+
String::ExternalStringResource* String::GetExternalStringResource() const {
typedef internal::Object O;
typedef internal::Internals I;
@@ -4178,6 +4274,42 @@ String::ExternalStringResource* String::GetExternalStringResource() const {
}
+bool Value::IsUndefined() const {
+#ifdef V8_ENABLE_CHECKS
+ return FullIsUndefined();
+#else
+ return QuickIsUndefined();
+#endif
+}
+
+bool Value::QuickIsUndefined() const {
+ typedef internal::Object O;
+ typedef internal::Internals I;
+ O* obj = *reinterpret_cast<O**>(const_cast<Value*>(this));
+ if (!I::HasHeapObjectTag(obj)) return false;
+ if (I::GetInstanceType(obj) != I::kOddballType) return false;
+ return (I::GetOddballKind(obj) == I::kUndefinedOddballKind);
+}
+
+
+bool Value::IsNull() const {
+#ifdef V8_ENABLE_CHECKS
+ return FullIsNull();
+#else
+ return QuickIsNull();
+#endif
+}
+
+bool Value::QuickIsNull() const {
+ typedef internal::Object O;
+ typedef internal::Internals I;
+ O* obj = *reinterpret_cast<O**>(const_cast<Value*>(this));
+ if (!I::HasHeapObjectTag(obj)) return false;
+ if (I::GetInstanceType(obj) != I::kOddballType) return false;
+ return (I::GetOddballKind(obj) == I::kNullOddballKind);
+}
+
+
bool Value::IsString() const {
#ifdef V8_ENABLE_CHECKS
return FullIsString();
@@ -4283,6 +4415,11 @@ External* External::Cast(v8::Value* value) {
}
+Isolate* AccessorInfo::GetIsolate() const {
+ return *reinterpret_cast<Isolate**>(&args_[-3]);
+}
+
+
Local<Value> AccessorInfo::Data() const {
return Local<Value>(reinterpret_cast<Value*>(&args_[-2]));
}
@@ -4298,6 +4435,54 @@ Local<Object> AccessorInfo::Holder() const {
}
+Handle<Primitive> Undefined(Isolate* isolate) {
+ typedef internal::Object* S;
+ typedef internal::Internals I;
+ if (!I::IsInitialized(isolate)) return Undefined();
+ S* slot = I::GetRoot(isolate, I::kUndefinedValueRootIndex);
+ return Handle<Primitive>(reinterpret_cast<Primitive*>(slot));
+}
+
+
+Handle<Primitive> Null(Isolate* isolate) {
+ typedef internal::Object* S;
+ typedef internal::Internals I;
+ if (!I::IsInitialized(isolate)) return Null();
+ S* slot = I::GetRoot(isolate, I::kNullValueRootIndex);
+ return Handle<Primitive>(reinterpret_cast<Primitive*>(slot));
+}
+
+
+Handle<Boolean> True(Isolate* isolate) {
+ typedef internal::Object* S;
+ typedef internal::Internals I;
+ if (!I::IsInitialized(isolate)) return True();
+ S* slot = I::GetRoot(isolate, I::kTrueValueRootIndex);
+ return Handle<Boolean>(reinterpret_cast<Boolean*>(slot));
+}
+
+
+Handle<Boolean> False(Isolate* isolate) {
+ typedef internal::Object* S;
+ typedef internal::Internals I;
+ if (!I::IsInitialized(isolate)) return False();
+ S* slot = I::GetRoot(isolate, I::kFalseValueRootIndex);
+ return Handle<Boolean>(reinterpret_cast<Boolean*>(slot));
+}
+
+
+void Isolate::SetData(void* data) {
+ typedef internal::Internals I;
+ I::SetEmbedderData(this, data);
+}
+
+
+void* Isolate::GetData() {
+ typedef internal::Internals I;
+ return I::GetEmbedderData(this);
+}
+
+
/**
* \example shell.cc
* A simple shell that takes a list of expressions on the
diff --git a/deps/v8/samples/lineprocessor.cc b/deps/v8/samples/lineprocessor.cc
index 1606a8f99c..7a84a2a0ff 100644
--- a/deps/v8/samples/lineprocessor.cc
+++ b/deps/v8/samples/lineprocessor.cc
@@ -1,4 +1,4 @@
-// Copyright 2009 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -434,9 +434,9 @@ v8::Handle<v8::String> ReadLine() {
}
if (res == NULL) {
v8::Handle<v8::Primitive> t = v8::Undefined();
- return reinterpret_cast<v8::Handle<v8::String>&>(t);
+ return v8::Handle<v8::String>(v8::String::Cast(*t));
}
- // remove newline char
+ // Remove newline char
for (char* pos = buffer; *pos != '\0'; pos++) {
if (*pos == '\n') {
*pos = '\0';
diff --git a/deps/v8/samples/samples.gyp b/deps/v8/samples/samples.gyp
index 55b2a98acd..3c720a748a 100644
--- a/deps/v8/samples/samples.gyp
+++ b/deps/v8/samples/samples.gyp
@@ -1,4 +1,4 @@
-# Copyright 2011 the V8 project authors. All rights reserved.
+# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -48,6 +48,12 @@
'sources': [
'process.cc',
],
+ },
+ {
+ 'target_name': 'lineprocessor',
+ 'sources': [
+ 'lineprocessor.cc',
+ ],
}
],
}
diff --git a/deps/v8/samples/shell.cc b/deps/v8/samples/shell.cc
index b40eca2f7c..db0cc1a930 100644
--- a/deps/v8/samples/shell.cc
+++ b/deps/v8/samples/shell.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -67,17 +67,20 @@ static bool run_shell;
int main(int argc, char* argv[]) {
v8::V8::SetFlagsFromCommandLine(&argc, argv, true);
run_shell = (argc == 1);
- v8::HandleScope handle_scope;
- v8::Persistent<v8::Context> context = CreateShellContext();
- if (context.IsEmpty()) {
- printf("Error creating context\n");
- return 1;
+ int result;
+ {
+ v8::HandleScope handle_scope;
+ v8::Persistent<v8::Context> context = CreateShellContext();
+ if (context.IsEmpty()) {
+ printf("Error creating context\n");
+ return 1;
+ }
+ context->Enter();
+ result = RunMain(argc, argv);
+ if (run_shell) RunShell(context);
+ context->Exit();
+ context.Dispose();
}
- context->Enter();
- int result = RunMain(argc, argv);
- if (run_shell) RunShell(context);
- context->Exit();
- context.Dispose();
v8::V8::Dispose();
return result;
}
diff --git a/deps/v8/src/SConscript b/deps/v8/src/SConscript
index 0d0b5357d5..2482b379ac 100755
--- a/deps/v8/src/SConscript
+++ b/deps/v8/src/SConscript
@@ -68,6 +68,7 @@ SOURCES = {
diy-fp.cc
dtoa.cc
elements.cc
+ elements-kind.cc
execution.cc
factory.cc
flags.cc
diff --git a/deps/v8/src/allocation-inl.h b/deps/v8/src/allocation-inl.h
index 04a3fe667a..d32db4b17f 100644
--- a/deps/v8/src/allocation-inl.h
+++ b/deps/v8/src/allocation-inl.h
@@ -34,12 +34,12 @@ namespace v8 {
namespace internal {
-void* PreallocatedStorage::New(size_t size) {
+void* PreallocatedStorageAllocationPolicy::New(size_t size) {
return Isolate::Current()->PreallocatedStorageNew(size);
}
-void PreallocatedStorage::Delete(void* p) {
+void PreallocatedStorageAllocationPolicy::Delete(void* p) {
return Isolate::Current()->PreallocatedStorageDelete(p);
}
diff --git a/deps/v8/src/allocation.h b/deps/v8/src/allocation.h
index 31067dda81..45bde4c4cb 100644
--- a/deps/v8/src/allocation.h
+++ b/deps/v8/src/allocation.h
@@ -104,7 +104,7 @@ char* StrNDup(const char* str, int n);
// and free. Used as the default policy for lists.
class FreeStoreAllocationPolicy {
public:
- INLINE(static void* New(size_t size)) { return Malloced::New(size); }
+ INLINE(void* New(size_t size)) { return Malloced::New(size); }
INLINE(static void Delete(void* p)) { Malloced::Delete(p); }
};
@@ -117,12 +117,6 @@ class PreallocatedStorage {
explicit PreallocatedStorage(size_t size);
size_t size() { return size_; }
- // TODO(isolates): Get rid of these-- we'll have to change the allocator
- // interface to include a pointer to an isolate to do this
- // efficiently.
- static inline void* New(size_t size);
- static inline void Delete(void* p);
-
private:
size_t size_;
PreallocatedStorage* previous_;
@@ -137,6 +131,12 @@ class PreallocatedStorage {
};
+struct PreallocatedStorageAllocationPolicy {
+ INLINE(void* New(size_t size));
+ INLINE(static void Delete(void* ptr));
+};
+
+
} } // namespace v8::internal
#endif // V8_ALLOCATION_H_
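
The shape matters more than the names here: New is now an instance method, presumably so a policy object can carry state, while Delete stays static. A standalone sketch, not V8 code, of how such a policy is consumed:

#include <cstdlib>

// Hypothetical policy, analogous in shape to FreeStoreAllocationPolicy above.
struct MallocPolicy {
  void* New(size_t size) { return std::malloc(size); }
  static void Delete(void* p) { std::free(p); }
};

// Minimal container parameterized by an allocation policy.
template <class AllocationPolicy>
class TinyBuffer {
 public:
  explicit TinyBuffer(size_t bytes,
                      AllocationPolicy policy = AllocationPolicy())
      : data_(policy.New(bytes)) {}      // New is called on a policy instance
  ~TinyBuffer() { AllocationPolicy::Delete(data_); }  // Delete stays static
  void* data() { return data_; }
 private:
  void* data_;
};

int main() {
  TinyBuffer<MallocPolicy> buf(64);
  return buf.data() != 0 ? 0 : 1;
}
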
diff --git a/deps/v8/src/api.cc b/deps/v8/src/api.cc
index 4e731fbec8..0d88047aa2 100644
--- a/deps/v8/src/api.cc
+++ b/deps/v8/src/api.cc
@@ -512,6 +512,16 @@ void RegisteredExtension::Register(RegisteredExtension* that) {
}
+void RegisteredExtension::UnregisterAll() {
+ RegisteredExtension* re = first_extension_;
+ while (re != NULL) {
+ RegisteredExtension* next = re->next();
+ delete re;
+ re = next;
+ }
+}
+
+
void RegisterExtension(Extension* that) {
RegisteredExtension* extension = new RegisteredExtension(that);
RegisteredExtension::Register(extension);
@@ -980,6 +990,12 @@ Local<Signature> Signature::New(Handle<FunctionTemplate> receiver,
}
+Local<AccessorSignature> AccessorSignature::New(
+ Handle<FunctionTemplate> receiver) {
+ return Utils::AccessorSignatureToLocal(Utils::OpenHandle(*receiver));
+}
+
+
Local<TypeSwitch> TypeSwitch::New(Handle<FunctionTemplate> type) {
Handle<FunctionTemplate> types[1] = { type };
return TypeSwitch::New(1, types);
@@ -1047,7 +1063,8 @@ static i::Handle<i::AccessorInfo> MakeAccessorInfo(
AccessorSetter setter,
v8::Handle<Value> data,
v8::AccessControl settings,
- v8::PropertyAttribute attributes) {
+ v8::PropertyAttribute attributes,
+ v8::Handle<AccessorSignature> signature) {
i::Handle<i::AccessorInfo> obj = FACTORY->NewAccessorInfo();
ASSERT(getter != NULL);
SET_FIELD_WRAPPED(obj, set_getter, getter);
@@ -1059,6 +1076,9 @@ static i::Handle<i::AccessorInfo> MakeAccessorInfo(
if (settings & ALL_CAN_WRITE) obj->set_all_can_write(true);
if (settings & PROHIBITS_OVERWRITING) obj->set_prohibits_overwriting(true);
obj->set_property_attributes(static_cast<PropertyAttributes>(attributes));
+ if (!signature.IsEmpty()) {
+ obj->set_expected_receiver_type(*Utils::OpenHandle(*signature));
+ }
return obj;
}
@@ -1069,7 +1089,8 @@ void FunctionTemplate::AddInstancePropertyAccessor(
AccessorSetter setter,
v8::Handle<Value> data,
v8::AccessControl settings,
- v8::PropertyAttribute attributes) {
+ v8::PropertyAttribute attributes,
+ v8::Handle<AccessorSignature> signature) {
i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
if (IsDeadCheck(isolate,
"v8::FunctionTemplate::AddInstancePropertyAccessor()")) {
@@ -1078,9 +1099,9 @@ void FunctionTemplate::AddInstancePropertyAccessor(
ENTER_V8(isolate);
i::HandleScope scope(isolate);
- i::Handle<i::AccessorInfo> obj = MakeAccessorInfo(name,
- getter, setter, data,
- settings, attributes);
+ i::Handle<i::AccessorInfo> obj = MakeAccessorInfo(name, getter, setter, data,
+ settings, attributes,
+ signature);
i::Handle<i::Object> list(Utils::OpenHandle(this)->property_accessors());
if (list->IsUndefined()) {
list = NeanderArray().value();
@@ -1265,7 +1286,8 @@ void ObjectTemplate::SetAccessor(v8::Handle<String> name,
AccessorSetter setter,
v8::Handle<Value> data,
AccessControl settings,
- PropertyAttribute attribute) {
+ PropertyAttribute attribute,
+ v8::Handle<AccessorSignature> signature) {
i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
if (IsDeadCheck(isolate, "v8::ObjectTemplate::SetAccessor()")) return;
ENTER_V8(isolate);
@@ -1279,7 +1301,8 @@ void ObjectTemplate::SetAccessor(v8::Handle<String> name,
setter,
data,
settings,
- attribute);
+ attribute,
+ signature);
}
@@ -2091,17 +2114,21 @@ bool StackFrame::IsConstructor() const {
// --- D a t a ---
-bool Value::IsUndefined() const {
+bool Value::FullIsUndefined() const {
if (IsDeadCheck(i::Isolate::Current(), "v8::Value::IsUndefined()")) {
return false;
}
- return Utils::OpenHandle(this)->IsUndefined();
+ bool result = Utils::OpenHandle(this)->IsUndefined();
+ ASSERT_EQ(result, QuickIsUndefined());
+ return result;
}
-bool Value::IsNull() const {
+bool Value::FullIsNull() const {
if (IsDeadCheck(i::Isolate::Current(), "v8::Value::IsNull()")) return false;
- return Utils::OpenHandle(this)->IsNull();
+ bool result = Utils::OpenHandle(this)->IsNull();
+ ASSERT_EQ(result, QuickIsNull());
+ return result;
}
@@ -2799,9 +2826,13 @@ bool v8::Object::ForceDelete(v8::Handle<Value> key) {
i::Handle<i::JSObject> self = Utils::OpenHandle(this);
i::Handle<i::Object> key_obj = Utils::OpenHandle(*key);
- // When turning on access checks for a global object deoptimize all functions
- // as optimized code does not always handle access checks.
- i::Deoptimizer::DeoptimizeGlobalObject(*self);
+ // When deleting a property on the global object using ForceDelete
+ // deoptimize all functions as optimized code does not check for the hole
+ // value with DontDelete properties. We have to deoptimize all contexts
+ // because of possible cross-context inlined functions.
+ if (self->IsJSGlobalProxy() || self->IsGlobalObject()) {
+ i::Deoptimizer::DeoptimizeAll();
+ }
EXCEPTION_PREAMBLE(isolate);
i::Handle<i::Object> obj = i::ForceDeleteProperty(self, key_obj);
@@ -3061,9 +3092,10 @@ bool Object::SetAccessor(Handle<String> name,
ON_BAILOUT(isolate, "v8::Object::SetAccessor()", return false);
ENTER_V8(isolate);
i::HandleScope scope(isolate);
- i::Handle<i::AccessorInfo> info = MakeAccessorInfo(name,
- getter, setter, data,
- settings, attributes);
+ v8::Handle<AccessorSignature> signature;
+ i::Handle<i::AccessorInfo> info = MakeAccessorInfo(name, getter, setter, data,
+ settings, attributes,
+ signature);
bool fast = Utils::OpenHandle(this)->HasFastProperties();
i::Handle<i::Object> result = i::SetAccessor(Utils::OpenHandle(this), info);
if (result.is_null() || result->IsUndefined()) return false;
@@ -4612,7 +4644,9 @@ void* External::Value() const {
Local<String> v8::String::Empty() {
i::Isolate* isolate = i::Isolate::Current();
- EnsureInitializedForIsolate(isolate, "v8::String::Empty()");
+ if (!EnsureInitializedForIsolate(isolate, "v8::String::Empty()")) {
+ return v8::Local<String>();
+ }
LOG_API(isolate, "String::Empty()");
return Utils::ToLocal(isolate->factory()->empty_symbol());
}
@@ -5020,7 +5054,7 @@ Local<Object> Array::CloneElementAt(uint32_t index) {
i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
ON_BAILOUT(isolate, "v8::Array::CloneElementAt()", return Local<Object>());
i::Handle<i::JSObject> self = Utils::OpenHandle(this);
- if (!self->HasFastElements()) {
+ if (!self->HasFastObjectElements()) {
return Local<Object>();
}
i::FixedArray* elms = i::FixedArray::cast(self->elements());
@@ -5198,7 +5232,7 @@ void V8::AddImplicitReferences(Persistent<Object> parent,
}
-int V8::AdjustAmountOfExternalAllocatedMemory(int change_in_bytes) {
+intptr_t V8::AdjustAmountOfExternalAllocatedMemory(intptr_t change_in_bytes) {
i::Isolate* isolate = i::Isolate::Current();
if (IsDeadCheck(isolate, "v8::V8::AdjustAmountOfExternalAllocatedMemory()")) {
return 0;
@@ -5378,17 +5412,6 @@ void Isolate::Exit() {
}
-void Isolate::SetData(void* data) {
- i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
- isolate->SetData(data);
-}
-
-void* Isolate::GetData() {
- i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
- return isolate->GetData();
-}
-
-
String::Utf8Value::Utf8Value(v8::Handle<v8::Value> obj)
: str_(NULL), length_(0) {
i::Isolate* isolate = i::Isolate::Current();
@@ -5988,7 +6011,7 @@ Handle<Value> HeapGraphEdge::GetName() const {
const HeapGraphNode* HeapGraphEdge::GetFromNode() const {
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapGraphEdge::GetFromNode");
- const i::HeapEntry* from = ToInternal(this)->From();
+ const i::HeapEntry* from = ToInternal(this)->from();
return reinterpret_cast<const HeapGraphNode*>(from);
}
@@ -6022,7 +6045,7 @@ Handle<String> HeapGraphNode::GetName() const {
}
-uint64_t HeapGraphNode::GetId() const {
+SnapshotObjectId HeapGraphNode::GetId() const {
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapGraphNode::GetId");
return ToInternal(this)->id();
@@ -6036,13 +6059,6 @@ int HeapGraphNode::GetSelfSize() const {
}
-int HeapGraphNode::GetRetainedSize() const {
- i::Isolate* isolate = i::Isolate::Current();
- IsDeadCheck(isolate, "v8::HeapSnapshot::GetRetainedSize");
- return ToInternal(this)->retained_size();
-}
-
-
int HeapGraphNode::GetChildrenCount() const {
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapSnapshot::GetChildrenCount");
@@ -6054,29 +6070,7 @@ const HeapGraphEdge* HeapGraphNode::GetChild(int index) const {
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapSnapshot::GetChild");
return reinterpret_cast<const HeapGraphEdge*>(
- &ToInternal(this)->children()[index]);
-}
-
-
-int HeapGraphNode::GetRetainersCount() const {
- i::Isolate* isolate = i::Isolate::Current();
- IsDeadCheck(isolate, "v8::HeapSnapshot::GetRetainersCount");
- return ToInternal(this)->retainers().length();
-}
-
-
-const HeapGraphEdge* HeapGraphNode::GetRetainer(int index) const {
- i::Isolate* isolate = i::Isolate::Current();
- IsDeadCheck(isolate, "v8::HeapSnapshot::GetRetainer");
- return reinterpret_cast<const HeapGraphEdge*>(
- ToInternal(this)->retainers()[index]);
-}
-
-
-const HeapGraphNode* HeapGraphNode::GetDominatorNode() const {
- i::Isolate* isolate = i::Isolate::Current();
- IsDeadCheck(isolate, "v8::HeapSnapshot::GetDominatorNode");
- return reinterpret_cast<const HeapGraphNode*>(ToInternal(this)->dominator());
+ ToInternal(this)->children()[index]);
}
@@ -6137,18 +6131,18 @@ const HeapGraphNode* HeapSnapshot::GetRoot() const {
}
-const HeapGraphNode* HeapSnapshot::GetNodeById(uint64_t id) const {
+const HeapGraphNode* HeapSnapshot::GetNodeById(SnapshotObjectId id) const {
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapSnapshot::GetNodeById");
return reinterpret_cast<const HeapGraphNode*>(
- ToInternal(this)->GetEntryById(static_cast<i::SnapshotObjectId>(id)));
+ ToInternal(this)->GetEntryById(id));
}
int HeapSnapshot::GetNodesCount() const {
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapSnapshot::GetNodesCount");
- return ToInternal(this)->entries()->length();
+ return ToInternal(this)->entries().length();
}
@@ -6156,7 +6150,14 @@ const HeapGraphNode* HeapSnapshot::GetNode(int index) const {
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapSnapshot::GetNode");
return reinterpret_cast<const HeapGraphNode*>(
- ToInternal(this)->entries()->at(index));
+ &ToInternal(this)->entries().at(index));
+}
+
+
+SnapshotObjectId HeapSnapshot::GetMaxSnapshotJSObjectId() const {
+ i::Isolate* isolate = i::Isolate::Current();
+ IsDeadCheck(isolate, "v8::HeapSnapshot::GetMaxSnapshotJSObjectId");
+ return ToInternal(this)->max_snapshot_js_object_id();
}
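
A sketch of the revised snapshot API (the snapshot title and helper name are placeholders); node ids now travel as SnapshotObjectId rather than uint64_t:

#include <v8.h>
#include <v8-profiler.h>

static void RoundTripRootId() {
  const v8::HeapSnapshot* snapshot =
      v8::HeapProfiler::TakeSnapshot(v8::String::New("example"));
  const v8::HeapGraphNode* root = snapshot->GetRoot();
  v8::SnapshotObjectId id = root->GetId();
  const v8::HeapGraphNode* same = snapshot->GetNodeById(id);
  // `same` refers to the same entry as `root`; GetMaxSnapshotJSObjectId()
  // reports the largest JS object id recorded in this snapshot.
  (void)same;
}
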
@@ -6201,6 +6202,14 @@ const HeapSnapshot* HeapProfiler::FindSnapshot(unsigned uid) {
}
+SnapshotObjectId HeapProfiler::GetSnapshotObjectId(Handle<Value> value) {
+ i::Isolate* isolate = i::Isolate::Current();
+ IsDeadCheck(isolate, "v8::HeapProfiler::GetSnapshotObjectId");
+ i::Handle<i::Object> obj = Utils::OpenHandle(*value);
+ return i::HeapProfiler::GetSnapshotObjectId(obj);
+}
+
+
const HeapSnapshot* HeapProfiler::TakeSnapshot(Handle<String> title,
HeapSnapshot::Type type,
ActivityControl* control) {
@@ -6220,6 +6229,27 @@ const HeapSnapshot* HeapProfiler::TakeSnapshot(Handle<String> title,
}
+void HeapProfiler::StartHeapObjectsTracking() {
+ i::Isolate* isolate = i::Isolate::Current();
+ IsDeadCheck(isolate, "v8::HeapProfiler::StartHeapObjectsTracking");
+ i::HeapProfiler::StartHeapObjectsTracking();
+}
+
+
+void HeapProfiler::StopHeapObjectsTracking() {
+ i::Isolate* isolate = i::Isolate::Current();
+ IsDeadCheck(isolate, "v8::HeapProfiler::StopHeapObjectsTracking");
+ i::HeapProfiler::StopHeapObjectsTracking();
+}
+
+
+SnapshotObjectId HeapProfiler::PushHeapObjectsStats(OutputStream* stream) {
+ i::Isolate* isolate = i::Isolate::Current();
+ IsDeadCheck(isolate, "v8::HeapProfiler::PushHeapObjectsStats");
+ return i::HeapProfiler::PushHeapObjectsStats(stream);
+}
+
+
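
Putting the new tracking entry points together, hedged as a sketch (SampleHeapStats is an invented name; the stream would be an OutputStream subclass such as the one sketched for v8.h above):

#include <v8.h>
#include <v8-profiler.h>

static void SampleHeapStats(v8::OutputStream* stream,
                            v8::Handle<v8::Value> watched) {
  v8::HeapProfiler::StartHeapObjectsTracking();
  // ... run application code for a while ...
  v8::SnapshotObjectId last_id =
      v8::HeapProfiler::PushHeapObjectsStats(stream);  // WriteHeapStatsChunk
  v8::SnapshotObjectId watched_id =
      v8::HeapProfiler::GetSnapshotObjectId(watched);
  (void)last_id;
  (void)watched_id;
  v8::HeapProfiler::StopHeapObjectsTracking();
}
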
void HeapProfiler::DeleteAllSnapshots() {
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapProfiler::DeleteAllSnapshots");
@@ -6240,6 +6270,11 @@ int HeapProfiler::GetPersistentHandleCount() {
}
+size_t HeapProfiler::GetMemorySizeUsedByProfiler() {
+ return i::HeapProfiler::GetMemorySizeUsedByProfiler();
+}
+
+
v8::Testing::StressType internal::Testing::stress_type_ =
v8::Testing::kStressTypeOpt;
@@ -6267,7 +6302,11 @@ static void SetFlagsFromString(const char* flags) {
void Testing::PrepareStressRun(int run) {
static const char* kLazyOptimizations =
- "--prepare-always-opt --nolimit-inlining --noalways-opt";
+ "--prepare-always-opt "
+ "--max-inlined-source-size=999999 "
+ "--max-inlined-nodes=999999 "
+ "--max-inlined-nodes-cumulative=999999 "
+ "--noalways-opt";
static const char* kForcedOptimizations = "--always-opt";
// If deoptimization stressed turn on frequent deoptimization. If no value
diff --git a/deps/v8/src/api.h b/deps/v8/src/api.h
index 89cf0c864c..58e6a6e410 100644
--- a/deps/v8/src/api.h
+++ b/deps/v8/src/api.h
@@ -105,13 +105,13 @@ NeanderArray::NeanderArray(v8::internal::Handle<v8::internal::Object> obj)
v8::internal::Object* NeanderObject::get(int offset) {
- ASSERT(value()->HasFastElements());
+ ASSERT(value()->HasFastObjectElements());
return v8::internal::FixedArray::cast(value()->elements())->get(offset);
}
void NeanderObject::set(int offset, v8::internal::Object* value) {
- ASSERT(value_->HasFastElements());
+ ASSERT(value_->HasFastObjectElements());
v8::internal::FixedArray::cast(value_->elements())->set(offset, value);
}
@@ -146,6 +146,7 @@ class RegisteredExtension {
public:
explicit RegisteredExtension(Extension* extension);
static void Register(RegisteredExtension* that);
+ static void UnregisterAll();
Extension* extension() { return extension_; }
RegisteredExtension* next() { return next_; }
RegisteredExtension* next_auto() { return next_auto_; }
@@ -199,6 +200,8 @@ class Utils {
v8::internal::Handle<v8::internal::ObjectTemplateInfo> obj);
static inline Local<Signature> ToLocal(
v8::internal::Handle<v8::internal::SignatureInfo> obj);
+ static inline Local<AccessorSignature> AccessorSignatureToLocal(
+ v8::internal::Handle<v8::internal::FunctionTemplateInfo> obj);
static inline Local<TypeSwitch> ToLocal(
v8::internal::Handle<v8::internal::TypeSwitchInfo> obj);
@@ -232,6 +235,8 @@ class Utils {
OpenHandle(const v8::Context* context);
static inline v8::internal::Handle<v8::internal::SignatureInfo>
OpenHandle(const v8::Signature* sig);
+ static inline v8::internal::Handle<v8::internal::FunctionTemplateInfo>
+ OpenHandle(const v8::AccessorSignature* sig);
static inline v8::internal::Handle<v8::internal::TypeSwitchInfo>
OpenHandle(const v8::TypeSwitch* that);
static inline v8::internal::Handle<v8::internal::Foreign>
@@ -275,6 +280,7 @@ MAKE_TO_LOCAL(ToLocal, Foreign, External)
MAKE_TO_LOCAL(ToLocal, FunctionTemplateInfo, FunctionTemplate)
MAKE_TO_LOCAL(ToLocal, ObjectTemplateInfo, ObjectTemplate)
MAKE_TO_LOCAL(ToLocal, SignatureInfo, Signature)
+MAKE_TO_LOCAL(AccessorSignatureToLocal, FunctionTemplateInfo, AccessorSignature)
MAKE_TO_LOCAL(ToLocal, TypeSwitchInfo, TypeSwitch)
MAKE_TO_LOCAL(MessageToLocal, Object, Message)
MAKE_TO_LOCAL(StackTraceToLocal, JSArray, StackTrace)
@@ -299,6 +305,7 @@ MAKE_OPEN_HANDLE(Template, TemplateInfo)
MAKE_OPEN_HANDLE(FunctionTemplate, FunctionTemplateInfo)
MAKE_OPEN_HANDLE(ObjectTemplate, ObjectTemplateInfo)
MAKE_OPEN_HANDLE(Signature, SignatureInfo)
+MAKE_OPEN_HANDLE(AccessorSignature, FunctionTemplateInfo)
MAKE_OPEN_HANDLE(TypeSwitch, TypeSwitchInfo)
MAKE_OPEN_HANDLE(Data, Object)
MAKE_OPEN_HANDLE(RegExp, JSRegExp)
diff --git a/deps/v8/src/apiutils.h b/deps/v8/src/apiutils.h
index 68579af1b3..71c0e1c2c4 100644
--- a/deps/v8/src/apiutils.h
+++ b/deps/v8/src/apiutils.h
@@ -1,4 +1,4 @@
-// Copyright 2009 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -40,14 +40,17 @@ class ImplementationUtilities {
}
// Packs additional parameters for the NewArguments function. |implicit_args|
- // is a pointer to the last element of 3-elements array controlled by GC.
+ // is a pointer to the last element of 4-elements array controlled by GC.
static void PrepareArgumentsData(internal::Object** implicit_args,
+ internal::Isolate* isolate,
internal::Object* data,
internal::JSFunction* callee,
internal::Object* holder) {
implicit_args[v8::Arguments::kDataIndex] = data;
implicit_args[v8::Arguments::kCalleeIndex] = callee;
implicit_args[v8::Arguments::kHolderIndex] = holder;
+ implicit_args[v8::Arguments::kIsolateIndex] =
+ reinterpret_cast<internal::Object*>(isolate);
}
static v8::Arguments NewArguments(internal::Object** implicit_args,
@@ -55,6 +58,8 @@ class ImplementationUtilities {
bool is_construct_call) {
ASSERT(implicit_args[v8::Arguments::kCalleeIndex]->IsJSFunction());
ASSERT(implicit_args[v8::Arguments::kHolderIndex]->IsHeapObject());
+ // The implicit isolate argument is not tagged and looks like a SMI.
+ ASSERT(implicit_args[v8::Arguments::kIsolateIndex]->IsSmi());
return v8::Arguments(implicit_args, argv, argc, is_construct_call);
}
diff --git a/deps/v8/src/arguments.h b/deps/v8/src/arguments.h
index e9a32702cf..f8fb00c575 100644
--- a/deps/v8/src/arguments.h
+++ b/deps/v8/src/arguments.h
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -91,9 +91,11 @@ class CustomArguments : public Relocatable {
Object* data,
Object* self,
JSObject* holder) : Relocatable(isolate) {
- values_[2] = self;
- values_[1] = holder;
- values_[0] = data;
+ ASSERT(reinterpret_cast<Object*>(isolate)->IsSmi());
+ values_[3] = self;
+ values_[2] = holder;
+ values_[1] = data;
+ values_[0] = reinterpret_cast<Object*>(isolate);
}
inline explicit CustomArguments(Isolate* isolate) : Relocatable(isolate) {
@@ -106,8 +108,9 @@ class CustomArguments : public Relocatable {
void IterateInstance(ObjectVisitor* v);
Object** end() { return values_ + ARRAY_SIZE(values_) - 1; }
+
private:
- Object* values_[3];
+ Object* values_[4];
};
diff --git a/deps/v8/src/arm/builtins-arm.cc b/deps/v8/src/arm/builtins-arm.cc
index c99e778a7f..578bd810d4 100644
--- a/deps/v8/src/arm/builtins-arm.cc
+++ b/deps/v8/src/arm/builtins-arm.cc
@@ -114,7 +114,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
Label* gc_required) {
const int initial_capacity = JSArray::kPreallocatedArrayElements;
STATIC_ASSERT(initial_capacity >= 0);
- __ LoadInitialArrayMap(array_function, scratch2, scratch1);
+ __ LoadInitialArrayMap(array_function, scratch2, scratch1, false);
// Allocate the JSArray object together with space for a fixed array with the
// requested elements.
@@ -208,7 +208,8 @@ static void AllocateJSArray(MacroAssembler* masm,
bool fill_with_hole,
Label* gc_required) {
// Load the initial map from the array function.
- __ LoadInitialArrayMap(array_function, scratch2, elements_array_storage);
+ __ LoadInitialArrayMap(array_function, scratch2,
+ elements_array_storage, fill_with_hole);
if (FLAG_debug_code) { // Assert that array size is not zero.
__ tst(array_size, array_size);
@@ -440,10 +441,10 @@ static void ArrayNativeCode(MacroAssembler* masm,
__ b(call_generic_code);
__ bind(&not_double);
- // Transition FAST_SMI_ONLY_ELEMENTS to FAST_ELEMENTS.
+ // Transition FAST_SMI_ELEMENTS to FAST_ELEMENTS.
// r3: JSArray
__ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
FAST_ELEMENTS,
r2,
r9,
diff --git a/deps/v8/src/arm/code-stubs-arm.cc b/deps/v8/src/arm/code-stubs-arm.cc
index f772db9be2..761123f639 100644
--- a/deps/v8/src/arm/code-stubs-arm.cc
+++ b/deps/v8/src/arm/code-stubs-arm.cc
@@ -3737,9 +3737,13 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
// Compute the return address in lr to return to after the jump below. Pc is
// already at '+ 8' from the current instruction but return is after three
// instructions so add another 4 to pc to get the return address.
- masm->add(lr, pc, Operand(4));
- __ str(lr, MemOperand(sp, 0));
- masm->Jump(r5);
+ {
+ // Prevent literal pool emission before return address.
+ Assembler::BlockConstPoolScope block_const_pool(masm);
+ masm->add(lr, pc, Operand(4));
+ __ str(lr, MemOperand(sp, 0));
+ masm->Jump(r5);
+ }
if (always_allocate) {
// It's okay to clobber r2 and r3 here. Don't mess with r0 and r1
@@ -3956,14 +3960,21 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// Jump to a faked try block that does the invoke, with a faked catch
// block that sets the pending exception.
__ jmp(&invoke);
- __ bind(&handler_entry);
- handler_offset_ = handler_entry.pos();
- // Caught exception: Store result (exception) in the pending exception
- // field in the JSEnv and return a failure sentinel. Coming in here the
- // fp will be invalid because the PushTryHandler below sets it to 0 to
- // signal the existence of the JSEntry frame.
- __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
- isolate)));
+
+ // Block literal pool emission whilst taking the position of the handler
+ // entry. This avoids making the assumption that literal pools are always
+ // emitted after an instruction is emitted, rather than before.
+ {
+ Assembler::BlockConstPoolScope block_const_pool(masm);
+ __ bind(&handler_entry);
+ handler_offset_ = handler_entry.pos();
+ // Caught exception: Store result (exception) in the pending exception
+ // field in the JSEnv and return a failure sentinel. Coming in here the
+ // fp will be invalid because the PushTryHandler below sets it to 0 to
+ // signal the existence of the JSEntry frame.
+ __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
+ isolate)));
+ }
__ str(r0, MemOperand(ip));
__ mov(r0, Operand(reinterpret_cast<int32_t>(Failure::Exception())));
__ b(&exit);
@@ -4006,9 +4017,13 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// Branch and link to JSEntryTrampoline. We don't use the double underscore
// macro for the add instruction because we don't want the coverage tool
- // inserting instructions here after we read the pc.
- __ mov(lr, Operand(pc));
- masm->add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
+ // inserting instructions here after we read the pc. We block literal pool
+ // emission for the same reason.
+ {
+ Assembler::BlockConstPoolScope block_const_pool(masm);
+ __ mov(lr, Operand(pc));
+ masm->add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
+ }
// Unlink this frame from the handler chain.
__ PopTryHandler();
@@ -4824,27 +4839,32 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ IncrementCounter(isolate->counters()->regexp_entry_native(), 1, r0, r2);
// Isolates: note we add an additional parameter here (isolate pointer).
- const int kRegExpExecuteArguments = 8;
+ const int kRegExpExecuteArguments = 9;
const int kParameterRegisters = 4;
__ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters);
// Stack pointer now points to cell where return address is to be written.
// Arguments are before that on the stack or in registers.
- // Argument 8 (sp[16]): Pass current isolate address.
+ // Argument 9 (sp[20]): Pass current isolate address.
__ mov(r0, Operand(ExternalReference::isolate_address()));
- __ str(r0, MemOperand(sp, 4 * kPointerSize));
+ __ str(r0, MemOperand(sp, 5 * kPointerSize));
- // Argument 7 (sp[12]): Indicate that this is a direct call from JavaScript.
+ // Argument 8 (sp[16]): Indicate that this is a direct call from JavaScript.
__ mov(r0, Operand(1));
- __ str(r0, MemOperand(sp, 3 * kPointerSize));
+ __ str(r0, MemOperand(sp, 4 * kPointerSize));
- // Argument 6 (sp[8]): Start (high end) of backtracking stack memory area.
+ // Argument 7 (sp[12]): Start (high end) of backtracking stack memory area.
__ mov(r0, Operand(address_of_regexp_stack_memory_address));
__ ldr(r0, MemOperand(r0, 0));
__ mov(r2, Operand(address_of_regexp_stack_memory_size));
__ ldr(r2, MemOperand(r2, 0));
__ add(r0, r0, Operand(r2));
+ __ str(r0, MemOperand(sp, 3 * kPointerSize));
+
+ // Argument 6: Set the number of capture registers to zero to force global
+ // regexps to behave as non-global. This does not affect non-global regexps.
+ __ mov(r0, Operand(0));
__ str(r0, MemOperand(sp, 2 * kPointerSize));
// Argument 5 (sp[4]): static offsets vector buffer.
@@ -4893,7 +4913,9 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Check the result.
Label success;
- __ cmp(r0, Operand(NativeRegExpMacroAssembler::SUCCESS));
+ __ cmp(r0, Operand(1));
+ // We expect exactly one result since we force the called regexp to behave
+ // as non-global.
__ b(eq, &success);
Label failure;
__ cmp(r0, Operand(NativeRegExpMacroAssembler::FAILURE));
@@ -5169,9 +5191,9 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
__ CompareRoot(r4, Heap::kTheHoleValueRootIndex);
__ b(ne, &call);
// Patch the receiver on the stack with the global receiver object.
- __ ldr(r2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
- __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));
- __ str(r2, MemOperand(sp, argc_ * kPointerSize));
+ __ ldr(r3, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ __ ldr(r3, FieldMemOperand(r3, GlobalObject::kGlobalReceiverOffset));
+ __ str(r3, MemOperand(sp, argc_ * kPointerSize));
__ bind(&call);
}
@@ -5179,9 +5201,13 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
// r1: pushed function (to be verified)
__ JumpIfSmi(r1, &non_function);
// Get the map of the function object.
- __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
+ __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
__ b(ne, &slow);
+ if (RecordCallTarget()) {
+ GenerateRecordCallTarget(masm);
+ }
+
// Fast-case: Invoke the function now.
// r1: pushed function
ParameterCount actual(argc_);
@@ -5205,8 +5231,17 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
// Slow-case: Non-function called.
__ bind(&slow);
+ if (RecordCallTarget()) {
+ // If there is a call target cache, mark it megamorphic in the
+ // non-function case. MegamorphicSentinel is an immortal immovable
+ // object (undefined) so no write barrier is needed.
+ ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()),
+ masm->isolate()->heap()->undefined_value());
+ __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
+ __ str(ip, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
+ }
// Check for function proxy.
- __ cmp(r2, Operand(JS_FUNCTION_PROXY_TYPE));
+ __ cmp(r3, Operand(JS_FUNCTION_PROXY_TYPE));
__ b(ne, &non_function);
__ push(r1); // put proxy as additional argument
__ mov(r0, Operand(argc_ + 1, RelocInfo::NONE));
@@ -5873,36 +5908,12 @@ void SubStringStub::Generate(MacroAssembler* masm) {
// r2: result string length
__ ldr(r4, FieldMemOperand(r0, String::kLengthOffset));
__ cmp(r2, Operand(r4, ASR, 1));
+ // Return original string.
__ b(eq, &return_r0);
+ // Longer than original string's length or negative: unsafe arguments.
+ __ b(hi, &runtime);
+ // Shorter than original string's length: an actual substring.
- Label result_longer_than_two;
- // Check for special case of two character ASCII string, in which case
- // we do a lookup in the symbol table first.
- __ cmp(r2, Operand(2));
- __ b(gt, &result_longer_than_two);
- __ b(lt, &runtime);
-
- __ JumpIfInstanceTypeIsNotSequentialAscii(r1, r1, &runtime);
-
- // Get the two characters forming the sub string.
- __ add(r0, r0, Operand(r3));
- __ ldrb(r3, FieldMemOperand(r0, SeqAsciiString::kHeaderSize));
- __ ldrb(r4, FieldMemOperand(r0, SeqAsciiString::kHeaderSize + 1));
-
- // Try to lookup two character string in symbol table.
- Label make_two_character_string;
- StringHelper::GenerateTwoCharacterSymbolTableProbe(
- masm, r3, r4, r1, r5, r6, r7, r9, &make_two_character_string);
- __ jmp(&return_r0);
-
- // r2: result string length.
- // r3: two characters combined into halfword in little endian byte order.
- __ bind(&make_two_character_string);
- __ AllocateAsciiString(r0, r2, r4, r5, r9, &runtime);
- __ strh(r3, FieldMemOperand(r0, SeqAsciiString::kHeaderSize));
- __ jmp(&return_r0);
-
- __ bind(&result_longer_than_two);
// Deal with different string types: update the index if necessary
// and put the underlying string into r5.
// r0: original string
@@ -6816,6 +6827,10 @@ void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
Register target) {
__ mov(lr, Operand(reinterpret_cast<intptr_t>(GetCode().location()),
RelocInfo::CODE_TARGET));
+
+ // Prevent literal pool emission during calculation of return address.
+ Assembler::BlockConstPoolScope block_const_pool(masm);
+
// Push return address (accessible to GC through exit frame pc).
// Note that using pc with str is deprecated.
Label start;
@@ -7106,8 +7121,8 @@ static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
// KeyedStoreStubCompiler::GenerateStoreFastElement.
{ REG(r3), REG(r2), REG(r4), EMIT_REMEMBERED_SET },
{ REG(r2), REG(r3), REG(r4), EMIT_REMEMBERED_SET },
- // ElementsTransitionGenerator::GenerateSmiOnlyToObject
- // and ElementsTransitionGenerator::GenerateSmiOnlyToDouble
+ // ElementsTransitionGenerator::GenerateMapChangeElementTransition
+ // and ElementsTransitionGenerator::GenerateSmiToDouble
// and ElementsTransitionGenerator::GenerateDoubleToObject
{ REG(r2), REG(r3), REG(r9), EMIT_REMEMBERED_SET },
{ REG(r2), REG(r3), REG(r9), OMIT_REMEMBERED_SET },
@@ -7176,8 +7191,13 @@ void RecordWriteStub::Generate(MacroAssembler* masm) {
// forth between a compare instructions (a nop in this position) and the
// real branch when we start and stop incremental heap marking.
// See RecordWriteStub::Patch for details.
- __ b(&skip_to_incremental_noncompacting);
- __ b(&skip_to_incremental_compacting);
+ {
+ // Block literal pool emission, as the position of these two instructions
+ // is assumed by the patching code.
+ Assembler::BlockConstPoolScope block_const_pool(masm);
+ __ b(&skip_to_incremental_noncompacting);
+ __ b(&skip_to_incremental_compacting);
+ }
if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
__ RememberedSetHelper(object_,
@@ -7370,9 +7390,9 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
Label fast_elements;
__ CheckFastElements(r2, r5, &double_elements);
- // FAST_SMI_ONLY_ELEMENTS or FAST_ELEMENTS
+ // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS
__ JumpIfSmi(r0, &smi_element);
- __ CheckFastSmiOnlyElements(r2, r5, &fast_elements);
+ __ CheckFastSmiElements(r2, r5, &fast_elements);
// Store into the array literal requires a elements transition. Call into
// the runtime.
@@ -7384,7 +7404,7 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
__ Push(r5, r4);
__ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
- // Array literal has ElementsKind of FAST_ELEMENTS and value is an object.
+ // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
__ bind(&fast_elements);
__ ldr(r5, FieldMemOperand(r1, JSObject::kElementsOffset));
__ add(r6, r5, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
@@ -7395,8 +7415,8 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
__ Ret();
- // Array literal has ElementsKind of FAST_SMI_ONLY_ELEMENTS or
- // FAST_ELEMENTS, and value is Smi.
+ // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS,
+ // and value is Smi.
__ bind(&smi_element);
__ ldr(r5, FieldMemOperand(r1, JSObject::kElementsOffset));
__ add(r6, r5, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
diff --git a/deps/v8/src/arm/codegen-arm.cc b/deps/v8/src/arm/codegen-arm.cc
index befd8f2de7..e00afb9035 100644
--- a/deps/v8/src/arm/codegen-arm.cc
+++ b/deps/v8/src/arm/codegen-arm.cc
@@ -73,7 +73,7 @@ void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
// -------------------------------------------------------------------------
// Code generators
-void ElementsTransitionGenerator::GenerateSmiOnlyToObject(
+void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r0 : value
@@ -96,7 +96,7 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToObject(
}
-void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
+void ElementsTransitionGenerator::GenerateSmiToDouble(
MacroAssembler* masm, Label* fail) {
// ----------- S t a t e -------------
// -- r0 : value
diff --git a/deps/v8/src/arm/debug-arm.cc b/deps/v8/src/arm/debug-arm.cc
index 96139a2597..3e7a1e9d0e 100644
--- a/deps/v8/src/arm/debug-arm.cc
+++ b/deps/v8/src/arm/debug-arm.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -125,6 +125,8 @@ void BreakLocationIterator::ClearDebugBreakAtSlot() {
Assembler::kDebugBreakSlotInstructions);
}
+const bool Debug::FramePaddingLayout::kIsSupported = false;
+
#define __ ACCESS_MASM(masm)
diff --git a/deps/v8/src/arm/full-codegen-arm.cc b/deps/v8/src/arm/full-codegen-arm.cc
index 69b12ce5ee..ff7c3c139e 100644
--- a/deps/v8/src/arm/full-codegen-arm.cc
+++ b/deps/v8/src/arm/full-codegen-arm.cc
@@ -73,9 +73,6 @@ class JumpPatchSite BASE_EMBEDDED {
Assembler::BlockConstPoolScope block_const_pool(masm_);
__ bind(&patch_site_);
__ cmp(reg, Operand(reg));
- // Don't use b(al, ...) as that might emit the constant pool right after the
- // branch. After patching when the branch is no longer unconditional
- // execution can continue into the constant pool.
__ b(eq, target); // Always taken before patched.
}
@@ -90,6 +87,8 @@ class JumpPatchSite BASE_EMBEDDED {
}
void EmitPatchInfo() {
+ // Block literal pool emission whilst recording patch site information.
+ Assembler::BlockConstPoolScope block_const_pool(masm_);
if (patch_site_.is_bound()) {
int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
Register reg;
@@ -112,13 +111,6 @@ class JumpPatchSite BASE_EMBEDDED {
};
-// TODO(jkummerow): Obsolete as soon as x64 is updated. Remove.
-int FullCodeGenerator::self_optimization_header_size() {
- UNREACHABLE();
- return 24;
-}
-
-
// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
@@ -275,11 +267,11 @@ void FullCodeGenerator::Generate() {
// For named function expressions, declare the function name as a
// constant.
if (scope()->is_function_scope() && scope()->function() != NULL) {
- VariableProxy* proxy = scope()->function();
- ASSERT(proxy->var()->mode() == CONST ||
- proxy->var()->mode() == CONST_HARMONY);
- ASSERT(proxy->var()->location() != Variable::UNALLOCATED);
- EmitDeclaration(proxy, proxy->var()->mode(), NULL);
+ VariableDeclaration* function = scope()->function();
+ ASSERT(function->proxy()->var()->mode() == CONST ||
+ function->proxy()->var()->mode() == CONST_HARMONY);
+ ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
+ VisitVariableDeclaration(function);
}
VisitDeclarations(scope()->declarations());
}
@@ -351,6 +343,8 @@ static const int kBackEdgeDistanceDivisor = 142;
void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
Label* back_edge_target) {
Comment cmnt(masm_, "[ Stack check");
+ // Block literal pools whilst emitting stack check code.
+ Assembler::BlockConstPoolScope block_const_pool(masm_);
Label ok;
if (FLAG_count_based_interrupts) {
@@ -789,62 +783,52 @@ void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
}
-void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
- VariableMode mode,
- FunctionLiteral* function) {
+void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
+ // The variable in the declaration always resides in the current function
+ // context.
+ ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
+ if (FLAG_debug_code) {
+ // Check that we're not inside a with or catch context.
+ __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
+ __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
+ __ Check(ne, "Declaration in with context.");
+ __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
+ __ Check(ne, "Declaration in catch context.");
+ }
+}
+
+
+void FullCodeGenerator::VisitVariableDeclaration(
+ VariableDeclaration* declaration) {
// If it was not possible to allocate the variable at compile time, we
// need to "declare" it at runtime to make sure it actually exists in the
// local context.
+ VariableProxy* proxy = declaration->proxy();
+ VariableMode mode = declaration->mode();
Variable* variable = proxy->var();
- bool binding_needs_init = (function == NULL) &&
- (mode == CONST || mode == CONST_HARMONY || mode == LET);
+ bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
switch (variable->location()) {
case Variable::UNALLOCATED:
- ++global_count_;
+ globals_->Add(variable->name(), zone());
+ globals_->Add(variable->binding_needs_init()
+ ? isolate()->factory()->the_hole_value()
+ : isolate()->factory()->undefined_value(),
+ zone());
break;
case Variable::PARAMETER:
case Variable::LOCAL:
- if (function != NULL) {
- Comment cmnt(masm_, "[ Declaration");
- VisitForAccumulatorValue(function);
- __ str(result_register(), StackOperand(variable));
- } else if (binding_needs_init) {
- Comment cmnt(masm_, "[ Declaration");
+ if (hole_init) {
+ Comment cmnt(masm_, "[ VariableDeclaration");
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
__ str(ip, StackOperand(variable));
}
break;
case Variable::CONTEXT:
- // The variable in the decl always resides in the current function
- // context.
- ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
- if (FLAG_debug_code) {
- // Check that we're not inside a with or catch context.
- __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
- __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
- __ Check(ne, "Declaration in with context.");
- __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
- __ Check(ne, "Declaration in catch context.");
- }
- if (function != NULL) {
- Comment cmnt(masm_, "[ Declaration");
- VisitForAccumulatorValue(function);
- __ str(result_register(), ContextOperand(cp, variable->index()));
- int offset = Context::SlotOffset(variable->index());
- // We know that we have written a function, which is not a smi.
- __ RecordWriteContextSlot(cp,
- offset,
- result_register(),
- r2,
- kLRHasBeenSaved,
- kDontSaveFPRegs,
- EMIT_REMEMBERED_SET,
- OMIT_SMI_CHECK);
- PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
- } else if (binding_needs_init) {
- Comment cmnt(masm_, "[ Declaration");
+ if (hole_init) {
+ Comment cmnt(masm_, "[ VariableDeclaration");
+ EmitDebugCheckDeclarationContext(variable);
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
__ str(ip, ContextOperand(cp, variable->index()));
// No write barrier since the_hole_value is in old space.
@@ -853,13 +837,11 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
break;
case Variable::LOOKUP: {
- Comment cmnt(masm_, "[ Declaration");
+ Comment cmnt(masm_, "[ VariableDeclaration");
__ mov(r2, Operand(variable->name()));
// Declaration nodes are always introduced in one of four modes.
- ASSERT(mode == VAR ||
- mode == CONST ||
- mode == CONST_HARMONY ||
- mode == LET);
+ ASSERT(mode == VAR || mode == LET ||
+ mode == CONST || mode == CONST_HARMONY);
PropertyAttributes attr = (mode == CONST || mode == CONST_HARMONY)
? READ_ONLY : NONE;
__ mov(r1, Operand(Smi::FromInt(attr)));
@@ -867,11 +849,7 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
// Note: For variables we must not push an initial value (such as
// 'undefined') because we may have a (legal) redeclaration and we
// must not destroy the current value.
- if (function != NULL) {
- __ Push(cp, r2, r1);
- // Push initial value for function declaration.
- VisitForStackValue(function);
- } else if (binding_needs_init) {
+ if (hole_init) {
__ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
__ Push(cp, r2, r1, r0);
} else {
@@ -885,6 +863,122 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
}
+void FullCodeGenerator::VisitFunctionDeclaration(
+ FunctionDeclaration* declaration) {
+ VariableProxy* proxy = declaration->proxy();
+ Variable* variable = proxy->var();
+ switch (variable->location()) {
+ case Variable::UNALLOCATED: {
+ globals_->Add(variable->name(), zone());
+ Handle<SharedFunctionInfo> function =
+ Compiler::BuildFunctionInfo(declaration->fun(), script());
+ // Check for stack-overflow exception.
+ if (function.is_null()) return SetStackOverflow();
+ globals_->Add(function, zone());
+ break;
+ }
+
+ case Variable::PARAMETER:
+ case Variable::LOCAL: {
+ Comment cmnt(masm_, "[ FunctionDeclaration");
+ VisitForAccumulatorValue(declaration->fun());
+ __ str(result_register(), StackOperand(variable));
+ break;
+ }
+
+ case Variable::CONTEXT: {
+ Comment cmnt(masm_, "[ FunctionDeclaration");
+ EmitDebugCheckDeclarationContext(variable);
+ VisitForAccumulatorValue(declaration->fun());
+ __ str(result_register(), ContextOperand(cp, variable->index()));
+ int offset = Context::SlotOffset(variable->index());
+ // We know that we have written a function, which is not a smi.
+ __ RecordWriteContextSlot(cp,
+ offset,
+ result_register(),
+ r2,
+ kLRHasBeenSaved,
+ kDontSaveFPRegs,
+ EMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
+ break;
+ }
+
+ case Variable::LOOKUP: {
+ Comment cmnt(masm_, "[ FunctionDeclaration");
+ __ mov(r2, Operand(variable->name()));
+ __ mov(r1, Operand(Smi::FromInt(NONE)));
+ __ Push(cp, r2, r1);
+ // Push initial value for function declaration.
+ VisitForStackValue(declaration->fun());
+ __ CallRuntime(Runtime::kDeclareContextSlot, 4);
+ break;
+ }
+ }
+}
+
+
+void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
+ VariableProxy* proxy = declaration->proxy();
+ Variable* variable = proxy->var();
+ Handle<JSModule> instance = declaration->module()->interface()->Instance();
+ ASSERT(!instance.is_null());
+
+ switch (variable->location()) {
+ case Variable::UNALLOCATED: {
+ Comment cmnt(masm_, "[ ModuleDeclaration");
+ globals_->Add(variable->name(), zone());
+ globals_->Add(instance, zone());
+ Visit(declaration->module());
+ break;
+ }
+
+ case Variable::CONTEXT: {
+ Comment cmnt(masm_, "[ ModuleDeclaration");
+ EmitDebugCheckDeclarationContext(variable);
+ __ mov(r1, Operand(instance));
+ __ str(r1, ContextOperand(cp, variable->index()));
+ Visit(declaration->module());
+ break;
+ }
+
+ case Variable::PARAMETER:
+ case Variable::LOCAL:
+ case Variable::LOOKUP:
+ UNREACHABLE();
+ }
+}
+
+
+void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
+ VariableProxy* proxy = declaration->proxy();
+ Variable* variable = proxy->var();
+ switch (variable->location()) {
+ case Variable::UNALLOCATED:
+ // TODO(rossberg)
+ break;
+
+ case Variable::CONTEXT: {
+ Comment cmnt(masm_, "[ ImportDeclaration");
+ EmitDebugCheckDeclarationContext(variable);
+ // TODO(rossberg)
+ break;
+ }
+
+ case Variable::PARAMETER:
+ case Variable::LOCAL:
+ case Variable::LOOKUP:
+ UNREACHABLE();
+ }
+}
+
+
+void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
+ // TODO(rossberg)
+}
+
+
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
// Call the runtime to declare the globals.
// The context is the first argument.
@@ -1511,7 +1605,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
// Mark all computed expressions that are bound to a key that
// is shadowed by a later occurrence of the same key. For the
// marked expressions, no store code is emitted.
- expr->CalculateEmitStore();
+ expr->CalculateEmitStore(zone());
AccessorTable accessor_table(isolate()->zone());
for (int i = 0; i < expr->properties()->length(); i++) {
@@ -1609,7 +1703,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
ASSERT_EQ(2, constant_elements->length());
ElementsKind constant_elements_kind =
static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
- bool has_fast_elements = constant_elements_kind == FAST_ELEMENTS;
+ bool has_fast_elements = IsFastObjectElementsKind(constant_elements_kind);
Handle<FixedArrayBase> constant_elements_values(
FixedArrayBase::cast(constant_elements->get(1)));
@@ -1630,8 +1724,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
__ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
} else {
- ASSERT(constant_elements_kind == FAST_ELEMENTS ||
- constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
+ ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
FLAG_smi_only_arrays);
FastCloneShallowArrayStub::Mode mode = has_fast_elements
? FastCloneShallowArrayStub::CLONE_ELEMENTS
@@ -1659,7 +1752,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
}
VisitForAccumulatorValue(subexpr);
- if (constant_elements_kind == FAST_ELEMENTS) {
+ if (IsFastObjectElementsKind(constant_elements_kind)) {
int offset = FixedArray::kHeaderSize + (i * kPointerSize);
__ ldr(r6, MemOperand(sp)); // Copy of array literal.
__ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset));
@@ -2271,6 +2364,18 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
}
// Record source position for debugger.
SetSourcePosition(expr->position());
+
+ // Record call targets in unoptimized code, but not in the snapshot.
+ if (!Serializer::enabled()) {
+ flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
+ Handle<Object> uninitialized =
+ TypeFeedbackCells::UninitializedSentinel(isolate());
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
+ RecordTypeFeedbackCell(expr->id(), cell);
+ __ mov(r2, Operand(cell));
+ }
+
CallFunctionStub stub(arg_count, flags);
__ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
@@ -3564,7 +3669,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
__ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
__ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
__ ldr(scratch1, FieldMemOperand(string, SeqAsciiString::kLengthOffset));
- __ add(string_length, string_length, Operand(scratch1));
+ __ add(string_length, string_length, Operand(scratch1), SetCC);
__ b(vs, &bailout);
__ cmp(element, elements_end);
__ b(lt, &loop);
@@ -3601,7 +3706,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
__ b(ne, &bailout);
__ tst(scratch2, Operand(0x80000000));
__ b(ne, &bailout);
- __ add(string_length, string_length, Operand(scratch2));
+ __ add(string_length, string_length, Operand(scratch2), SetCC);
__ b(vs, &bailout);
__ SmiUntag(string_length);
@@ -4357,7 +4462,8 @@ void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
Scope* declaration_scope = scope()->DeclarationScope();
- if (declaration_scope->is_global_scope()) {
+ if (declaration_scope->is_global_scope() ||
+ declaration_scope->is_module_scope()) {
// Contexts nested in the global context have a canonical empty function
// as their closure, not the anonymous closure containing the global
// code. Pass a smi sentinel and let the runtime look up the empty
@@ -4388,14 +4494,55 @@ void FullCodeGenerator::EnterFinallyBlock() {
ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
STATIC_ASSERT(kSmiTag == 0);
__ add(r1, r1, Operand(r1)); // Convert to smi.
+
+ // Store result register while executing finally block.
+ __ push(r1);
+
+ // Store pending message while executing finally block.
+ ExternalReference pending_message_obj =
+ ExternalReference::address_of_pending_message_obj(isolate());
+ __ mov(ip, Operand(pending_message_obj));
+ __ ldr(r1, MemOperand(ip));
+ __ push(r1);
+
+ ExternalReference has_pending_message =
+ ExternalReference::address_of_has_pending_message(isolate());
+ __ mov(ip, Operand(has_pending_message));
+ __ ldr(r1, MemOperand(ip));
+ __ push(r1);
+
+ ExternalReference pending_message_script =
+ ExternalReference::address_of_pending_message_script(isolate());
+ __ mov(ip, Operand(pending_message_script));
+ __ ldr(r1, MemOperand(ip));
__ push(r1);
}
void FullCodeGenerator::ExitFinallyBlock() {
ASSERT(!result_register().is(r1));
+ // Restore pending message from stack.
+ __ pop(r1);
+ ExternalReference pending_message_script =
+ ExternalReference::address_of_pending_message_script(isolate());
+ __ mov(ip, Operand(pending_message_script));
+ __ str(r1, MemOperand(ip));
+
+ __ pop(r1);
+ ExternalReference has_pending_message =
+ ExternalReference::address_of_has_pending_message(isolate());
+ __ mov(ip, Operand(has_pending_message));
+ __ str(r1, MemOperand(ip));
+
+ __ pop(r1);
+ ExternalReference pending_message_obj =
+ ExternalReference::address_of_pending_message_obj(isolate());
+ __ mov(ip, Operand(pending_message_obj));
+ __ str(r1, MemOperand(ip));
+
// Restore result register from stack.
__ pop(r1);
+
// Uncook return address and return.
__ pop(result_register());
ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
diff --git a/deps/v8/src/arm/ic-arm.cc b/deps/v8/src/arm/ic-arm.cc
index c88c257092..fd93480986 100644
--- a/deps/v8/src/arm/ic-arm.cc
+++ b/deps/v8/src/arm/ic-arm.cc
@@ -1249,7 +1249,7 @@ void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
// Must return the modified receiver in r0.
if (!FLAG_trace_elements_transitions) {
Label fail;
- ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &fail);
+ ElementsTransitionGenerator::GenerateSmiToDouble(masm, &fail);
__ mov(r0, r2);
__ Ret();
__ bind(&fail);
@@ -1462,27 +1462,27 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ CompareRoot(r4, Heap::kHeapNumberMapRootIndex);
__ b(ne, &non_double_value);
- // Value is a double. Transition FAST_SMI_ONLY_ELEMENTS ->
+ // Value is a double. Transition FAST_SMI_ELEMENTS ->
// FAST_DOUBLE_ELEMENTS and complete the store.
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
FAST_DOUBLE_ELEMENTS,
receiver_map,
r4,
&slow);
ASSERT(receiver_map.is(r3)); // Transition code expects map in r3
- ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &slow);
+ ElementsTransitionGenerator::GenerateSmiToDouble(masm, &slow);
__ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
__ jmp(&fast_double_without_map_check);
__ bind(&non_double_value);
- // Value is not a double, FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
FAST_ELEMENTS,
receiver_map,
r4,
&slow);
ASSERT(receiver_map.is(r3)); // Transition code expects map in r3
- ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm);
+ ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm);
__ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
__ jmp(&finish_object_store);
@@ -1690,12 +1690,12 @@ void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
// Activate inlined smi code.
if (previous_state == UNINITIALIZED) {
- PatchInlinedSmiCode(address());
+ PatchInlinedSmiCode(address(), ENABLE_INLINED_SMI_CHECK);
}
}
-void PatchInlinedSmiCode(Address address) {
+void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
Address cmp_instruction_address =
address + Assembler::kCallTargetAddressOffset;
@@ -1729,34 +1729,31 @@ void PatchInlinedSmiCode(Address address) {
Instr instr_at_patch = Assembler::instr_at(patch_address);
Instr branch_instr =
Assembler::instr_at(patch_address + Instruction::kInstrSize);
- ASSERT(Assembler::IsCmpRegister(instr_at_patch));
- ASSERT_EQ(Assembler::GetRn(instr_at_patch).code(),
- Assembler::GetRm(instr_at_patch).code());
+ // This is patching a conditional "jump if not smi/jump if smi" site.
+ // It is enabled by changing from
+ // cmp rx, rx
+ // b eq/ne, <target>
+ // to
+ // tst rx, #kSmiTagMask
+ // b ne/eq, <target>
+ // and vice versa to disable it again.
+ CodePatcher patcher(patch_address, 2);
+ Register reg = Assembler::GetRn(instr_at_patch);
+ if (check == ENABLE_INLINED_SMI_CHECK) {
+ ASSERT(Assembler::IsCmpRegister(instr_at_patch));
+ ASSERT_EQ(Assembler::GetRn(instr_at_patch).code(),
+ Assembler::GetRm(instr_at_patch).code());
+ patcher.masm()->tst(reg, Operand(kSmiTagMask));
+ } else {
+ ASSERT(check == DISABLE_INLINED_SMI_CHECK);
+ ASSERT(Assembler::IsTstImmediate(instr_at_patch));
+ patcher.masm()->cmp(reg, reg);
+ }
ASSERT(Assembler::IsBranch(branch_instr));
if (Assembler::GetCondition(branch_instr) == eq) {
- // This is patching a "jump if not smi" site to be active.
- // Changing
- // cmp rx, rx
- // b eq, <target>
- // to
- // tst rx, #kSmiTagMask
- // b ne, <target>
- CodePatcher patcher(patch_address, 2);
- Register reg = Assembler::GetRn(instr_at_patch);
- patcher.masm()->tst(reg, Operand(kSmiTagMask));
patcher.EmitCondition(ne);
} else {
ASSERT(Assembler::GetCondition(branch_instr) == ne);
- // This is patching a "jump if smi" site to be active.
- // Changing
- // cmp rx, rx
- // b ne, <target>
- // to
- // tst rx, #kSmiTagMask
- // b eq, <target>
- CodePatcher patcher(patch_address, 2);
- Register reg = Assembler::GetRn(instr_at_patch);
- patcher.masm()->tst(reg, Operand(kSmiTagMask));
patcher.EmitCondition(eq);
}
}
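The patched site toggles between cmp rx, rx (always equal, which effectively disables the smi branch) and tst rx, #kSmiTagMask (branch on the tag bit). As a rough illustration of the predicate the enabled form evaluates, assuming the usual V8 tagging scheme where kSmiTag is 0 and kSmiTagMask is 1:

#include <cstdint>

// Sketch only: smis carry a zero low bit, heap object pointers a one bit, so
// "tst rx, #kSmiTagMask" sets the Z flag exactly when rx holds a smi.
inline bool IsSmiValue(intptr_t tagged_value) {
  const intptr_t kSmiTagMaskSketch = 1;  // assumed value of kSmiTagMask
  return (tagged_value & kSmiTagMaskSketch) == 0;
}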
diff --git a/deps/v8/src/arm/lithium-arm.cc b/deps/v8/src/arm/lithium-arm.cc
index c3dd1cbaa2..283862c787 100644
--- a/deps/v8/src/arm/lithium-arm.cc
+++ b/deps/v8/src/arm/lithium-arm.cc
@@ -108,22 +108,17 @@ void LInstruction::PrintTo(StringStream* stream) {
}
-template<int R, int I, int T>
-void LTemplateInstruction<R, I, T>::PrintDataTo(StringStream* stream) {
+void LInstruction::PrintDataTo(StringStream* stream) {
stream->Add("= ");
- for (int i = 0; i < inputs_.length(); i++) {
+ for (int i = 0; i < InputCount(); i++) {
if (i > 0) stream->Add(" ");
- inputs_[i]->PrintTo(stream);
+ InputAt(i)->PrintTo(stream);
}
}
-template<int R, int I, int T>
-void LTemplateInstruction<R, I, T>::PrintOutputOperandTo(StringStream* stream) {
- for (int i = 0; i < results_.length(); i++) {
- if (i > 0) stream->Add(" ");
- results_[i]->PrintTo(stream);
- }
+void LInstruction::PrintOutputOperandTo(StringStream* stream) {
+ if (HasResult()) result()->PrintTo(stream);
}
@@ -416,9 +411,9 @@ LChunk::LChunk(CompilationInfo* info, HGraph* graph)
: spill_slot_count_(0),
info_(info),
graph_(graph),
- instructions_(32),
- pointer_maps_(8),
- inlined_closures_(1) {
+ instructions_(32, graph->zone()),
+ pointer_maps_(8, graph->zone()),
+ inlined_closures_(1, graph->zone()) {
}
@@ -432,9 +427,9 @@ int LChunk::GetNextSpillIndex(bool is_double) {
LOperand* LChunk::GetNextSpillSlot(bool is_double) {
int index = GetNextSpillIndex(is_double);
if (is_double) {
- return LDoubleStackSlot::Create(index);
+ return LDoubleStackSlot::Create(index, zone());
} else {
- return LStackSlot::Create(index);
+ return LStackSlot::Create(index, zone());
}
}
@@ -479,23 +474,23 @@ void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
LInstructionGap* gap = new(graph_->zone()) LInstructionGap(block);
int index = -1;
if (instr->IsControl()) {
- instructions_.Add(gap);
+ instructions_.Add(gap, zone());
index = instructions_.length();
- instructions_.Add(instr);
+ instructions_.Add(instr, zone());
} else {
index = instructions_.length();
- instructions_.Add(instr);
- instructions_.Add(gap);
+ instructions_.Add(instr, zone());
+ instructions_.Add(gap, zone());
}
if (instr->HasPointerMap()) {
- pointer_maps_.Add(instr->pointer_map());
+ pointer_maps_.Add(instr->pointer_map(), zone());
instr->pointer_map()->set_lithium_position(index);
}
}
LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
- return LConstantOperand::Create(constant->id());
+ return LConstantOperand::Create(constant->id(), zone());
}
@@ -534,7 +529,8 @@ int LChunk::NearestGapPos(int index) const {
void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
- GetGapAt(index)->GetOrCreateParallelMove(LGap::START)->AddMove(from, to);
+ GetGapAt(index)->GetOrCreateParallelMove(
+ LGap::START, zone())->AddMove(from, to, zone());
}
@@ -732,22 +728,6 @@ LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
}
-LInstruction* LChunkBuilder::SetInstructionPendingDeoptimizationEnvironment(
- LInstruction* instr, int ast_id) {
- ASSERT(instruction_pending_deoptimization_environment_ == NULL);
- ASSERT(pending_deoptimization_ast_id_ == AstNode::kNoNumber);
- instruction_pending_deoptimization_environment_ = instr;
- pending_deoptimization_ast_id_ = ast_id;
- return instr;
-}
-
-
-void LChunkBuilder::ClearInstructionPendingDeoptimizationEnvironment() {
- instruction_pending_deoptimization_environment_ = NULL;
- pending_deoptimization_ast_id_ = AstNode::kNoNumber;
-}
-
-
LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
HInstruction* hinstr,
CanDeoptimize can_deoptimize) {
@@ -760,8 +740,10 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
if (hinstr->HasObservableSideEffects()) {
ASSERT(hinstr->next()->IsSimulate());
HSimulate* sim = HSimulate::cast(hinstr->next());
- instr = SetInstructionPendingDeoptimizationEnvironment(
- instr, sim->ast_id());
+ ASSERT(instruction_pending_deoptimization_environment_ == NULL);
+ ASSERT(pending_deoptimization_ast_id_ == AstNode::kNoNumber);
+ instruction_pending_deoptimization_environment_ = instr;
+ pending_deoptimization_ast_id_ = sim->ast_id();
}
// If instruction does not have side-effects lazy deoptimization
@@ -779,15 +761,9 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
}
-LInstruction* LChunkBuilder::MarkAsSaveDoubles(LInstruction* instr) {
- instr->MarkAsSaveDoubles();
- return instr;
-}
-
-
LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
ASSERT(!instr->HasPointerMap());
- instr->set_pointer_map(new(zone()) LPointerMap(position_));
+ instr->set_pointer_map(new(zone()) LPointerMap(position_, zone()));
return instr;
}
@@ -1010,7 +986,8 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
hydrogen_env->parameter_count(),
argument_count_,
value_count,
- outer);
+ outer,
+ zone());
int argument_index = *argument_index_accumulator;
for (int i = 0; i < value_count; ++i) {
if (hydrogen_env->is_special_index(i)) continue;
@@ -1295,6 +1272,7 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
ASSERT(instr->value()->representation().IsInteger32());
ASSERT(instr->representation().IsInteger32());
+ if (instr->HasNoUses()) return NULL;
LOperand* value = UseRegisterAtStart(instr->value());
return DefineAsRegister(new(zone()) LBitNotI(value));
}
@@ -1319,6 +1297,76 @@ LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
}
+bool LChunkBuilder::HasMagicNumberForDivisor(int32_t divisor) {
+ uint32_t divisor_abs = abs(divisor);
+ // Dividing by 0, 1, and powers of 2 is easy.
+ // Note that IsPowerOf2(0) returns true.
+ ASSERT(IsPowerOf2(0) == true);
+ if (IsPowerOf2(divisor_abs)) return true;
+
+ // We have magic numbers for a few specific divisors.
+ // Details and proofs can be found in:
+ // - Hacker's Delight, Henry S. Warren, Jr.
+ // - The PowerPC Compiler Writer’s Guide
+ // and probably many others.
+ //
+ // We handle
+ // <divisor with magic numbers> * <power of 2>
+ // but not
+ // <divisor with magic numbers> * <other divisor with magic numbers>
+ int32_t power_of_2_factor =
+ CompilerIntrinsics::CountTrailingZeros(divisor_abs);
+ DivMagicNumbers magic_numbers =
+ DivMagicNumberFor(divisor_abs >> power_of_2_factor);
+ if (magic_numbers.M != InvalidDivMagicNumber.M) return true;
+
+ return false;
+}
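For reference, a minimal standalone sketch (not taken from the V8 sources) of what having a magic number buys: signed division by 3 becomes one multiply plus shifts, using the multiplier 0x55555556 tabulated in Hacker's Delight. V8's own constants come from DivMagicNumberFor and may differ in their shift amounts.

#include <cstdint>

// Truncating signed division by 3 without a divide instruction (sketch).
int32_t DivideBy3(int32_t n) {
  int64_t product = 0x55555556LL * n;                   // magic multiply
  int32_t high = static_cast<int32_t>(product >> 32);   // high 32 bits
  // Add 1 for negative dividends so the result truncates toward zero.
  return high + static_cast<int32_t>(static_cast<uint32_t>(n) >> 31);
}
// DivideBy3(7) == 2 and DivideBy3(-7) == -2, matching C++ integer division.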
+
+
+HValue* LChunkBuilder::SimplifiedDividendForMathFloorOfDiv(HValue* dividend) {
+ // A value with an integer representation does not need to be transformed.
+ if (dividend->representation().IsInteger32()) {
+ return dividend;
+ // A change from an integer32 can be replaced by the integer32 value.
+ } else if (dividend->IsChange() &&
+ HChange::cast(dividend)->from().IsInteger32()) {
+ return HChange::cast(dividend)->value();
+ }
+ return NULL;
+}
+
+
+HValue* LChunkBuilder::SimplifiedDivisorForMathFloorOfDiv(HValue* divisor) {
+ // Only optimize when we have magic numbers for the divisor.
+ // The standard integer division routine is usually slower than transitioning
+ // to VFP.
+ if (divisor->IsConstant() &&
+ HConstant::cast(divisor)->HasInteger32Value()) {
+ HConstant* constant_val = HConstant::cast(divisor);
+ int32_t int32_val = constant_val->Integer32Value();
+ if (LChunkBuilder::HasMagicNumberForDivisor(int32_val)) {
+ return constant_val->CopyToRepresentation(Representation::Integer32(),
+ divisor->block()->zone());
+ }
+ }
+ return NULL;
+}
+
+
+LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
+ HValue* right = instr->right();
+ LOperand* dividend = UseRegister(instr->left());
+ LOperand* divisor = UseRegisterOrConstant(right);
+ LOperand* remainder = TempRegister();
+ ASSERT(right->IsConstant() &&
+ HConstant::cast(right)->HasInteger32Value() &&
+ HasMagicNumberForDivisor(HConstant::cast(right)->Integer32Value()));
+ return AssignEnvironment(DefineAsRegister(
+ new(zone()) LMathFloorOfDiv(dividend, divisor, remainder)));
+}
+
+
LInstruction* LChunkBuilder::DoMod(HMod* instr) {
if (instr->representation().IsInteger32()) {
ASSERT(instr->left()->representation().IsInteger32());
@@ -1612,7 +1660,8 @@ LInstruction* LChunkBuilder::DoValueOf(HValueOf* instr) {
LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
LOperand* object = UseFixed(instr->value(), r0);
- LDateField* result = new LDateField(object, FixedTemp(r1), instr->index());
+ LDateField* result =
+ new(zone()) LDateField(object, FixedTemp(r1), instr->index());
return MarkAsCall(DefineFixed(result, r0), instr);
}
@@ -1661,10 +1710,9 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
} else {
ASSERT(to.IsInteger32());
LOperand* value = UseRegisterAtStart(instr->value());
- bool needs_check = !instr->value()->type().IsSmi();
LInstruction* res = NULL;
- if (!needs_check) {
- res = DefineAsRegister(new(zone()) LSmiUntag(value, needs_check));
+ if (instr->value()->type().IsSmi()) {
+ res = DefineAsRegister(new(zone()) LSmiUntag(value, false));
} else {
LOperand* temp1 = TempRegister();
LOperand* temp2 = instr->CanTruncateToInt32() ? TempRegister()
@@ -1753,9 +1801,9 @@ LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
}
-LInstruction* LChunkBuilder::DoCheckMap(HCheckMap* instr) {
+LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- LInstruction* result = new(zone()) LCheckMap(value);
+ LInstruction* result = new(zone()) LCheckMaps(value);
return AssignEnvironment(result);
}
@@ -2037,8 +2085,9 @@ LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
LInstruction* LChunkBuilder::DoTransitionElementsKind(
HTransitionElementsKind* instr) {
- if (instr->original_map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS &&
- instr->transitioned_map()->elements_kind() == FAST_ELEMENTS) {
+ ElementsKind from_kind = instr->original_map()->elements_kind();
+ ElementsKind to_kind = instr->transitioned_map()->elements_kind();
+ if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
LOperand* object = UseRegister(instr->object());
LOperand* new_map_reg = TempRegister();
LTransitionElementsKind* result =
@@ -2059,16 +2108,28 @@ LInstruction* LChunkBuilder::DoTransitionElementsKind(
LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
bool needs_write_barrier = instr->NeedsWriteBarrier();
-
- LOperand* obj = needs_write_barrier
- ? UseTempRegister(instr->object())
- : UseRegisterAtStart(instr->object());
+ bool needs_write_barrier_for_map = !instr->transition().is_null() &&
+ instr->NeedsWriteBarrierForMap();
+
+ LOperand* obj;
+ if (needs_write_barrier) {
+ obj = instr->is_in_object()
+ ? UseRegister(instr->object())
+ : UseTempRegister(instr->object());
+ } else {
+ obj = needs_write_barrier_for_map
+ ? UseRegister(instr->object())
+ : UseRegisterAtStart(instr->object());
+ }
LOperand* val = needs_write_barrier
? UseTempRegister(instr->value())
: UseRegister(instr->value());
- return new(zone()) LStoreNamedField(obj, val);
+ // We need a temporary register for the write barrier of the map field.
+ LOperand* temp = needs_write_barrier_for_map ? TempRegister() : NULL;
+
+ return new(zone()) LStoreNamedField(obj, val, temp);
}
@@ -2111,7 +2172,8 @@ LInstruction* LChunkBuilder::DoStringLength(HStringLength* instr) {
LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
- LAllocateObject* result = new LAllocateObject(TempRegister(), TempRegister());
+ LAllocateObject* result =
+ new(zone()) LAllocateObject(TempRegister(), TempRegister());
return AssignPointerMap(DefineAsRegister(result));
}
@@ -2242,9 +2304,12 @@ LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
if (pending_deoptimization_ast_id_ == instr->ast_id()) {
LInstruction* result = new(zone()) LLazyBailout;
result = AssignEnvironment(result);
+ // Store the lazy deopt environment with the instruction if needed. Right
+ // now it is only used for LInstanceOfKnownGlobal.
instruction_pending_deoptimization_environment_->
- set_deoptimization_environment(result->environment());
- ClearInstructionPendingDeoptimizationEnvironment();
+ SetDeferredLazyDeoptimizationEnvironment(result->environment());
+ instruction_pending_deoptimization_environment_ = NULL;
+ pending_deoptimization_ast_id_ = AstNode::kNoNumber;
return result;
}
@@ -2271,8 +2336,8 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
undefined,
instr->call_kind(),
instr->is_construct());
- if (instr->arguments() != NULL) {
- inner->Bind(instr->arguments(), graph()->GetArgumentsObject());
+ if (instr->arguments_var() != NULL) {
+ inner->Bind(instr->arguments_var(), graph()->GetArgumentsObject());
}
current_block_->UpdateEnvironment(inner);
chunk_->AddInlinedClosure(instr->closure());
@@ -2281,10 +2346,21 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
+ LInstruction* pop = NULL;
+
+ HEnvironment* env = current_block_->last_environment();
+
+ if (instr->arguments_pushed()) {
+ int argument_count = env->arguments_environment()->parameter_count();
+ pop = new(zone()) LDrop(argument_count);
+ argument_count_ -= argument_count;
+ }
+
HEnvironment* outer = current_block_->last_environment()->
DiscardInlined(false);
current_block_->UpdateEnvironment(outer);
- return NULL;
+
+ return pop;
}
diff --git a/deps/v8/src/arm/lithium-arm.h b/deps/v8/src/arm/lithium-arm.h
index 62cde6e249..869a80a280 100644
--- a/deps/v8/src/arm/lithium-arm.h
+++ b/deps/v8/src/arm/lithium-arm.h
@@ -72,7 +72,7 @@ class LCodeGen;
V(CheckFunction) \
V(CheckInstanceType) \
V(CheckNonSmi) \
- V(CheckMap) \
+ V(CheckMaps) \
V(CheckPrototypeMaps) \
V(CheckSmi) \
V(ClampDToUint8) \
@@ -132,6 +132,7 @@ class LCodeGen;
V(LoadNamedField) \
V(LoadNamedFieldPolymorphic) \
V(LoadNamedGeneric) \
+ V(MathFloorOfDiv) \
V(ModI) \
V(MulI) \
V(NumberTagD) \
@@ -179,7 +180,8 @@ class LCodeGen;
V(CheckMapValue) \
V(LoadFieldByIndex) \
V(DateField) \
- V(WrapReceiver)
+ V(WrapReceiver) \
+ V(Drop)
#define DECLARE_CONCRETE_INSTRUCTION(type, mnemonic) \
@@ -203,15 +205,14 @@ class LInstruction: public ZoneObject {
LInstruction()
: environment_(NULL),
hydrogen_value_(NULL),
- is_call_(false),
- is_save_doubles_(false) { }
+ is_call_(false) { }
virtual ~LInstruction() { }
virtual void CompileToNative(LCodeGen* generator) = 0;
virtual const char* Mnemonic() const = 0;
virtual void PrintTo(StringStream* stream);
- virtual void PrintDataTo(StringStream* stream) = 0;
- virtual void PrintOutputOperandTo(StringStream* stream) = 0;
+ virtual void PrintDataTo(StringStream* stream);
+ virtual void PrintOutputOperandTo(StringStream* stream);
enum Opcode {
// Declare a unique enum value for each instruction.
@@ -246,22 +247,12 @@ class LInstruction: public ZoneObject {
void set_hydrogen_value(HValue* value) { hydrogen_value_ = value; }
HValue* hydrogen_value() const { return hydrogen_value_; }
- void set_deoptimization_environment(LEnvironment* env) {
- deoptimization_environment_.set(env);
- }
- LEnvironment* deoptimization_environment() const {
- return deoptimization_environment_.get();
- }
- bool HasDeoptimizationEnvironment() const {
- return deoptimization_environment_.is_set();
- }
+ virtual void SetDeferredLazyDeoptimizationEnvironment(LEnvironment* env) { }
void MarkAsCall() { is_call_ = true; }
- void MarkAsSaveDoubles() { is_save_doubles_ = true; }
// Interface to the register allocator and iterators.
bool IsMarkedAsCall() const { return is_call_; }
- bool IsMarkedAsSaveDoubles() const { return is_save_doubles_; }
virtual bool HasResult() const = 0;
virtual LOperand* result() = 0;
@@ -282,9 +273,7 @@ class LInstruction: public ZoneObject {
LEnvironment* environment_;
SetOncePointer<LPointerMap> pointer_map_;
HValue* hydrogen_value_;
- SetOncePointer<LEnvironment> deoptimization_environment_;
bool is_call_;
- bool is_save_doubles_;
};
@@ -306,9 +295,6 @@ class LTemplateInstruction: public LInstruction {
int TempCount() { return T; }
LOperand* TempAt(int i) { return temps_[i]; }
- virtual void PrintDataTo(StringStream* stream);
- virtual void PrintOutputOperandTo(StringStream* stream);
-
protected:
EmbeddedContainer<LOperand*, R> results_;
EmbeddedContainer<LOperand*, I> inputs_;
@@ -347,8 +333,10 @@ class LGap: public LTemplateInstruction<0, 0, 0> {
LAST_INNER_POSITION = AFTER
};
- LParallelMove* GetOrCreateParallelMove(InnerPosition pos) {
- if (parallel_moves_[pos] == NULL) parallel_moves_[pos] = new LParallelMove;
+ LParallelMove* GetOrCreateParallelMove(InnerPosition pos, Zone* zone) {
+ if (parallel_moves_[pos] == NULL) {
+ parallel_moves_[pos] = new(zone) LParallelMove(zone);
+ }
return parallel_moves_[pos];
}
@@ -534,9 +522,8 @@ class LArgumentsLength: public LTemplateInstruction<1, 1, 0> {
class LArgumentsElements: public LTemplateInstruction<1, 0, 0> {
public:
- LArgumentsElements() { }
-
DECLARE_CONCRETE_INSTRUCTION(ArgumentsElements, "arguments-elements")
+ DECLARE_HYDROGEN_ACCESSOR(ArgumentsElements)
};
@@ -582,6 +569,21 @@ class LDivI: public LTemplateInstruction<1, 2, 0> {
};
+class LMathFloorOfDiv: public LTemplateInstruction<1, 2, 1> {
+ public:
+ LMathFloorOfDiv(LOperand* left,
+ LOperand* right,
+ LOperand* temp = NULL) {
+ inputs_[0] = left;
+ inputs_[1] = right;
+ temps_[0] = temp;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(MathFloorOfDiv, "math-floor-of-div")
+ DECLARE_HYDROGEN_ACCESSOR(MathFloorOfDiv)
+};
+
+
class LMulI: public LTemplateInstruction<1, 2, 1> {
public:
LMulI(LOperand* left, LOperand* right, LOperand* temp) {
@@ -834,6 +836,15 @@ class LInstanceOfKnownGlobal: public LTemplateInstruction<1, 1, 1> {
DECLARE_HYDROGEN_ACCESSOR(InstanceOfKnownGlobal)
Handle<JSFunction> function() const { return hydrogen()->function(); }
+ LEnvironment* GetDeferredLazyDeoptimizationEnvironment() {
+ return lazy_deopt_env_;
+ }
+ virtual void SetDeferredLazyDeoptimizationEnvironment(LEnvironment* env) {
+ lazy_deopt_env_ = env;
+ }
+
+ private:
+ LEnvironment* lazy_deopt_env_;
};
@@ -1227,6 +1238,7 @@ class LLoadKeyedFastElement: public LTemplateInstruction<1, 2, 0> {
LOperand* elements() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
+ uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
@@ -1243,13 +1255,13 @@ class LLoadKeyedFastDoubleElement: public LTemplateInstruction<1, 2, 0> {
LOperand* elements() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
+ uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
class LLoadKeyedSpecializedArrayElement: public LTemplateInstruction<1, 2, 0> {
public:
- LLoadKeyedSpecializedArrayElement(LOperand* external_pointer,
- LOperand* key) {
+ LLoadKeyedSpecializedArrayElement(LOperand* external_pointer, LOperand* key) {
inputs_[0] = external_pointer;
inputs_[1] = key;
}
@@ -1263,6 +1275,7 @@ class LLoadKeyedSpecializedArrayElement: public LTemplateInstruction<1, 2, 0> {
ElementsKind elements_kind() const {
return hydrogen()->elements_kind();
}
+ uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
@@ -1378,6 +1391,19 @@ class LPushArgument: public LTemplateInstruction<0, 1, 0> {
};
+class LDrop: public LTemplateInstruction<0, 0, 0> {
+ public:
+ explicit LDrop(int count) : count_(count) { }
+
+ int count() const { return count_; }
+
+ DECLARE_CONCRETE_INSTRUCTION(Drop, "drop")
+
+ private:
+ int count_;
+};
+
+
class LThisFunction: public LTemplateInstruction<1, 0, 0> {
public:
DECLARE_CONCRETE_INSTRUCTION(ThisFunction, "this-function")
@@ -1460,6 +1486,7 @@ class LInvokeFunction: public LTemplateInstruction<1, 1, 0> {
virtual void PrintDataTo(StringStream* stream);
int arity() const { return hydrogen()->argument_count() - 1; }
+ Handle<JSFunction> known_function() { return hydrogen()->known_function(); }
};
@@ -1659,11 +1686,12 @@ class LSmiUntag: public LTemplateInstruction<1, 1, 0> {
};
-class LStoreNamedField: public LTemplateInstruction<0, 2, 0> {
+class LStoreNamedField: public LTemplateInstruction<0, 2, 1> {
public:
- LStoreNamedField(LOperand* obj, LOperand* val) {
+ LStoreNamedField(LOperand* obj, LOperand* val, LOperand* temp) {
inputs_[0] = obj;
inputs_[1] = val;
+ temps_[0] = temp;
}
DECLARE_CONCRETE_INSTRUCTION(StoreNamedField, "store-named-field")
@@ -1717,6 +1745,7 @@ class LStoreKeyedFastElement: public LTemplateInstruction<0, 3, 0> {
LOperand* object() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
LOperand* value() { return inputs_[2]; }
+ uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
@@ -1739,6 +1768,9 @@ class LStoreKeyedFastDoubleElement: public LTemplateInstruction<0, 3, 0> {
LOperand* elements() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
LOperand* value() { return inputs_[2]; }
+ uint32_t additional_index() const { return hydrogen()->index_offset(); }
+
+ bool NeedsCanonicalization() { return hydrogen()->NeedsCanonicalization(); }
};
@@ -1781,6 +1813,7 @@ class LStoreKeyedSpecializedArrayElement: public LTemplateInstruction<0, 3, 0> {
ElementsKind elements_kind() const {
return hydrogen()->elements_kind();
}
+ uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
@@ -1889,14 +1922,14 @@ class LCheckInstanceType: public LTemplateInstruction<0, 1, 0> {
};
-class LCheckMap: public LTemplateInstruction<0, 1, 0> {
+class LCheckMaps: public LTemplateInstruction<0, 1, 0> {
public:
- explicit LCheckMap(LOperand* value) {
+ explicit LCheckMaps(LOperand* value) {
inputs_[0] = value;
}
- DECLARE_CONCRETE_INSTRUCTION(CheckMap, "check-map")
- DECLARE_HYDROGEN_ACCESSOR(CheckMap)
+ DECLARE_CONCRETE_INSTRUCTION(CheckMaps, "check-maps")
+ DECLARE_HYDROGEN_ACCESSOR(CheckMaps)
};
@@ -2236,9 +2269,11 @@ class LChunk: public ZoneObject {
}
void AddInlinedClosure(Handle<JSFunction> closure) {
- inlined_closures_.Add(closure);
+ inlined_closures_.Add(closure, zone());
}
+ Zone* zone() const { return graph_->zone(); }
+
private:
int spill_slot_count_;
CompilationInfo* info_;
@@ -2255,7 +2290,7 @@ class LChunkBuilder BASE_EMBEDDED {
: chunk_(NULL),
info_(info),
graph_(graph),
- zone_(graph->isolate()->zone()),
+ zone_(graph->zone()),
status_(UNUSED),
current_instruction_(NULL),
current_block_(NULL),
@@ -2274,6 +2309,10 @@ class LChunkBuilder BASE_EMBEDDED {
HYDROGEN_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO
+ static bool HasMagicNumberForDivisor(int32_t divisor);
+ static HValue* SimplifiedDividendForMathFloorOfDiv(HValue* val);
+ static HValue* SimplifiedDivisorForMathFloorOfDiv(HValue* val);
+
private:
enum Status {
UNUSED,
@@ -2369,11 +2408,6 @@ class LChunkBuilder BASE_EMBEDDED {
LInstruction* instr,
HInstruction* hinstr,
CanDeoptimize can_deoptimize = CANNOT_DEOPTIMIZE_EAGERLY);
- LInstruction* MarkAsSaveDoubles(LInstruction* instr);
-
- LInstruction* SetInstructionPendingDeoptimizationEnvironment(
- LInstruction* instr, int ast_id);
- void ClearInstructionPendingDeoptimizationEnvironment();
LEnvironment* CreateEnvironment(HEnvironment* hydrogen_env,
int* argument_index_accumulator);
diff --git a/deps/v8/src/arm/lithium-codegen-arm.cc b/deps/v8/src/arm/lithium-codegen-arm.cc
index 82b80a2b80..256d180f2f 100644
--- a/deps/v8/src/arm/lithium-codegen-arm.cc
+++ b/deps/v8/src/arm/lithium-codegen-arm.cc
@@ -571,6 +571,9 @@ void LCodeGen::CallCodeGeneric(Handle<Code> code,
LInstruction* instr,
SafepointMode safepoint_mode) {
ASSERT(instr != NULL);
+ // Block literal pool emission to ensure the nop indicating no inlined smi code
+ // is in the correct position.
+ Assembler::BlockConstPoolScope block_const_pool(masm());
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
__ Call(code, mode);
@@ -631,14 +634,15 @@ void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
++jsframe_count;
}
}
- Translation translation(&translations_, frame_count, jsframe_count);
+ Translation translation(&translations_, frame_count, jsframe_count,
+ zone());
WriteTranslation(environment, &translation);
int deoptimization_index = deoptimizations_.length();
int pc_offset = masm()->pc_offset();
environment->Register(deoptimization_index,
translation.index(),
(mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
- deoptimizations_.Add(environment);
+ deoptimizations_.Add(environment, zone());
}
}
@@ -670,7 +674,7 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
// jump entry if this is the case.
if (deopt_jump_table_.is_empty() ||
(deopt_jump_table_.last().address != entry)) {
- deopt_jump_table_.Add(JumpTableEntry(entry));
+ deopt_jump_table_.Add(JumpTableEntry(entry), zone());
}
__ b(cc, &deopt_jump_table_.last().label);
}
@@ -715,7 +719,7 @@ int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
for (int i = 0; i < deoptimization_literals_.length(); ++i) {
if (deoptimization_literals_[i].is_identical_to(literal)) return i;
}
- deoptimization_literals_.Add(literal);
+ deoptimization_literals_.Add(literal, zone());
return result;
}
@@ -761,14 +765,14 @@ void LCodeGen::RecordSafepoint(
for (int i = 0; i < operands->length(); i++) {
LOperand* pointer = operands->at(i);
if (pointer->IsStackSlot()) {
- safepoint.DefinePointerSlot(pointer->index());
+ safepoint.DefinePointerSlot(pointer->index(), zone());
} else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
- safepoint.DefinePointerRegister(ToRegister(pointer));
+ safepoint.DefinePointerRegister(ToRegister(pointer), zone());
}
}
if (kind & Safepoint::kWithRegisters) {
// Register cp always contains a pointer to the context.
- safepoint.DefinePointerRegister(cp);
+ safepoint.DefinePointerRegister(cp, zone());
}
}
@@ -780,7 +784,7 @@ void LCodeGen::RecordSafepoint(LPointerMap* pointers,
void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) {
- LPointerMap empty_pointers(RelocInfo::kNoPosition);
+ LPointerMap empty_pointers(RelocInfo::kNoPosition, zone());
RecordSafepoint(&empty_pointers, deopt_mode);
}
@@ -1034,6 +1038,100 @@ void LCodeGen::DoModI(LModI* instr) {
}
+void LCodeGen::EmitSignedIntegerDivisionByConstant(
+ Register result,
+ Register dividend,
+ int32_t divisor,
+ Register remainder,
+ Register scratch,
+ LEnvironment* environment) {
+ ASSERT(!AreAliased(dividend, scratch, ip));
+ ASSERT(LChunkBuilder::HasMagicNumberForDivisor(divisor));
+
+ uint32_t divisor_abs = abs(divisor);
+
+ int32_t power_of_2_factor =
+ CompilerIntrinsics::CountTrailingZeros(divisor_abs);
+
+ switch (divisor_abs) {
+ case 0:
+ DeoptimizeIf(al, environment);
+ return;
+
+ case 1:
+ if (divisor > 0) {
+ __ Move(result, dividend);
+ } else {
+ __ rsb(result, dividend, Operand(0), SetCC);
+ DeoptimizeIf(vs, environment);
+ }
+ // Compute the remainder.
+ __ mov(remainder, Operand(0));
+ return;
+
+ default:
+ if (IsPowerOf2(divisor_abs)) {
+ // Branch and condition free code for integer division by a power
+ // of two.
+ int32_t power = WhichPowerOf2(divisor_abs);
+ if (power > 1) {
+ __ mov(scratch, Operand(dividend, ASR, power - 1));
+ }
+ __ add(scratch, dividend, Operand(scratch, LSR, 32 - power));
+ __ mov(result, Operand(scratch, ASR, power));
+ // Negate if necessary.
+ // We don't need to check for overflow because the case '-1' is
+ // handled separately.
+ if (divisor < 0) {
+ ASSERT(divisor != -1);
+ __ rsb(result, result, Operand(0));
+ }
+ // Compute the remainder.
+ if (divisor > 0) {
+ __ sub(remainder, dividend, Operand(result, LSL, power));
+ } else {
+ __ add(remainder, dividend, Operand(result, LSL, power));
+ }
+ return;
+ } else {
+ // Use magic numbers for a few specific divisors.
+ // Details and proofs can be found in:
+ // - Hacker's Delight, Henry S. Warren, Jr.
+ // - The PowerPC Compiler Writer’s Guide
+ // and probably many others.
+ //
+ // We handle
+ // <divisor with magic numbers> * <power of 2>
+ // but not
+ // <divisor with magic numbers> * <other divisor with magic numbers>
+ DivMagicNumbers magic_numbers =
+ DivMagicNumberFor(divisor_abs >> power_of_2_factor);
+ // Multiply by the magic number and adjust to get the truncated quotient.
+ const int32_t M = magic_numbers.M;
+ const int32_t s = magic_numbers.s + power_of_2_factor;
+
+ __ mov(ip, Operand(M));
+ __ smull(ip, scratch, dividend, ip);
+ if (M < 0) {
+ __ add(scratch, scratch, Operand(dividend));
+ }
+ if (s > 0) {
+ __ mov(scratch, Operand(scratch, ASR, s));
+ }
+ __ add(result, scratch, Operand(dividend, LSR, 31));
+ if (divisor < 0) __ rsb(result, result, Operand(0));
+ // Compute the remainder.
+ __ mov(ip, Operand(divisor));
+ // This sequence could be replaced with 'mls' when
+ // it gets implemented.
+ __ mul(scratch, result, ip);
+ __ sub(remainder, dividend, scratch);
+ }
+ }
+}
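As an aside, the power-of-two branch above rounds toward zero without branches by biasing negative dividends before the arithmetic shift. A small sketch of the same trick, assuming arithmetic right shift of negative values (as on ARM) and 1 <= power <= 31:

#include <cstdint>

// Truncating division of a signed 32-bit value by 2^power, branch-free.
int32_t DivideByPowerOfTwo(int32_t dividend, int power) {
  int32_t sign = dividend >> 31;                      // 0 or -1
  int32_t bias = static_cast<int32_t>(
      static_cast<uint32_t>(sign) >> (32 - power));   // 0 or 2^power - 1
  return (dividend + bias) >> power;                  // rounds toward zero
}
// DivideByPowerOfTwo(-7, 1) == -3, whereas a plain (-7 >> 1) yields -4.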
+
+
void LCodeGen::DoDivI(LDivI* instr) {
class DeferredDivI: public LDeferredCode {
public:
@@ -1096,7 +1194,7 @@ void LCodeGen::DoDivI(LDivI* instr) {
// Call the stub. The numbers in r0 and r1 have
// to be tagged to Smis. If that is not possible, deoptimize.
- DeferredDivI* deferred = new DeferredDivI(this, instr);
+ DeferredDivI* deferred = new(zone()) DeferredDivI(this, instr);
__ TrySmiTag(left, &deoptimize, scratch);
__ TrySmiTag(right, &deoptimize, scratch);
@@ -1115,6 +1213,34 @@ void LCodeGen::DoDivI(LDivI* instr) {
}
+void LCodeGen::DoMathFloorOfDiv(LMathFloorOfDiv* instr) {
+ const Register result = ToRegister(instr->result());
+ const Register left = ToRegister(instr->InputAt(0));
+ const Register remainder = ToRegister(instr->TempAt(0));
+ const Register scratch = scratch0();
+
+ // We only optimize this for division by constants, because the standard
+ // integer division routine is usually slower than transitioning to VFP.
+ // This could be optimized on processors with SDIV available.
+ ASSERT(instr->InputAt(1)->IsConstantOperand());
+ int32_t divisor = ToInteger32(LConstantOperand::cast(instr->InputAt(1)));
+ if (divisor < 0) {
+ __ cmp(left, Operand(0));
+ DeoptimizeIf(eq, instr->environment());
+ }
+ EmitSignedIntegerDivisionByConstant(result,
+ left,
+ divisor,
+ remainder,
+ scratch,
+ instr->environment());
+ // We performed a truncating division. Correct the result if necessary.
+ __ cmp(remainder, Operand(0));
+ __ teq(remainder, Operand(divisor), ne);
+ __ sub(result, result, Operand(1), LeaveCC, mi);
+}
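The cmp/teq/sub tail above turns the truncated quotient into a floored one: the quotient is decremented exactly when the remainder is non-zero and its sign differs from the divisor's. A sketch of the same correction in plain C++, for illustration only:

#include <cstdint>

// Floor division built from truncating division, mirroring the fix-up above.
int32_t FlooredDivide(int32_t dividend, int32_t divisor) {
  int32_t quotient = dividend / divisor;              // truncates toward zero
  int32_t remainder = dividend - quotient * divisor;
  // Subtract one when remainder != 0 and sign(remainder) != sign(divisor).
  if (remainder != 0 && ((remainder ^ divisor) < 0)) {
    quotient -= 1;
  }
  return quotient;
}
// FlooredDivide(-7, 2) == -4, while plain C++ division -7 / 2 gives -3.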
+
+
template<int T>
void LCodeGen::DoDeferredBinaryOpStub(LTemplateInstruction<1, 2, T>* instr,
Token::Value op) {
@@ -1562,6 +1688,9 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
ASSERT(ToRegister(instr->result()).is(r0));
BinaryOpStub stub(instr->op(), NO_OVERWRITE);
+ // Block literal pool emission to ensure the nop indicating no inlined smi code
+ // is in the correct position.
+ Assembler::BlockConstPoolScope block_const_pool(masm());
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
__ nop(); // Signals no inlined code.
}
@@ -2174,7 +2303,7 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
};
DeferredInstanceOfKnownGlobal* deferred;
- deferred = new DeferredInstanceOfKnownGlobal(this, instr);
+ deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
Label done, false_result;
Register object = ToRegister(instr->InputAt(0));
@@ -2193,20 +2322,25 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
Label cache_miss;
Register map = temp;
__ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
- __ bind(deferred->map_check()); // Label for calculating code patching.
- // We use Factory::the_hole_value() on purpose instead of loading from the
- // root array to force relocation to be able to later patch with
- // the cached map.
- Handle<JSGlobalPropertyCell> cell =
- factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
- __ mov(ip, Operand(Handle<Object>(cell)));
- __ ldr(ip, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
- __ cmp(map, Operand(ip));
- __ b(ne, &cache_miss);
- // We use Factory::the_hole_value() on purpose instead of loading from the
- // root array to force relocation to be able to later patch
- // with true or false.
- __ mov(result, Operand(factory()->the_hole_value()));
+ {
+ // Block constant pool emission to ensure the positions of instructions are
+ // as expected by the patcher. See InstanceofStub::Generate().
+ Assembler::BlockConstPoolScope block_const_pool(masm());
+ __ bind(deferred->map_check()); // Label for calculating code patching.
+ // We use Factory::the_hole_value() on purpose instead of loading from the
+ // root array to force relocation to be able to later patch with
+ // the cached map.
+ Handle<JSGlobalPropertyCell> cell =
+ factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
+ __ mov(ip, Operand(Handle<Object>(cell)));
+ __ ldr(ip, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
+ __ cmp(map, Operand(ip));
+ __ b(ne, &cache_miss);
+ // We use Factory::the_hole_value() on purpose instead of loading from the
+ // root array to force relocation to be able to later patch
+ // with true or false.
+ __ mov(result, Operand(factory()->the_hole_value()));
+ }
__ b(&done);
// The inlined call site cache did not match. Check null and string before
@@ -2267,8 +2401,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
RelocInfo::CODE_TARGET,
instr,
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
- ASSERT(instr->HasDeoptimizationEnvironment());
- LEnvironment* env = instr->deoptimization_environment();
+ LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
// Put the result value into the result register slot and
// restore all registers.
@@ -2438,12 +2571,12 @@ void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
Register object,
Handle<Map> type,
- Handle<String> name) {
+ Handle<String> name,
+ LEnvironment* env) {
LookupResult lookup(isolate());
type->LookupInDescriptors(NULL, *name, &lookup);
- ASSERT(lookup.IsFound() &&
- (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
- if (lookup.type() == FIELD) {
+ ASSERT(lookup.IsFound() || lookup.IsCacheable());
+ if (lookup.IsFound() && lookup.type() == FIELD) {
int index = lookup.GetLocalFieldIndexFromMap(*type);
int offset = index * kPointerSize;
if (index < 0) {
@@ -2455,9 +2588,23 @@ void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
__ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
__ ldr(result, FieldMemOperand(result, offset + FixedArray::kHeaderSize));
}
- } else {
+ } else if (lookup.IsFound() && lookup.type() == CONSTANT_FUNCTION) {
Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
__ LoadHeapObject(result, function);
+ } else {
+ // Negative lookup.
+ // Check prototypes.
+ HeapObject* current = HeapObject::cast((*type)->prototype());
+ Heap* heap = type->GetHeap();
+ while (current != heap->null_value()) {
+ Handle<HeapObject> link(current);
+ __ LoadHeapObject(result, link);
+ __ ldr(result, FieldMemOperand(result, HeapObject::kMapOffset));
+ __ cmp(result, Operand(Handle<Map>(JSObject::cast(current)->map())));
+ DeoptimizeIf(ne, env);
+ current = HeapObject::cast(current->map()->prototype());
+ }
+ __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
}
}
@@ -2465,43 +2612,45 @@ void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
Register object = ToRegister(instr->object());
Register result = ToRegister(instr->result());
- Register scratch = scratch0();
+ Register object_map = scratch0();
+
int map_count = instr->hydrogen()->types()->length();
+ bool need_generic = instr->hydrogen()->need_generic();
+
+ if (map_count == 0 && !need_generic) {
+ DeoptimizeIf(al, instr->environment());
+ return;
+ }
Handle<String> name = instr->hydrogen()->name();
- if (map_count == 0) {
- ASSERT(instr->hydrogen()->need_generic());
- __ mov(r2, Operand(name));
- Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
- } else {
- Label done;
- __ ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
- for (int i = 0; i < map_count - 1; ++i) {
- Handle<Map> map = instr->hydrogen()->types()->at(i);
+ Label done;
+ __ ldr(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
+ for (int i = 0; i < map_count; ++i) {
+ bool last = (i == map_count - 1);
+ Handle<Map> map = instr->hydrogen()->types()->at(i);
+ Label check_passed;
+ __ CompareMap(
+ object_map, map, &check_passed, ALLOW_ELEMENT_TRANSITION_MAPS);
+ if (last && !need_generic) {
+ DeoptimizeIf(ne, instr->environment());
+ __ bind(&check_passed);
+ EmitLoadFieldOrConstantFunction(
+ result, object, map, name, instr->environment());
+ } else {
Label next;
- __ cmp(scratch, Operand(map));
__ b(ne, &next);
- EmitLoadFieldOrConstantFunction(result, object, map, name);
+ __ bind(&check_passed);
+ EmitLoadFieldOrConstantFunction(
+ result, object, map, name, instr->environment());
__ b(&done);
__ bind(&next);
}
- Handle<Map> map = instr->hydrogen()->types()->last();
- __ cmp(scratch, Operand(map));
- if (instr->hydrogen()->need_generic()) {
- Label generic;
- __ b(ne, &generic);
- EmitLoadFieldOrConstantFunction(result, object, map, name);
- __ b(&done);
- __ bind(&generic);
- __ mov(r2, Operand(name));
- Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
- } else {
- DeoptimizeIf(ne, instr->environment());
- EmitLoadFieldOrConstantFunction(result, object, map, name);
- }
- __ bind(&done);
}
+ if (need_generic) {
+ __ mov(r2, Operand(name));
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);
+ }
+ __ bind(&done);
}
@@ -2579,8 +2728,10 @@ void LCodeGen::DoLoadElements(LLoadElements* instr) {
__ ldr(scratch, FieldMemOperand(scratch, Map::kBitField2Offset));
__ ubfx(scratch, scratch, Map::kElementsKindShift,
Map::kElementsKindBitCount);
- __ cmp(scratch, Operand(FAST_ELEMENTS));
- __ b(eq, &done);
+ __ cmp(scratch, Operand(GetInitialFastElementsKind()));
+ __ b(lt, &fail);
+ __ cmp(scratch, Operand(TERMINAL_FAST_ELEMENTS_KIND));
+ __ b(le, &done);
__ cmp(scratch, Operand(FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND));
__ b(lt, &fail);
__ cmp(scratch, Operand(LAST_EXTERNAL_ARRAY_ELEMENTS_KIND));
@@ -2627,13 +2778,20 @@ void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
// Load the result.
__ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
- __ ldr(result, FieldMemOperand(scratch, FixedArray::kHeaderSize));
+ uint32_t offset = FixedArray::kHeaderSize +
+ (instr->additional_index() << kPointerSizeLog2);
+ __ ldr(result, FieldMemOperand(scratch, offset));
// Check for the hole value.
if (instr->hydrogen()->RequiresHoleCheck()) {
- __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
- __ cmp(result, scratch);
- DeoptimizeIf(eq, instr->environment());
+ if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) {
+ __ tst(result, Operand(kSmiTagMask));
+ DeoptimizeIf(ne, instr->environment());
+ } else {
+ __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
+ __ cmp(result, scratch);
+ DeoptimizeIf(eq, instr->environment());
+ }
}
}
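For orientation, the effective address of the fast-elements load above folds the statically known additional_index into the header offset. A sketch of the arithmetic, where the 4-byte pointer size, the 8-byte FixedArray header and the heap-object tag of 1 are assumptions for illustration rather than values quoted from the sources:

#include <cstdint>

// Address of element (key + additional_index) behind a tagged elements pointer.
uintptr_t KeyedElementAddress(uintptr_t tagged_elements,
                              uint32_t key,
                              uint32_t additional_index) {
  const uint32_t kPointerSizeLog2 = 2;       // assumed: 4-byte pointers on ARM
  const uint32_t kFixedArrayHeaderSize = 8;  // assumed: map + length words
  const uint32_t kHeapObjectTag = 1;         // removed by FieldMemOperand
  uintptr_t scratch = tagged_elements + (key << kPointerSizeLog2);
  uint32_t offset =
      kFixedArrayHeaderSize + (additional_index << kPointerSizeLog2);
  return scratch + offset - kHeapObjectTag;
}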
@@ -2659,18 +2817,21 @@ void LCodeGen::DoLoadKeyedFastDoubleElement(
}
Operand operand = key_is_constant
- ? Operand(constant_key * (1 << shift_size) +
+ ? Operand(((constant_key + instr->additional_index()) << shift_size) +
FixedDoubleArray::kHeaderSize - kHeapObjectTag)
: Operand(key, LSL, shift_size);
__ add(elements, elements, operand);
if (!key_is_constant) {
__ add(elements, elements,
- Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
+ Operand((FixedDoubleArray::kHeaderSize - kHeapObjectTag) +
+ (instr->additional_index() << shift_size)));
}
- __ ldr(scratch, MemOperand(elements, sizeof(kHoleNanLower32)));
- __ cmp(scratch, Operand(kHoleNanUpper32));
- DeoptimizeIf(eq, instr->environment());
+ if (instr->hydrogen()->RequiresHoleCheck()) {
+ __ ldr(scratch, MemOperand(elements, sizeof(kHoleNanLower32)));
+ __ cmp(scratch, Operand(kHoleNanUpper32));
+ DeoptimizeIf(eq, instr->environment());
+ }
__ vldr(result, elements, 0);
}
@@ -2692,26 +2853,33 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
key = ToRegister(instr->key());
}
int shift_size = ElementsKindToShiftSize(elements_kind);
+ int additional_offset = instr->additional_index() << shift_size;
if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
CpuFeatures::Scope scope(VFP3);
DwVfpRegister result = ToDoubleRegister(instr->result());
Operand operand = key_is_constant
- ? Operand(constant_key * (1 << shift_size))
+ ? Operand(constant_key << shift_size)
: Operand(key, LSL, shift_size);
__ add(scratch0(), external_pointer, operand);
if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
- __ vldr(result.low(), scratch0(), 0);
+ __ vldr(result.low(), scratch0(), additional_offset);
__ vcvt_f64_f32(result, result.low());
} else { // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS
- __ vldr(result, scratch0(), 0);
+ __ vldr(result, scratch0(), additional_offset);
}
} else {
Register result = ToRegister(instr->result());
+ if (instr->additional_index() != 0 && !key_is_constant) {
+ __ add(scratch0(), key, Operand(instr->additional_index()));
+ }
MemOperand mem_operand(key_is_constant
- ? MemOperand(external_pointer, constant_key * (1 << shift_size))
- : MemOperand(external_pointer, key, LSL, shift_size));
+ ? MemOperand(external_pointer,
+ (constant_key << shift_size) + additional_offset)
+ : (instr->additional_index() == 0
+ ? MemOperand(external_pointer, key, LSL, shift_size)
+ : MemOperand(external_pointer, scratch0(), LSL, shift_size)));
switch (elements_kind) {
case EXTERNAL_BYTE_ELEMENTS:
__ ldrsb(result, mem_operand);
@@ -2739,9 +2907,12 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
break;
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -2764,16 +2935,20 @@ void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
Register scratch = scratch0();
Register result = ToRegister(instr->result());
- // Check if the calling frame is an arguments adaptor frame.
- Label done, adapted;
- __ ldr(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
- __ ldr(result, MemOperand(scratch, StandardFrameConstants::kContextOffset));
- __ cmp(result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+ if (instr->hydrogen()->from_inlined()) {
+ __ sub(result, sp, Operand(2 * kPointerSize));
+ } else {
+ // Check if the calling frame is an arguments adaptor frame.
+ Label done, adapted;
+ __ ldr(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+ __ ldr(result, MemOperand(scratch, StandardFrameConstants::kContextOffset));
+ __ cmp(result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
- // Result is the frame pointer for the frame if not adapted and for the real
- // frame below the adaptor frame if adapted.
- __ mov(result, fp, LeaveCC, ne);
- __ mov(result, scratch, LeaveCC, eq);
+ // Result is the frame pointer for the frame if not adapted and for the real
+ // frame below the adaptor frame if adapted.
+ __ mov(result, fp, LeaveCC, ne);
+ __ mov(result, scratch, LeaveCC, eq);
+ }
}
@@ -2882,7 +3057,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
__ b(ne, &loop);
__ bind(&invoke);
- ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
+ ASSERT(instr->HasPointerMap());
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
SafepointGenerator safepoint_generator(
@@ -2907,6 +3082,11 @@ void LCodeGen::DoPushArgument(LPushArgument* instr) {
}
+void LCodeGen::DoDrop(LDrop* instr) {
+ __ Drop(instr->count());
+}
+
+
void LCodeGen::DoThisFunction(LThisFunction* instr) {
Register result = ToRegister(instr->result());
__ LoadHeapObject(result, instr->hydrogen()->closure());
@@ -2953,7 +3133,8 @@ void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
int arity,
LInstruction* instr,
- CallKind call_kind) {
+ CallKind call_kind,
+ R1State r1_state) {
bool can_invoke_directly = !function->NeedsArgumentsAdaption() ||
function->shared()->formal_parameter_count() == arity;
@@ -2961,7 +3142,10 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
RecordPosition(pointers->position());
if (can_invoke_directly) {
- __ LoadHeapObject(r1, function);
+ if (r1_state == R1_UNINITIALIZED) {
+ __ LoadHeapObject(r1, function);
+ }
+
// Change context if needed.
bool change_context =
(info()->closure()->context() != function->context()) ||
@@ -3000,7 +3184,8 @@ void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
CallKnownFunction(instr->function(),
instr->arity(),
instr,
- CALL_AS_METHOD);
+ CALL_AS_METHOD,
+ R1_UNINITIALIZED);
}
@@ -3109,7 +3294,7 @@ void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
} else {
// Representation is tagged.
DeferredMathAbsTaggedHeapNumber* deferred =
- new DeferredMathAbsTaggedHeapNumber(this, instr);
+ new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr);
Register input = ToRegister(instr->InputAt(0));
// Smi check.
__ JumpIfNotSmi(input, deferred->entry());
@@ -3286,7 +3471,7 @@ void LCodeGen::DoRandom(LRandom* instr) {
LRandom* instr_;
};
- DeferredDoRandom* deferred = new DeferredDoRandom(this, instr);
+ DeferredDoRandom* deferred = new(zone()) DeferredDoRandom(this, instr);
// Having marked this instruction as a call we can use any
// registers.
@@ -3424,13 +3609,21 @@ void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
ASSERT(ToRegister(instr->function()).is(r1));
ASSERT(instr->HasPointerMap());
- ASSERT(instr->HasDeoptimizationEnvironment());
- LPointerMap* pointers = instr->pointer_map();
- RecordPosition(pointers->position());
- SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
- ParameterCount count(instr->arity());
- __ InvokeFunction(r1, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
- __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+
+ if (instr->known_function().is_null()) {
+ LPointerMap* pointers = instr->pointer_map();
+ RecordPosition(pointers->position());
+ SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
+ ParameterCount count(instr->arity());
+ __ InvokeFunction(r1, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
+ __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+ } else {
+ CallKnownFunction(instr->known_function(),
+ instr->arity(),
+ instr,
+ CALL_AS_METHOD,
+ R1_CONTAINS_TARGET);
+ }
}
@@ -3485,7 +3678,11 @@ void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
ASSERT(ToRegister(instr->result()).is(r0));
- CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION);
+ CallKnownFunction(instr->target(),
+ instr->arity(),
+ instr,
+ CALL_AS_FUNCTION,
+ R1_UNINITIALIZED);
}
@@ -3515,6 +3712,18 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
if (!instr->transition().is_null()) {
__ mov(scratch, Operand(instr->transition()));
__ str(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
+ if (instr->hydrogen()->NeedsWriteBarrierForMap()) {
+ Register temp = ToRegister(instr->TempAt(0));
+ // Update the write barrier for the map field.
+ __ RecordWriteField(object,
+ HeapObject::kMapOffset,
+ scratch,
+ temp,
+ kLRHasBeenSaved,
+ kSaveFPRegs,
+ OMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ }
}
// Do the store.
@@ -3583,10 +3792,16 @@ void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
int offset =
- ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
+ (ToInteger32(const_operand) + instr->additional_index()) * kPointerSize
+ + FixedArray::kHeaderSize;
__ str(value, FieldMemOperand(elements, offset));
} else {
__ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
+ if (instr->additional_index() != 0) {
+ __ add(scratch,
+ scratch,
+ Operand(instr->additional_index() << kPointerSizeLog2));
+ }
__ str(value, FieldMemOperand(scratch, FixedArray::kHeaderSize));
}
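Note: the hunks above fold the instruction's new additional_index() into the element address, either at compile time for a constant key or with a second add for a register key. A rough standalone model of the offset arithmetic (plain C++ with illustrative 32-bit constants, not V8 code):

    #include <cstdint>

    // Illustrative 32-bit ARM values: 4-byte pointers, two-word FixedArray header.
    constexpr int32_t kPointerSize = 4;
    constexpr int32_t kFixedArrayHeaderSize = 2 * kPointerSize;

    // Byte offset of element (key + additional_index) inside a fast elements
    // backing store, before the -kHeapObjectTag adjustment done by the stubs.
    int32_t FastElementOffset(int32_t key, int32_t additional_index) {
      return (key + additional_index) * kPointerSize + kFixedArrayHeaderSize;
    }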
@@ -3615,7 +3830,6 @@ void LCodeGen::DoStoreKeyedFastDoubleElement(
Register scratch = scratch0();
bool key_is_constant = instr->key()->IsConstantOperand();
int constant_key = 0;
- Label not_nan;
// Calculate the effective address of the slot in the array to store the
// double value.
@@ -3629,7 +3843,7 @@ void LCodeGen::DoStoreKeyedFastDoubleElement(
}
int shift_size = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
Operand operand = key_is_constant
- ? Operand(constant_key * (1 << shift_size) +
+ ? Operand((constant_key << shift_size) +
FixedDoubleArray::kHeaderSize - kHeapObjectTag)
: Operand(key, LSL, shift_size);
__ add(scratch, elements, operand);
@@ -3638,14 +3852,16 @@ void LCodeGen::DoStoreKeyedFastDoubleElement(
Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
}
- // Check for NaN. All NaNs must be canonicalized.
- __ VFPCompareAndSetFlags(value, value);
-
- // Only load canonical NaN if the comparison above set the overflow.
- __ Vmov(value, FixedDoubleArray::canonical_not_the_hole_nan_as_double(), vs);
+ if (instr->NeedsCanonicalization()) {
+ // Check for NaN. All NaNs must be canonicalized.
+ __ VFPCompareAndSetFlags(value, value);
+ // Only load canonical NaN if the comparison above set the overflow.
+ __ Vmov(value,
+ FixedDoubleArray::canonical_not_the_hole_nan_as_double(),
+ vs);
+ }
- __ bind(&not_nan);
- __ vstr(value, scratch, 0);
+ __ vstr(value, scratch, instr->additional_index() << shift_size);
}
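Note: the double-element store now canonicalizes NaNs only when the instruction reports NeedsCanonicalization(), and the obsolete not_nan label is dropped. A minimal sketch of the intent in portable C++; std::isnan stands in for the VFP compare, and the quiet-NaN pattern below is illustrative rather than V8's exact canonical constant:

    #include <cmath>
    #include <cstdint>
    #include <cstring>

    double CanonicalizeForStore(double value, bool needs_canonicalization) {
      if (needs_canonicalization && std::isnan(value)) {
        // Replace any NaN payload with one fixed quiet-NaN bit pattern so the
        // backing store never contains a NaN that could look like "the hole".
        const uint64_t kQuietNaNBits = 0x7FF8000000000000ULL;
        std::memcpy(&value, &kQuietNaNBits, sizeof value);
      }
      return value;
    }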
@@ -3666,25 +3882,33 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
key = ToRegister(instr->key());
}
int shift_size = ElementsKindToShiftSize(elements_kind);
+ int additional_offset = instr->additional_index() << shift_size;
if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
CpuFeatures::Scope scope(VFP3);
DwVfpRegister value(ToDoubleRegister(instr->value()));
- Operand operand(key_is_constant ? Operand(constant_key * (1 << shift_size))
+ Operand operand(key_is_constant ? Operand(constant_key << shift_size)
: Operand(key, LSL, shift_size));
__ add(scratch0(), external_pointer, operand);
if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
__ vcvt_f32_f64(double_scratch0().low(), value);
- __ vstr(double_scratch0().low(), scratch0(), 0);
+ __ vstr(double_scratch0().low(), scratch0(), additional_offset);
} else { // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS
- __ vstr(value, scratch0(), 0);
+ __ vstr(value, scratch0(), additional_offset);
}
} else {
Register value(ToRegister(instr->value()));
+ if (instr->additional_index() != 0 && !key_is_constant) {
+ __ add(scratch0(), key, Operand(instr->additional_index()));
+ }
MemOperand mem_operand(key_is_constant
- ? MemOperand(external_pointer, constant_key * (1 << shift_size))
- : MemOperand(external_pointer, key, LSL, shift_size));
+ ? MemOperand(external_pointer,
+ ((constant_key + instr->additional_index())
+ << shift_size))
+ : (instr->additional_index() == 0
+ ? MemOperand(external_pointer, key, LSL, shift_size)
+ : MemOperand(external_pointer, scratch0(), LSL, shift_size)));
switch (elements_kind) {
case EXTERNAL_PIXEL_ELEMENTS:
case EXTERNAL_BYTE_ELEMENTS:
@@ -3703,7 +3927,10 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -3740,20 +3967,22 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
__ cmp(scratch, Operand(from_map));
__ b(ne, &not_applicable);
__ mov(new_map_reg, Operand(to_map));
- if (from_kind == FAST_SMI_ONLY_ELEMENTS && to_kind == FAST_ELEMENTS) {
+
+ if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
__ str(new_map_reg, FieldMemOperand(object_reg, HeapObject::kMapOffset));
// Write barrier.
__ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg,
scratch, kLRHasBeenSaved, kDontSaveFPRegs);
- } else if (from_kind == FAST_SMI_ONLY_ELEMENTS &&
- to_kind == FAST_DOUBLE_ELEMENTS) {
+ } else if (IsFastSmiElementsKind(from_kind) &&
+ IsFastDoubleElementsKind(to_kind)) {
Register fixed_object_reg = ToRegister(instr->temp_reg());
ASSERT(fixed_object_reg.is(r2));
ASSERT(new_map_reg.is(r3));
__ mov(fixed_object_reg, object_reg);
CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(),
RelocInfo::CODE_TARGET, instr);
- } else if (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS) {
+ } else if (IsFastDoubleElementsKind(from_kind) &&
+ IsFastObjectElementsKind(to_kind)) {
Register fixed_object_reg = ToRegister(instr->temp_reg());
ASSERT(fixed_object_reg.is(r2));
ASSERT(new_map_reg.is(r3));
@@ -3787,7 +4016,7 @@ void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
};
DeferredStringCharCodeAt* deferred =
- new DeferredStringCharCodeAt(this, instr);
+ new(zone()) DeferredStringCharCodeAt(this, instr);
StringCharLoadGenerator::Generate(masm(),
ToRegister(instr->string()),
@@ -3842,7 +4071,7 @@ void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
};
DeferredStringCharFromCode* deferred =
- new DeferredStringCharFromCode(this, instr);
+ new(zone()) DeferredStringCharFromCode(this, instr);
ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
Register char_code = ToRegister(instr->char_code());
@@ -3916,7 +4145,7 @@ void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
Register src = ToRegister(instr->InputAt(0));
Register dst = ToRegister(instr->result());
- DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
+ DeferredNumberTagI* deferred = new(zone()) DeferredNumberTagI(this, instr);
__ SmiTag(dst, src, SetCC);
__ b(vs, deferred->entry());
__ bind(deferred->exit());
@@ -3987,7 +4216,7 @@ void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
Register temp1 = ToRegister(instr->TempAt(0));
Register temp2 = ToRegister(instr->TempAt(1));
- DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
+ DeferredNumberTagD* deferred = new(zone()) DeferredNumberTagD(this, instr);
if (FLAG_inline_new) {
__ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex);
__ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry());
@@ -4189,7 +4418,7 @@ void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
Register input_reg = ToRegister(input);
- DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);
+ DeferredTaggedToI* deferred = new(zone()) DeferredTaggedToI(this, instr);
// Optimistically untag the input.
// If the input is a HeapObject, SmiUntag will set the carry flag.
@@ -4338,14 +4567,22 @@ void LCodeGen::DoCheckMapCommon(Register reg,
}
-void LCodeGen::DoCheckMap(LCheckMap* instr) {
+void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
Register scratch = scratch0();
LOperand* input = instr->InputAt(0);
ASSERT(input->IsRegister());
Register reg = ToRegister(input);
- Handle<Map> map = instr->hydrogen()->map();
- DoCheckMapCommon(reg, scratch, map, instr->hydrogen()->mode(),
- instr->environment());
+
+ Label success;
+ SmallMapList* map_set = instr->hydrogen()->map_set();
+ for (int i = 0; i < map_set->length() - 1; i++) {
+ Handle<Map> map = map_set->at(i);
+ __ CompareMap(reg, scratch, map, &success, REQUIRE_EXACT_MAP);
+ __ b(eq, &success);
+ }
+ Handle<Map> map = map_set->last();
+ DoCheckMapCommon(reg, scratch, map, REQUIRE_EXACT_MAP, instr->environment());
+ __ bind(&success);
}
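Note: DoCheckMap becomes DoCheckMaps and receives the full map set from the hydrogen instruction; every map except the last branches to success on a match, and only the final comparison can deoptimize. The control flow, modelled as plain C++ (assumes a non-empty map set, as in the real code):

    #include <vector>

    // Returns true when the object's map is in the set; returning false stands
    // in for the deoptimization taken after the last comparison fails.
    bool CheckMapsModel(const std::vector<int>& map_set, int actual_map) {
      for (size_t i = 0; i + 1 < map_set.size(); ++i) {
        if (actual_map == map_set[i]) return true;  // __ b(eq, &success)
      }
      return actual_map == map_set.back();          // DoCheckMapCommon + deopt
    }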
@@ -4441,7 +4678,8 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
LAllocateObject* instr_;
};
- DeferredAllocateObject* deferred = new DeferredAllocateObject(this, instr);
+ DeferredAllocateObject* deferred =
+ new(zone()) DeferredAllocateObject(this, instr);
Register result = ToRegister(instr->result());
Register scratch = ToRegister(instr->TempAt(0));
@@ -4464,6 +4702,14 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
deferred->entry(),
TAG_OBJECT);
+ __ bind(deferred->exit());
+ if (FLAG_debug_code) {
+ Label is_in_new_space;
+ __ JumpIfInNewSpace(result, scratch, &is_in_new_space);
+ __ Abort("Allocated object is not in new-space");
+ __ bind(&is_in_new_space);
+ }
+
// Load the initial map.
Register map = scratch;
__ LoadHeapObject(map, constructor);
@@ -4482,14 +4728,14 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
__ str(scratch, FieldMemOperand(result, property_offset));
}
}
-
- __ bind(deferred->exit());
}
void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
Register result = ToRegister(instr->result());
Handle<JSFunction> constructor = instr->hydrogen()->constructor();
+ Handle<Map> initial_map(constructor->initial_map());
+ int instance_size = initial_map->instance_size();
// TODO(3095996): Get rid of this. For now, we need to make the
// result register contain a valid pointer because it is already
@@ -4497,9 +4743,9 @@ void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
__ mov(result, Operand(0));
PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
- __ LoadHeapObject(r0, constructor);
+ __ mov(r0, Operand(Smi::FromInt(instance_size)));
__ push(r0);
- CallRuntimeFromDeferred(Runtime::kNewObject, 1, instr);
+ CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr);
__ StoreToSafepointRegisterSlot(r0, result);
}
@@ -4511,8 +4757,9 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
// Deopt if the array literal boilerplate ElementsKind is of a type different
// than the expected one. The check isn't necessary if the boilerplate has
- // already been converted to FAST_ELEMENTS.
- if (boilerplate_elements_kind != FAST_ELEMENTS) {
+ // already been converted to TERMINAL_FAST_ELEMENTS_KIND.
+ if (CanTransitionToMoreGeneralFastElementsKind(
+ boilerplate_elements_kind, true)) {
__ LoadHeapObject(r1, instr->hydrogen()->boilerplate_object());
// Load map into r2.
__ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
@@ -4633,9 +4880,10 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
__ str(r2, FieldMemOperand(result, total_offset + 4));
}
} else if (elements->IsFixedArray()) {
+ Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
for (int i = 0; i < elements_length; i++) {
int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i);
- Handle<Object> value = JSObject::GetElement(object, i);
+ Handle<Object> value(fast_elements->get(i));
if (value->IsJSObject()) {
Handle<JSObject> value_object = Handle<JSObject>::cast(value);
__ add(r2, result, Operand(*offset));
@@ -4659,6 +4907,24 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
int size = instr->hydrogen()->total_size();
+ ElementsKind boilerplate_elements_kind =
+ instr->hydrogen()->boilerplate()->GetElementsKind();
+
+ // Deopt if the array literal boilerplate ElementsKind is of a type different
+ // than the expected one. The check isn't necessary if the boilerplate has
+ // already been converted to TERMINAL_FAST_ELEMENTS_KIND.
+ if (CanTransitionToMoreGeneralFastElementsKind(
+ boilerplate_elements_kind, true)) {
+ __ LoadHeapObject(r1, instr->hydrogen()->boilerplate());
+ // Load map into r2.
+ __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
+ // Load the map's "bit field 2".
+ __ ldrb(r2, FieldMemOperand(r2, Map::kBitField2Offset));
+ // Retrieve elements_kind from bit field 2.
+ __ ubfx(r2, r2, Map::kElementsKindShift, Map::kElementsKindBitCount);
+ __ cmp(r2, Operand(boilerplate_elements_kind));
+ DeoptimizeIf(ne, instr->environment());
+ }
// Allocate all objects that are part of the literal in one big
// allocation. This avoids multiple limit checks.
@@ -4923,6 +5189,8 @@ void LCodeGen::EnsureSpaceForLazyDeopt() {
int current_pc = masm()->pc_offset();
int patch_size = Deoptimizer::patch_size();
if (current_pc < last_lazy_deopt_pc_ + patch_size) {
+ // Block literal pool emission for duration of padding.
+ Assembler::BlockConstPoolScope block_const_pool(masm());
int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc;
ASSERT_EQ(0, padding_size % Assembler::kInstrSize);
while (padding_size > 0) {
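Note: the new BlockConstPoolScope keeps the assembler from dumping a literal pool in the middle of the nop padding, so the padded region stays patchable. The padding arithmetic itself is unchanged and amounts to the following (kInstrSize of 4 is the ARM value, shown only for illustration):

    // Number of nops needed so that at least patch_size bytes separate two
    // consecutive lazy-deopt points; padding_size is asserted to be a multiple
    // of the instruction size in the real code.
    constexpr int kInstrSize = 4;

    int LazyDeoptNops(int current_pc, int last_lazy_deopt_pc, int patch_size) {
      if (current_pc >= last_lazy_deopt_pc + patch_size) return 0;
      return (last_lazy_deopt_pc + patch_size - current_pc) / kInstrSize;
    }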
@@ -4954,7 +5222,7 @@ void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
Register strict = scratch0();
__ mov(strict, Operand(Smi::FromInt(strict_mode_flag())));
__ Push(object, key, strict);
- ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
+ ASSERT(instr->HasPointerMap());
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
SafepointGenerator safepoint_generator(
@@ -4967,7 +5235,7 @@ void LCodeGen::DoIn(LIn* instr) {
Register obj = ToRegister(instr->object());
Register key = ToRegister(instr->key());
__ Push(key, obj);
- ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
+ ASSERT(instr->HasPointerMap());
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
SafepointGenerator safepoint_generator(this, pointers, Safepoint::kLazyDeopt);
@@ -5017,7 +5285,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
ASSERT(instr->hydrogen()->is_backwards_branch());
// Perform stack overflow check if this goto needs it before jumping.
DeferredStackCheck* deferred_stack_check =
- new DeferredStackCheck(this, instr);
+ new(zone()) DeferredStackCheck(this, instr);
__ LoadRoot(ip, Heap::kStackLimitRootIndex);
__ cmp(sp, Operand(ip));
__ b(lo, deferred_stack_check->entry());
diff --git a/deps/v8/src/arm/lithium-codegen-arm.h b/deps/v8/src/arm/lithium-codegen-arm.h
index adb6e1bb73..f35c69b8a3 100644
--- a/deps/v8/src/arm/lithium-codegen-arm.h
+++ b/deps/v8/src/arm/lithium-codegen-arm.h
@@ -43,22 +43,26 @@ class SafepointGenerator;
class LCodeGen BASE_EMBEDDED {
public:
- LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
+ LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info,
+ Zone* zone)
: chunk_(chunk),
masm_(assembler),
info_(info),
current_block_(-1),
current_instruction_(-1),
instructions_(chunk->instructions()),
- deoptimizations_(4),
- deopt_jump_table_(4),
- deoptimization_literals_(8),
+ deoptimizations_(4, zone),
+ deopt_jump_table_(4, zone),
+ deoptimization_literals_(8, zone),
inlined_function_count_(0),
scope_(info->scope()),
status_(UNUSED),
- deferred_(8),
+ translations_(zone),
+ deferred_(8, zone),
osr_pc_offset_(-1),
last_lazy_deopt_pc_(0),
+ safepoints_(zone),
+ zone_(zone),
resolver_(this),
expected_safepoint_kind_(Safepoint::kSimple) {
PopulateDeoptimizationLiteralsWithInlinedFunctions();
@@ -71,6 +75,7 @@ class LCodeGen BASE_EMBEDDED {
Isolate* isolate() const { return info_->isolate(); }
Factory* factory() const { return isolate()->factory(); }
Heap* heap() const { return isolate()->heap(); }
+ Zone* zone() const { return zone_; }
// Support for converting LOperands to assembler types.
// LOperand must be a register.
@@ -176,7 +181,7 @@ class LCodeGen BASE_EMBEDDED {
void Abort(const char* format, ...);
void Comment(const char* format, ...);
- void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code); }
+ void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
// Code generation passes. Returns true if code generation should
// continue.
@@ -215,12 +220,18 @@ class LCodeGen BASE_EMBEDDED {
int argc,
LInstruction* instr);
+ enum R1State {
+ R1_UNINITIALIZED,
+ R1_CONTAINS_TARGET
+ };
+
// Generate a direct call to a known function. Expects the function
// to be in r1.
void CallKnownFunction(Handle<JSFunction> function,
int arity,
LInstruction* instr,
- CallKind call_kind);
+ CallKind call_kind,
+ R1State r1_state);
void LoadHeapObject(Register result, Handle<HeapObject> object);
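Note: the new R1State argument lets a caller such as DoInvokeFunction tell CallKnownFunction that r1 already holds the callee, so the LoadHeapObject is skipped. A schematic of that contract with placeholder types (not the real MacroAssembler API):

    enum R1State { R1_UNINITIALIZED, R1_CONTAINS_TARGET };

    struct RegisterFile { int r1 = 0; };  // stand-in for machine state

    void CallKnownFunctionModel(RegisterFile* regs,
                                int function_object,
                                R1State r1_state) {
      if (r1_state == R1_UNINITIALIZED) {
        regs->r1 = function_object;  // __ LoadHeapObject(r1, function)
      }
      // ... the call itself always goes through r1 ...
    }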
@@ -308,7 +319,8 @@ class LCodeGen BASE_EMBEDDED {
void EmitLoadFieldOrConstantFunction(Register result,
Register object,
Handle<Map> type,
- Handle<String> name);
+ Handle<String> name,
+ LEnvironment* env);
// Emits optimized code to deep-copy the contents of statically known
// object graphs (e.g. object literal boilerplate).
@@ -317,6 +329,17 @@ class LCodeGen BASE_EMBEDDED {
Register source,
int* offset);
+ // Emit optimized code for integer division.
+ // Inputs are signed.
+ // All registers are clobbered.
+ // If 'remainder' is no_reg, it is not computed.
+ void EmitSignedIntegerDivisionByConstant(Register result,
+ Register dividend,
+ int32_t divisor,
+ Register remainder,
+ Register scratch,
+ LEnvironment* environment);
+
struct JumpTableEntry {
explicit inline JumpTableEntry(Address entry)
: label(),
@@ -349,6 +372,8 @@ class LCodeGen BASE_EMBEDDED {
// itself is emitted at the end of the generated code.
SafepointTableBuilder safepoints_;
+ Zone* zone_;
+
// Compiler from a set of parallel moves to a sequential list of moves.
LGapResolver resolver_;
diff --git a/deps/v8/src/arm/lithium-gap-resolver-arm.cc b/deps/v8/src/arm/lithium-gap-resolver-arm.cc
index cefca476ad..c100720d89 100644
--- a/deps/v8/src/arm/lithium-gap-resolver-arm.cc
+++ b/deps/v8/src/arm/lithium-gap-resolver-arm.cc
@@ -36,7 +36,7 @@ namespace internal {
static const Register kSavedValueRegister = { 9 };
LGapResolver::LGapResolver(LCodeGen* owner)
- : cgen_(owner), moves_(32), root_index_(0), in_cycle_(false),
+ : cgen_(owner), moves_(32, owner->zone()), root_index_(0), in_cycle_(false),
saved_destination_(NULL) { }
@@ -79,7 +79,7 @@ void LGapResolver::BuildInitialMoveList(LParallelMove* parallel_move) {
const ZoneList<LMoveOperands>* moves = parallel_move->move_operands();
for (int i = 0; i < moves->length(); ++i) {
LMoveOperands move = moves->at(i);
- if (!move.IsRedundant()) moves_.Add(move);
+ if (!move.IsRedundant()) moves_.Add(move, cgen_->zone());
}
Verify();
}
diff --git a/deps/v8/src/arm/macro-assembler-arm.cc b/deps/v8/src/arm/macro-assembler-arm.cc
index 857c2bf770..7c49e9e58a 100644
--- a/deps/v8/src/arm/macro-assembler-arm.cc
+++ b/deps/v8/src/arm/macro-assembler-arm.cc
@@ -1868,10 +1868,12 @@ void MacroAssembler::CompareRoot(Register obj,
void MacroAssembler::CheckFastElements(Register map,
Register scratch,
Label* fail) {
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
- STATIC_ASSERT(FAST_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
+ STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_ELEMENTS == 2);
+ STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
- cmp(scratch, Operand(Map::kMaximumBitField2FastElementValue));
+ cmp(scratch, Operand(Map::kMaximumBitField2FastHoleyElementValue));
b(hi, fail);
}
@@ -1879,22 +1881,25 @@ void MacroAssembler::CheckFastElements(Register map,
void MacroAssembler::CheckFastObjectElements(Register map,
Register scratch,
Label* fail) {
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
- STATIC_ASSERT(FAST_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
+ STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_ELEMENTS == 2);
+ STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
- cmp(scratch, Operand(Map::kMaximumBitField2FastSmiOnlyElementValue));
+ cmp(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
b(ls, fail);
- cmp(scratch, Operand(Map::kMaximumBitField2FastElementValue));
+ cmp(scratch, Operand(Map::kMaximumBitField2FastHoleyElementValue));
b(hi, fail);
}
-void MacroAssembler::CheckFastSmiOnlyElements(Register map,
- Register scratch,
- Label* fail) {
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
+void MacroAssembler::CheckFastSmiElements(Register map,
+ Register scratch,
+ Label* fail) {
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
+ STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
- cmp(scratch, Operand(Map::kMaximumBitField2FastSmiOnlyElementValue));
+ cmp(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
b(hi, fail);
}
@@ -1995,24 +2000,27 @@ void MacroAssembler::CompareMap(Register obj,
Label* early_success,
CompareMapMode mode) {
ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
- cmp(scratch, Operand(map));
- if (mode == ALLOW_ELEMENT_TRANSITION_MAPS) {
- Map* transitioned_fast_element_map(
- map->LookupElementsTransitionMap(FAST_ELEMENTS, NULL));
- ASSERT(transitioned_fast_element_map == NULL ||
- map->elements_kind() != FAST_ELEMENTS);
- if (transitioned_fast_element_map != NULL) {
- b(eq, early_success);
- cmp(scratch, Operand(Handle<Map>(transitioned_fast_element_map)));
- }
+ CompareMap(scratch, map, early_success, mode);
+}
+
- Map* transitioned_double_map(
- map->LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, NULL));
- ASSERT(transitioned_double_map == NULL ||
- map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
- if (transitioned_double_map != NULL) {
- b(eq, early_success);
- cmp(scratch, Operand(Handle<Map>(transitioned_double_map)));
+void MacroAssembler::CompareMap(Register obj_map,
+ Handle<Map> map,
+ Label* early_success,
+ CompareMapMode mode) {
+ cmp(obj_map, Operand(map));
+ if (mode == ALLOW_ELEMENT_TRANSITION_MAPS) {
+ ElementsKind kind = map->elements_kind();
+ if (IsFastElementsKind(kind)) {
+ bool packed = IsFastPackedElementsKind(kind);
+ Map* current_map = *map;
+ while (CanTransitionToMoreGeneralFastElementsKind(kind, packed)) {
+ kind = GetNextMoreGeneralFastElementsKind(kind, packed);
+ current_map = current_map->LookupElementsTransitionMap(kind);
+ if (!current_map) break;
+ b(eq, early_success);
+ cmp(obj_map, Operand(Handle<Map>(current_map)));
+ }
}
}
}
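Note: with ALLOW_ELEMENT_TRANSITION_MAPS the comparison no longer hard-codes the two legacy transitions; it walks every strictly more general fast elements kind reachable from the map's kind and also accepts any transition map found along the way. A simplified sketch of the walk (the real lattice additionally distinguishes packed and holey variants):

    #include <vector>

    enum FastKind { kSmi = 0, kDouble = 1, kObject = 2, kLastFastKind = kObject };

    // Collect the kinds whose transition maps the comparison loop would also
    // accept, in the order they are visited.
    std::vector<FastKind> MoreGeneralFastKinds(FastKind kind) {
      std::vector<FastKind> kinds;
      while (kind < kLastFastKind) {
        kind = static_cast<FastKind>(kind + 1);
        kinds.push_back(kind);
      }
      return kinds;
    }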
@@ -2865,28 +2873,38 @@ void MacroAssembler::LoadTransitionedArrayMapConditional(
ldr(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset));
// Check that the function's map is the same as the expected cached map.
- int expected_index =
- Context::GetContextMapIndexFromElementsKind(expected_kind);
- ldr(ip, MemOperand(scratch, Context::SlotOffset(expected_index)));
- cmp(map_in_out, ip);
+ ldr(scratch,
+ MemOperand(scratch,
+ Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
+ size_t offset = expected_kind * kPointerSize +
+ FixedArrayBase::kHeaderSize;
+ cmp(map_in_out, scratch);
b(ne, no_map_match);
// Use the transitioned cached map.
- int trans_index =
- Context::GetContextMapIndexFromElementsKind(transitioned_kind);
- ldr(map_in_out, MemOperand(scratch, Context::SlotOffset(trans_index)));
+ offset = transitioned_kind * kPointerSize +
+ FixedArrayBase::kHeaderSize;
+ ldr(map_in_out, FieldMemOperand(scratch, offset));
}
void MacroAssembler::LoadInitialArrayMap(
- Register function_in, Register scratch, Register map_out) {
+ Register function_in, Register scratch,
+ Register map_out, bool can_have_holes) {
ASSERT(!function_in.is(map_out));
Label done;
ldr(map_out, FieldMemOperand(function_in,
JSFunction::kPrototypeOrInitialMapOffset));
if (!FLAG_smi_only_arrays) {
- LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
- FAST_ELEMENTS,
+ ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
+ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+ kind,
+ map_out,
+ scratch,
+ &done);
+ } else if (can_have_holes) {
+ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+ FAST_HOLEY_SMI_ELEMENTS,
map_out,
scratch,
&done);
@@ -3710,22 +3728,35 @@ void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
}
-bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
- if (r1.is(r2)) return true;
- if (r1.is(r3)) return true;
- if (r1.is(r4)) return true;
- if (r2.is(r3)) return true;
- if (r2.is(r4)) return true;
- if (r3.is(r4)) return true;
- return false;
+#ifdef DEBUG
+bool AreAliased(Register reg1,
+ Register reg2,
+ Register reg3,
+ Register reg4,
+ Register reg5,
+ Register reg6) {
+ int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
+ reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid();
+
+ RegList regs = 0;
+ if (reg1.is_valid()) regs |= reg1.bit();
+ if (reg2.is_valid()) regs |= reg2.bit();
+ if (reg3.is_valid()) regs |= reg3.bit();
+ if (reg4.is_valid()) regs |= reg4.bit();
+ if (reg5.is_valid()) regs |= reg5.bit();
+ if (reg6.is_valid()) regs |= reg6.bit();
+ int n_of_non_aliasing_regs = NumRegs(regs);
+
+ return n_of_valid_regs != n_of_non_aliasing_regs;
}
+#endif
CodePatcher::CodePatcher(byte* address, int instructions)
: address_(address),
instructions_(instructions),
size_(instructions * Assembler::kInstrSize),
- masm_(Isolate::Current(), address, size_ + Assembler::kGap) {
+ masm_(NULL, address, size_ + Assembler::kGap) {
// Create a new macro assembler pointing to the address of the code to patch.
// The size is adjusted with kGap in order for the assembler to generate size
// bytes of instructions without failing with buffer size constraints.
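Note: AreAliased above is now debug-only, accepts up to six registers, and detects aliasing by comparing the count of valid registers against the population count of their combined bit set. The same check over plain register codes (codes assumed to be below 32):

    #include <bitset>
    #include <vector>

    // Registers modelled as small non-negative codes; a negative code means
    // "no register" (the no_reg default arguments).
    bool AreAliasedModel(const std::vector<int>& reg_codes) {
      int valid = 0;
      std::bitset<32> bits;
      for (int code : reg_codes) {
        if (code < 0) continue;
        ++valid;
        bits.set(static_cast<size_t>(code));
      }
      return valid != static_cast<int>(bits.count());  // duplicates collapse
    }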
diff --git a/deps/v8/src/arm/macro-assembler-arm.h b/deps/v8/src/arm/macro-assembler-arm.h
index 47afa93a6e..6b7d116357 100644
--- a/deps/v8/src/arm/macro-assembler-arm.h
+++ b/deps/v8/src/arm/macro-assembler-arm.h
@@ -85,7 +85,14 @@ enum SmiCheck { INLINE_SMI_CHECK, OMIT_SMI_CHECK };
enum LinkRegisterStatus { kLRHasNotBeenSaved, kLRHasBeenSaved };
-bool AreAliased(Register r1, Register r2, Register r3, Register r4);
+#ifdef DEBUG
+bool AreAliased(Register reg1,
+ Register reg2,
+ Register reg3 = no_reg,
+ Register reg4 = no_reg,
+ Register reg5 = no_reg,
+ Register reg6 = no_reg);
+#endif
// MacroAssembler implements a collection of frequently used macros.
@@ -505,7 +512,8 @@ class MacroAssembler: public Assembler {
// Load the initial map for new Arrays from a JSFunction.
void LoadInitialArrayMap(Register function_in,
Register scratch,
- Register map_out);
+ Register map_out,
+ bool can_have_holes);
void LoadGlobalFunction(int index, Register function);
@@ -795,9 +803,9 @@ class MacroAssembler: public Assembler {
// Check if a map for a JSObject indicates that the object has fast smi only
// elements. Jump to the specified label if it does not.
- void CheckFastSmiOnlyElements(Register map,
- Register scratch,
- Label* fail);
+ void CheckFastSmiElements(Register map,
+ Register scratch,
+ Label* fail);
// Check to see if maybe_number can be stored as a double in
// FastDoubleElements. If it can, store it at the index specified by key in
@@ -823,6 +831,13 @@ class MacroAssembler: public Assembler {
Label* early_success,
CompareMapMode mode = REQUIRE_EXACT_MAP);
+ // As above, but the map of the object is already loaded into the register
+ // which is preserved by the code generated.
+ void CompareMap(Register obj_map,
+ Handle<Map> map,
+ Label* early_success,
+ CompareMapMode mode = REQUIRE_EXACT_MAP);
+
// Check if the map of an object is equal to a specified map and branch to
// label if not. Skip the smi check if not required (object is known to be a
// heap object). If mode is ALLOW_ELEMENT_TRANSITION_MAPS, then also match
@@ -1321,7 +1336,6 @@ class MacroAssembler: public Assembler {
};
-#ifdef ENABLE_DEBUGGER_SUPPORT
// The code patcher is used to patch (typically) small parts of code e.g. for
// debugging and other types of instrumentation. When using the code patcher
// the exact number of bytes specified must be emitted. It is not legal to emit
@@ -1351,7 +1365,6 @@ class CodePatcher {
int size_; // Number of bytes of the expected patch size.
MacroAssembler masm_; // Macro assembler used to generate the code.
};
-#endif // ENABLE_DEBUGGER_SUPPORT
// -----------------------------------------------------------------------------
diff --git a/deps/v8/src/arm/regexp-macro-assembler-arm.cc b/deps/v8/src/arm/regexp-macro-assembler-arm.cc
index 10ff2dd96c..66cdd8435e 100644
--- a/deps/v8/src/arm/regexp-macro-assembler-arm.cc
+++ b/deps/v8/src/arm/regexp-macro-assembler-arm.cc
@@ -1,4 +1,4 @@
-// Copyright 2009 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -43,45 +43,49 @@ namespace internal {
#ifndef V8_INTERPRETED_REGEXP
/*
* This assembler uses the following register assignment convention
+ * - r4 : Temporarily stores the index of capture start after a matching pass
+ * for a global regexp.
* - r5 : Pointer to current code object (Code*) including heap object tag.
* - r6 : Current position in input, as negative offset from end of string.
* Please notice that this is the byte offset, not the character offset!
* - r7 : Currently loaded character. Must be loaded using
* LoadCurrentCharacter before using any of the dispatch methods.
- * - r8 : points to tip of backtrack stack
+ * - r8 : Points to tip of backtrack stack
* - r9 : Unused, might be used by C code and expected unchanged.
* - r10 : End of input (points to byte after last character in input).
* - r11 : Frame pointer. Used to access arguments, local variables and
* RegExp registers.
* - r12 : IP register, used by assembler. Very volatile.
- * - r13/sp : points to tip of C stack.
+ * - r13/sp : Points to tip of C stack.
*
* The remaining registers are free for computations.
* Each call to a public method should retain this convention.
*
* The stack will have the following structure:
- * - fp[52] Isolate* isolate (Address of the current isolate)
- * - fp[48] direct_call (if 1, direct call from JavaScript code,
- * if 0, call through the runtime system).
- * - fp[44] stack_area_base (High end of the memory area to use as
- * backtracking stack).
+ * - fp[56] Isolate* isolate (address of the current isolate)
+ * - fp[52] direct_call (if 1, direct call from JavaScript code,
+ * if 0, call through the runtime system).
+ * - fp[48] stack_area_base (high end of the memory area to use as
+ * backtracking stack).
+ * - fp[44] capture array size (may fit multiple sets of matches)
* - fp[40] int* capture_array (int[num_saved_registers_], for output).
* - fp[36] secondary link/return address used by native call.
* --- sp when called ---
- * - fp[32] return address (lr).
- * - fp[28] old frame pointer (r11).
+ * - fp[32] return address (lr).
+ * - fp[28] old frame pointer (r11).
* - fp[0..24] backup of registers r4..r10.
* --- frame pointer ----
- * - fp[-4] end of input (Address of end of string).
- * - fp[-8] start of input (Address of first character in string).
+ * - fp[-4] end of input (address of end of string).
+ * - fp[-8] start of input (address of first character in string).
* - fp[-12] start index (character index of start).
* - fp[-16] void* input_string (location of a handle containing the string).
- * - fp[-20] Offset of location before start of input (effectively character
+ * - fp[-20] success counter (only for global regexps to count matches).
+ * - fp[-24] Offset of location before start of input (effectively character
* position -1). Used to initialize capture registers to a
* non-position.
- * - fp[-24] At start (if 1, we are starting at the start of the
- *           string, otherwise 0)
- * - fp[-28] register 0 (Only positions must be stored in the first
+ * - fp[-28] register 0 (Only positions must be stored in the first
* - register 1 num_saved_registers_ registers)
* - ...
* - register num_registers-1
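Note: the caller-supplied part of the frame described above gains one slot, the capture array size at fp[44], and every slot above it moves up by one word; among the locals, fp[-20] now holds a success counter for global regexps and the old "at start" flag is gone. Restated as byte offsets from fp, purely as documentation mirroring the comment and the header constants further down (not additional declarations):

    constexpr int kIsolate            = 56;  // Isolate*
    constexpr int kDirectCall         = 52;
    constexpr int kStackHighEnd       = 48;
    constexpr int kNumOutputRegisters = 44;  // new: capture array size
    constexpr int kRegisterOutput     = 40;
    constexpr int kSecondaryReturn    = 36;
    constexpr int kSuccessfulCaptures = -20; // new local: success counter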
@@ -115,8 +119,10 @@ namespace internal {
RegExpMacroAssemblerARM::RegExpMacroAssemblerARM(
Mode mode,
- int registers_to_save)
- : masm_(new MacroAssembler(Isolate::Current(), NULL, kRegExpCodeSize)),
+ int registers_to_save,
+ Zone* zone)
+ : NativeRegExpMacroAssembler(zone),
+ masm_(new MacroAssembler(Isolate::Current(), NULL, kRegExpCodeSize)),
mode_(mode),
num_registers_(registers_to_save),
num_saved_registers_(registers_to_save),
@@ -197,9 +203,9 @@ void RegExpMacroAssemblerARM::CheckCharacterGT(uc16 limit, Label* on_greater) {
void RegExpMacroAssemblerARM::CheckAtStart(Label* on_at_start) {
Label not_at_start;
// Did we start the match at the start of the string at all?
- __ ldr(r0, MemOperand(frame_pointer(), kAtStart));
+ __ ldr(r0, MemOperand(frame_pointer(), kStartIndex));
__ cmp(r0, Operand(0, RelocInfo::NONE));
- BranchOrBacktrack(eq, &not_at_start);
+ BranchOrBacktrack(ne, &not_at_start);
// If we did, are we still at the start of the input?
__ ldr(r1, MemOperand(frame_pointer(), kInputStart));
@@ -212,9 +218,9 @@ void RegExpMacroAssemblerARM::CheckAtStart(Label* on_at_start) {
void RegExpMacroAssemblerARM::CheckNotAtStart(Label* on_not_at_start) {
// Did we start the match at the start of the string at all?
- __ ldr(r0, MemOperand(frame_pointer(), kAtStart));
+ __ ldr(r0, MemOperand(frame_pointer(), kStartIndex));
__ cmp(r0, Operand(0, RelocInfo::NONE));
- BranchOrBacktrack(eq, on_not_at_start);
+ BranchOrBacktrack(ne, on_not_at_start);
// If we did, are we still at the start of the input?
__ ldr(r1, MemOperand(frame_pointer(), kInputStart));
__ add(r0, end_of_input_address(), Operand(current_input_offset()));
@@ -432,16 +438,6 @@ void RegExpMacroAssemblerARM::CheckNotBackReference(
}
-void RegExpMacroAssemblerARM::CheckNotRegistersEqual(int reg1,
- int reg2,
- Label* on_not_equal) {
- __ ldr(r0, register_location(reg1));
- __ ldr(r1, register_location(reg2));
- __ cmp(r0, r1);
- BranchOrBacktrack(ne, on_not_equal);
-}
-
-
void RegExpMacroAssemblerARM::CheckNotCharacter(unsigned c,
Label* on_not_equal) {
__ cmp(current_character(), Operand(c));
@@ -452,8 +448,12 @@ void RegExpMacroAssemblerARM::CheckNotCharacter(unsigned c,
void RegExpMacroAssemblerARM::CheckCharacterAfterAnd(uint32_t c,
uint32_t mask,
Label* on_equal) {
- __ and_(r0, current_character(), Operand(mask));
- __ cmp(r0, Operand(c));
+ if (c == 0) {
+ __ tst(current_character(), Operand(mask));
+ } else {
+ __ and_(r0, current_character(), Operand(mask));
+ __ cmp(r0, Operand(c));
+ }
BranchOrBacktrack(eq, on_equal);
}
@@ -461,8 +461,12 @@ void RegExpMacroAssemblerARM::CheckCharacterAfterAnd(uint32_t c,
void RegExpMacroAssemblerARM::CheckNotCharacterAfterAnd(unsigned c,
unsigned mask,
Label* on_not_equal) {
- __ and_(r0, current_character(), Operand(mask));
- __ cmp(r0, Operand(c));
+ if (c == 0) {
+ __ tst(current_character(), Operand(mask));
+ } else {
+ __ and_(r0, current_character(), Operand(mask));
+ __ cmp(r0, Operand(c));
+ }
BranchOrBacktrack(ne, on_not_equal);
}
@@ -480,6 +484,44 @@ void RegExpMacroAssemblerARM::CheckNotCharacterAfterMinusAnd(
}
+void RegExpMacroAssemblerARM::CheckCharacterInRange(
+ uc16 from,
+ uc16 to,
+ Label* on_in_range) {
+ __ sub(r0, current_character(), Operand(from));
+ __ cmp(r0, Operand(to - from));
+ BranchOrBacktrack(ls, on_in_range); // Unsigned lower-or-same condition.
+}
+
+
+void RegExpMacroAssemblerARM::CheckCharacterNotInRange(
+ uc16 from,
+ uc16 to,
+ Label* on_not_in_range) {
+ __ sub(r0, current_character(), Operand(from));
+ __ cmp(r0, Operand(to - from));
+ BranchOrBacktrack(hi, on_not_in_range); // Unsigned higher condition.
+}
+
+
+void RegExpMacroAssemblerARM::CheckBitInTable(
+ Handle<ByteArray> table,
+ Label* on_bit_set) {
+ __ mov(r0, Operand(table));
+ if (mode_ != ASCII || kTableMask != String::kMaxAsciiCharCode) {
+ __ and_(r1, current_character(), Operand(kTableSize - 1));
+ __ add(r1, r1, Operand(ByteArray::kHeaderSize - kHeapObjectTag));
+ } else {
+ __ add(r1,
+ current_character(),
+ Operand(ByteArray::kHeaderSize - kHeapObjectTag));
+ }
+ __ ldrb(r0, MemOperand(r0, r1));
+ __ cmp(r0, Operand(0));
+ BranchOrBacktrack(ne, on_bit_set);
+}
+
+
bool RegExpMacroAssemblerARM::CheckSpecialCharacterClass(uc16 type,
Label* on_no_match) {
// Range checks (c in min..max) are generally implemented by an unsigned
@@ -609,6 +651,7 @@ void RegExpMacroAssemblerARM::Fail() {
Handle<HeapObject> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
+ Label return_r0;
// Finalize code - write the entry point code now we know how many
// registers we need.
@@ -632,8 +675,9 @@ Handle<HeapObject> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
// Set frame pointer in space for it if this is not a direct call
// from generated code.
__ add(frame_pointer(), sp, Operand(4 * kPointerSize));
+ __ mov(r0, Operand(0, RelocInfo::NONE));
+ __ push(r0); // Make room for success counter and initialize it to 0.
__ push(r0); // Make room for "position - 1" constant (value is irrelevant).
- __ push(r0); // Make room for "at start" constant (value is irrelevant).
// Check if we have space on the stack for registers.
Label stack_limit_hit;
Label stack_ok;
@@ -652,13 +696,13 @@ Handle<HeapObject> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
// Exit with OutOfMemory exception. There is not enough space on the stack
// for our working registers.
__ mov(r0, Operand(EXCEPTION));
- __ jmp(&exit_label_);
+ __ jmp(&return_r0);
__ bind(&stack_limit_hit);
CallCheckStackGuardState(r0);
__ cmp(r0, Operand(0, RelocInfo::NONE));
// If returned value is non-zero, we exit with the returned value as result.
- __ b(ne, &exit_label_);
+ __ b(ne, &return_r0);
__ bind(&stack_ok);
@@ -679,41 +723,45 @@ Handle<HeapObject> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
// position registers.
__ str(r0, MemOperand(frame_pointer(), kInputStartMinusOne));
- // Determine whether the start index is zero, that is at the start of the
- // string, and store that value in a local variable.
- __ cmp(r1, Operand(0));
- __ mov(r1, Operand(1), LeaveCC, eq);
- __ mov(r1, Operand(0, RelocInfo::NONE), LeaveCC, ne);
- __ str(r1, MemOperand(frame_pointer(), kAtStart));
+ // Initialize code pointer register
+ __ mov(code_pointer(), Operand(masm_->CodeObject()));
+ Label load_char_start_regexp, start_regexp;
+ // Load newline if index is at start, previous character otherwise.
+ __ cmp(r1, Operand(0, RelocInfo::NONE));
+ __ b(ne, &load_char_start_regexp);
+ __ mov(current_character(), Operand('\n'), LeaveCC, eq);
+ __ jmp(&start_regexp);
+
+ // Global regexp restarts matching here.
+ __ bind(&load_char_start_regexp);
+ // Load previous char as initial value of current character register.
+ LoadCurrentCharacterUnchecked(-1, 1);
+ __ bind(&start_regexp);
+
+ // Initialize on-stack registers.
if (num_saved_registers_ > 0) { // Always is, if generated from a regexp.
// Fill saved registers with initial value = start offset - 1
-
- // Address of register 0.
- __ add(r1, frame_pointer(), Operand(kRegisterZero));
- __ mov(r2, Operand(num_saved_registers_));
- Label init_loop;
- __ bind(&init_loop);
- __ str(r0, MemOperand(r1, kPointerSize, NegPostIndex));
- __ sub(r2, r2, Operand(1), SetCC);
- __ b(ne, &init_loop);
+ if (num_saved_registers_ > 8) {
+ // Address of register 0.
+ __ add(r1, frame_pointer(), Operand(kRegisterZero));
+ __ mov(r2, Operand(num_saved_registers_));
+ Label init_loop;
+ __ bind(&init_loop);
+ __ str(r0, MemOperand(r1, kPointerSize, NegPostIndex));
+ __ sub(r2, r2, Operand(1), SetCC);
+ __ b(ne, &init_loop);
+ } else {
+ for (int i = 0; i < num_saved_registers_; i++) {
+ __ str(r0, register_location(i));
+ }
+ }
}
// Initialize backtrack stack pointer.
__ ldr(backtrack_stackpointer(), MemOperand(frame_pointer(), kStackHighEnd));
- // Initialize code pointer register
- __ mov(code_pointer(), Operand(masm_->CodeObject()));
- // Load previous char as initial value of current character register.
- Label at_start;
- __ ldr(r0, MemOperand(frame_pointer(), kAtStart));
- __ cmp(r0, Operand(0, RelocInfo::NONE));
- __ b(ne, &at_start);
- LoadCurrentCharacterUnchecked(-1, 1); // Load previous char.
- __ jmp(&start_label_);
- __ bind(&at_start);
- __ mov(current_character(), Operand('\n'));
- __ jmp(&start_label_);
+ __ jmp(&start_label_);
// Exit code:
if (success_label_.is_linked()) {
@@ -740,6 +788,10 @@ Handle<HeapObject> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
for (int i = 0; i < num_saved_registers_; i += 2) {
__ ldr(r2, register_location(i));
__ ldr(r3, register_location(i + 1));
+ if (i == 0 && global_with_zero_length_check()) {
+ // Keep capture start in r4 for the zero-length check later.
+ __ mov(r4, r2);
+ }
if (mode_ == UC16) {
__ add(r2, r1, Operand(r2, ASR, 1));
__ add(r3, r1, Operand(r3, ASR, 1));
@@ -751,10 +803,58 @@ Handle<HeapObject> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
__ str(r3, MemOperand(r0, kPointerSize, PostIndex));
}
}
- __ mov(r0, Operand(SUCCESS));
+
+ if (global()) {
+ // Restart matching if the regular expression is flagged as global.
+ __ ldr(r0, MemOperand(frame_pointer(), kSuccessfulCaptures));
+ __ ldr(r1, MemOperand(frame_pointer(), kNumOutputRegisters));
+ __ ldr(r2, MemOperand(frame_pointer(), kRegisterOutput));
+ // Increment success counter.
+ __ add(r0, r0, Operand(1));
+ __ str(r0, MemOperand(frame_pointer(), kSuccessfulCaptures));
+ // Capture results have been stored, so the number of remaining global
+ // output registers is reduced by the number of stored captures.
+ __ sub(r1, r1, Operand(num_saved_registers_));
+ // Check whether we have enough room for another set of capture results.
+ __ cmp(r1, Operand(num_saved_registers_));
+ __ b(lt, &return_r0);
+
+ __ str(r1, MemOperand(frame_pointer(), kNumOutputRegisters));
+ // Advance the location for output.
+ __ add(r2, r2, Operand(num_saved_registers_ * kPointerSize));
+ __ str(r2, MemOperand(frame_pointer(), kRegisterOutput));
+
+ // Prepare r0 to initialize registers with its value in the next run.
+ __ ldr(r0, MemOperand(frame_pointer(), kInputStartMinusOne));
+
+ if (global_with_zero_length_check()) {
+ // Special case for zero-length matches.
+ // r4: capture start index
+ __ cmp(current_input_offset(), r4);
+ // Not a zero-length match, restart.
+ __ b(ne, &load_char_start_regexp);
+ // Offset from the end is zero if we already reached the end.
+ __ cmp(current_input_offset(), Operand(0));
+ __ b(eq, &exit_label_);
+ // Advance current position after a zero-length match.
+ __ add(current_input_offset(),
+ current_input_offset(),
+ Operand((mode_ == UC16) ? 2 : 1));
+ }
+
+ __ b(&load_char_start_regexp);
+ } else {
+ __ mov(r0, Operand(SUCCESS));
+ }
}
+
// Exit and return r0
__ bind(&exit_label_);
+ if (global()) {
+ __ ldr(r0, MemOperand(frame_pointer(), kSuccessfulCaptures));
+ }
+
+ __ bind(&return_r0);
// Skip sp past regexp registers and local variables.
__ mov(sp, frame_pointer());
// Restore registers r4..r11 and return (restoring lr to pc).
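Note: for regexps flagged as global, GetCode now restarts matching after each success: it bumps the success counter at fp[-20], reduces the remaining output-register budget, advances the output pointer, and treats zero-length matches specially by stepping one character forward (or exiting at the end of input). A schematic driver for that loop in plain C++; MatchOnce stands in for one pass of the generated code and is assumed, not defined here:

    struct MatchRange { int start; int end; };

    int RunGlobalModel(int subject_length,
                       int registers_per_match,
                       int output_capacity,
                       bool (*MatchOnce)(int start_pos, MatchRange* out)) {
      int successful_captures = 0;
      int remaining = output_capacity;
      int position = 0;
      MatchRange m;
      while (remaining >= registers_per_match && MatchOnce(position, &m)) {
        ++successful_captures;                 // kSuccessfulCaptures counter
        remaining -= registers_per_match;      // kNumOutputRegisters budget
        if (m.start == m.end) {                // zero-length match
          if (m.end == subject_length) break;  // already at end of input
          position = m.end + 1;                // advance one character
        } else {
          position = m.end;
        }
      }
      return successful_captures;              // returned in r0
    }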
@@ -776,7 +876,7 @@ Handle<HeapObject> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
__ cmp(r0, Operand(0, RelocInfo::NONE));
// If returning non-zero, we should end execution with the given
// result as return value.
- __ b(ne, &exit_label_);
+ __ b(ne, &return_r0);
// String might have moved: Reload end of string from frame.
__ ldr(end_of_input_address(), MemOperand(frame_pointer(), kInputEnd));
@@ -813,7 +913,7 @@ Handle<HeapObject> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
__ bind(&exit_with_exception);
// Exit with Result EXCEPTION(-1) to signal thrown exception.
__ mov(r0, Operand(EXCEPTION));
- __ jmp(&exit_label_);
+ __ jmp(&return_r0);
}
CodeDesc code_desc;
@@ -968,8 +1068,9 @@ void RegExpMacroAssemblerARM::SetRegister(int register_index, int to) {
}
-void RegExpMacroAssemblerARM::Succeed() {
+bool RegExpMacroAssemblerARM::Succeed() {
__ jmp(&success_label_);
+ return global();
}
@@ -1261,8 +1362,9 @@ void RegExpMacroAssemblerARM::LoadCurrentCharacterUnchecked(int cp_offset,
int characters) {
Register offset = current_input_offset();
if (cp_offset != 0) {
- __ add(r0, current_input_offset(), Operand(cp_offset * char_size()));
- offset = r0;
+ // r4 is not being used to store the capture start index at this point.
+ __ add(r4, current_input_offset(), Operand(cp_offset * char_size()));
+ offset = r4;
}
// The ldr, str, ldrh, strh instructions can do unaligned accesses, if the CPU
// and the operating system running on the target allow it.
diff --git a/deps/v8/src/arm/regexp-macro-assembler-arm.h b/deps/v8/src/arm/regexp-macro-assembler-arm.h
index 5c8ed0693f..9bebb4d406 100644
--- a/deps/v8/src/arm/regexp-macro-assembler-arm.h
+++ b/deps/v8/src/arm/regexp-macro-assembler-arm.h
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -45,7 +45,7 @@ class RegExpMacroAssemblerARM: public RegExpMacroAssembler {
#else // V8_INTERPRETED_REGEXP
class RegExpMacroAssemblerARM: public NativeRegExpMacroAssembler {
public:
- RegExpMacroAssemblerARM(Mode mode, int registers_to_save);
+ RegExpMacroAssemblerARM(Mode mode, int registers_to_save, Zone* zone);
virtual ~RegExpMacroAssemblerARM();
virtual int stack_limit_slack();
virtual void AdvanceCurrentPosition(int by);
@@ -70,7 +70,6 @@ class RegExpMacroAssemblerARM: public NativeRegExpMacroAssembler {
virtual void CheckNotBackReference(int start_reg, Label* on_no_match);
virtual void CheckNotBackReferenceIgnoreCase(int start_reg,
Label* on_no_match);
- virtual void CheckNotRegistersEqual(int reg1, int reg2, Label* on_not_equal);
virtual void CheckNotCharacter(unsigned c, Label* on_not_equal);
virtual void CheckNotCharacterAfterAnd(unsigned c,
unsigned mask,
@@ -79,6 +78,14 @@ class RegExpMacroAssemblerARM: public NativeRegExpMacroAssembler {
uc16 minus,
uc16 mask,
Label* on_not_equal);
+ virtual void CheckCharacterInRange(uc16 from,
+ uc16 to,
+ Label* on_in_range);
+ virtual void CheckCharacterNotInRange(uc16 from,
+ uc16 to,
+ Label* on_not_in_range);
+ virtual void CheckBitInTable(Handle<ByteArray> table, Label* on_bit_set);
+
// Checks whether the given offset from the current position is before
// the end of the string.
virtual void CheckPosition(int cp_offset, Label* on_outside_input);
@@ -105,7 +112,7 @@ class RegExpMacroAssemblerARM: public NativeRegExpMacroAssembler {
virtual void ReadStackPointerFromRegister(int reg);
virtual void SetCurrentPositionFromEnd(int by);
virtual void SetRegister(int register_index, int to);
- virtual void Succeed();
+ virtual bool Succeed();
virtual void WriteCurrentPositionToRegister(int reg, int cp_offset);
virtual void ClearRegisters(int reg_from, int reg_to);
virtual void WriteStackPointerToRegister(int reg);
@@ -129,7 +136,8 @@ class RegExpMacroAssemblerARM: public NativeRegExpMacroAssembler {
static const int kSecondaryReturnAddress = kReturnAddress + kPointerSize;
// Stack parameters placed by caller.
static const int kRegisterOutput = kSecondaryReturnAddress + kPointerSize;
- static const int kStackHighEnd = kRegisterOutput + kPointerSize;
+ static const int kNumOutputRegisters = kRegisterOutput + kPointerSize;
+ static const int kStackHighEnd = kNumOutputRegisters + kPointerSize;
static const int kDirectCall = kStackHighEnd + kPointerSize;
static const int kIsolate = kDirectCall + kPointerSize;
@@ -141,10 +149,10 @@ class RegExpMacroAssemblerARM: public NativeRegExpMacroAssembler {
static const int kInputString = kStartIndex - kPointerSize;
// When adding local variables remember to push space for them in
// the frame in GetCode.
- static const int kInputStartMinusOne = kInputString - kPointerSize;
- static const int kAtStart = kInputStartMinusOne - kPointerSize;
+ static const int kSuccessfulCaptures = kInputString - kPointerSize;
+ static const int kInputStartMinusOne = kSuccessfulCaptures - kPointerSize;
// First register address. Following registers are below it on the stack.
- static const int kRegisterZero = kAtStart - kPointerSize;
+ static const int kRegisterZero = kInputStartMinusOne - kPointerSize;
// Initial size of code buffer.
static const size_t kRegExpCodeSize = 1024;
diff --git a/deps/v8/src/arm/simulator-arm.h b/deps/v8/src/arm/simulator-arm.h
index 585f1e0176..d1cad15bd0 100644
--- a/deps/v8/src/arm/simulator-arm.h
+++ b/deps/v8/src/arm/simulator-arm.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -49,16 +49,16 @@ namespace internal {
(entry(p0, p1, p2, p3, p4))
typedef int (*arm_regexp_matcher)(String*, int, const byte*, const byte*,
- void*, int*, Address, int, Isolate*);
+ void*, int*, int, Address, int, Isolate*);
// Call the generated regexp code directly. The code at the entry address
// should act as a function matching the type arm_regexp_matcher.
// The fifth argument is a dummy that reserves the space used for
// the return address added by the ExitFrame in native calls.
-#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \
+#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7, p8) \
(FUNCTION_CAST<arm_regexp_matcher>(entry)( \
- p0, p1, p2, p3, NULL, p4, p5, p6, p7))
+ p0, p1, p2, p3, NULL, p4, p5, p6, p7, p8))
#define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
reinterpret_cast<TryCatch*>(try_catch_address)
@@ -401,9 +401,9 @@ class Simulator {
reinterpret_cast<Object*>(Simulator::current(Isolate::Current())->Call( \
FUNCTION_ADDR(entry), 5, p0, p1, p2, p3, p4))
-#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \
+#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7, p8) \
Simulator::current(Isolate::Current())->Call( \
- entry, 9, p0, p1, p2, p3, NULL, p4, p5, p6, p7)
+ entry, 10, p0, p1, p2, p3, NULL, p4, p5, p6, p7, p8)
#define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
try_catch_address == NULL ? \
diff --git a/deps/v8/src/arm/stub-cache-arm.cc b/deps/v8/src/arm/stub-cache-arm.cc
index d514b607ae..dd9de23fa4 100644
--- a/deps/v8/src/arm/stub-cache-arm.cc
+++ b/deps/v8/src/arm/stub-cache-arm.cc
@@ -435,22 +435,59 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
Handle<JSObject> object,
int index,
Handle<Map> transition,
+ Handle<String> name,
Register receiver_reg,
Register name_reg,
- Register scratch,
+ Register scratch1,
+ Register scratch2,
Label* miss_label) {
// r0 : value
Label exit;
+ LookupResult lookup(masm->isolate());
+ object->Lookup(*name, &lookup);
+ if (lookup.IsFound() && (lookup.IsReadOnly() || !lookup.IsCacheable())) {
+ // In sloppy mode, we could just return the value and be done. However, we
+ // might be in strict mode, where we have to throw. Since we cannot tell,
+ // go into slow case unconditionally.
+ __ jmp(miss_label);
+ return;
+ }
+
// Check that the map of the object hasn't changed.
CompareMapMode mode = transition.is_null() ? ALLOW_ELEMENT_TRANSITION_MAPS
: REQUIRE_EXACT_MAP;
- __ CheckMap(receiver_reg, scratch, Handle<Map>(object->map()), miss_label,
+ __ CheckMap(receiver_reg, scratch1, Handle<Map>(object->map()), miss_label,
DO_SMI_CHECK, mode);
// Perform global security token check if needed.
if (object->IsJSGlobalProxy()) {
- __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
+ __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label);
+ }
+
+ // Check that we are allowed to write this.
+ if (!transition.is_null() && object->GetPrototype()->IsJSObject()) {
+ JSObject* holder;
+ if (lookup.IsFound()) {
+ holder = lookup.holder();
+ } else {
+ // Find the top object.
+ holder = *object;
+ do {
+ holder = JSObject::cast(holder->GetPrototype());
+ } while (holder->GetPrototype()->IsJSObject());
+ }
+ // We need an extra register; push name_reg so CheckPrototypes can use it.
+ __ push(name_reg);
+ Label miss_pop, done_check;
+ CheckPrototypes(object, receiver_reg, Handle<JSObject>(holder), name_reg,
+ scratch1, scratch2, name, &miss_pop);
+ __ jmp(&done_check);
+ __ bind(&miss_pop);
+ __ pop(name_reg);
+ __ jmp(miss_label);
+ __ bind(&done_check);
+ __ pop(name_reg);
}
// Stub never generated for non-global objects that require access
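Note: before a transitioning store the stub now validates the receiver's prototype chain; when the lookup found no holder it walks to the last object on the chain that is still a JSObject and checks prototypes up to there. A minimal model of that walk (it assumes, like the guarded V8 code, that the starting object's prototype is a JSObject):

    struct ObjectModel {
      ObjectModel* prototype;  // nullptr stands in for a non-JSObject prototype
    };

    // Mirrors the do/while in GenerateStoreField: step to the prototype first,
    // then keep going while the next prototype is still a JSObject.
    ObjectModel* FindTopHolder(ObjectModel* object) {
      ObjectModel* holder = object;
      do {
        holder = holder->prototype;
      } while (holder->prototype != nullptr);
      return holder;
    }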
@@ -473,10 +510,20 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
}
if (!transition.is_null()) {
- // Update the map of the object; no write barrier updating is
- // needed because the map is never in new space.
- __ mov(ip, Operand(transition));
- __ str(ip, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
+ // Update the map of the object.
+ __ mov(scratch1, Operand(transition));
+ __ str(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
+
+ // Update the write barrier for the map field and pass the now unused
+ // name_reg as scratch register.
+ __ RecordWriteField(receiver_reg,
+ HeapObject::kMapOffset,
+ scratch1,
+ name_reg,
+ kLRHasNotBeenSaved,
+ kDontSaveFPRegs,
+ OMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
}
// Adjust for the number of properties stored in the object. Even in the
@@ -498,15 +545,16 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
__ RecordWriteField(receiver_reg,
offset,
name_reg,
- scratch,
+ scratch1,
kLRHasNotBeenSaved,
kDontSaveFPRegs);
} else {
// Write to the properties array.
int offset = index * kPointerSize + FixedArray::kHeaderSize;
// Get the properties array
- __ ldr(scratch, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
- __ str(r0, FieldMemOperand(scratch, offset));
+ __ ldr(scratch1,
+ FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
+ __ str(r0, FieldMemOperand(scratch1, offset));
// Skip updating write barrier if storing a smi.
__ JumpIfSmi(r0, &exit);
@@ -514,7 +562,7 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
// Update the write barrier for the array address.
// Ok to clobber receiver_reg and name_reg, since we return.
__ mov(name_reg, r0);
- __ RecordWriteField(scratch,
+ __ RecordWriteField(scratch1,
offset,
name_reg,
receiver_reg,
@@ -582,6 +630,8 @@ static void PushInterceptorArguments(MacroAssembler* masm,
__ push(holder);
__ ldr(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset));
__ push(scratch);
+ __ mov(scratch, Operand(ExternalReference::isolate_address()));
+ __ push(scratch);
}
@@ -596,7 +646,7 @@ static void CompileCallLoadPropertyWithInterceptor(
ExternalReference ref =
ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
masm->isolate());
- __ mov(r0, Operand(5));
+ __ mov(r0, Operand(6));
__ mov(r1, Operand(ref));
CEntryStub stub(1);
@@ -604,9 +654,9 @@ static void CompileCallLoadPropertyWithInterceptor(
}
-static const int kFastApiCallArguments = 3;
+static const int kFastApiCallArguments = 4;
-// Reserves space for the extra arguments to FastHandleApiCall in the
+// Reserves space for the extra arguments to API function in the
// caller's frame.
//
// These arguments are set by CheckPrototypes and GenerateFastApiDirectCall.
@@ -632,7 +682,8 @@ static void GenerateFastApiDirectCall(MacroAssembler* masm,
// -- sp[0] : holder (set by CheckPrototypes)
// -- sp[4] : callee JS function
// -- sp[8] : call data
- // -- sp[12] : last JS argument
+ // -- sp[12] : isolate
+ // -- sp[16] : last JS argument
// -- ...
// -- sp[(argc + 3) * 4] : first JS argument
// -- sp[(argc + 4) * 4] : receiver
@@ -642,7 +693,7 @@ static void GenerateFastApiDirectCall(MacroAssembler* masm,
__ LoadHeapObject(r5, function);
__ ldr(cp, FieldMemOperand(r5, JSFunction::kContextOffset));
- // Pass the additional arguments FastHandleApiCall expects.
+ // Pass the additional arguments.
Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
Handle<Object> call_data(api_call_info->data());
if (masm->isolate()->heap()->InNewSpace(*call_data)) {
@@ -651,13 +702,15 @@ static void GenerateFastApiDirectCall(MacroAssembler* masm,
} else {
__ Move(r6, call_data);
}
- // Store JS function and call data.
- __ stm(ib, sp, r5.bit() | r6.bit());
+ __ mov(r7, Operand(ExternalReference::isolate_address()));
+ // Store JS function, call data and isolate.
+ __ stm(ib, sp, r5.bit() | r6.bit() | r7.bit());
- // r2 points to call data as expected by Arguments
- // (refer to layout above).
- __ add(r2, sp, Operand(2 * kPointerSize));
+ // Prepare arguments.
+ __ add(r2, sp, Operand(3 * kPointerSize));
+ // Allocate the v8::Arguments structure in the arguments' space since
+ // it's not controlled by GC.
const int kApiStackSpace = 4;
FrameScope frame_scope(masm, StackFrame::MANUAL);
@@ -666,9 +719,9 @@ static void GenerateFastApiDirectCall(MacroAssembler* masm,
// r0 = v8::Arguments&
// Arguments is after the return address.
__ add(r0, sp, Operand(1 * kPointerSize));
- // v8::Arguments::implicit_args = data
+ // v8::Arguments::implicit_args_
__ str(r2, MemOperand(r0, 0 * kPointerSize));
- // v8::Arguments::values = last argument
+ // v8::Arguments::values_
__ add(ip, r2, Operand(argc * kPointerSize));
__ str(ip, MemOperand(r0, 1 * kPointerSize));
// v8::Arguments::length_ = argc
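The stub fills in a v8::Arguments record by hand: implicit_args_ gets sp plus three pointer slots, values_ gets implicit_args_ plus argc slots, and length_ gets argc, exactly as the three instructions above show. Below is a rough pointer-arithmetic sketch of that, assuming one pointer per stack slot; FakeArguments and its field names only echo the comments here, not the exact v8.h declaration of this era.

// Layout sketch only; field names mirror the comments in the stub.
#include <cstddef>
#include <iostream>

struct FakeArguments {
  void** implicit_args_;  // the extra, non-JS argument slots
  void** values_;         // where the JS arguments live
  int length_;            // argc
};

int main() {
  const int argc = 2;                 // number of JS arguments
  const int kFastApiCallArguments = 4;  // holder, callee, call data, isolate

  // One slot per stack word, index 0 == sp; +1 for the receiver.
  void* stack[kFastApiCallArguments + argc + 1] = {};

  FakeArguments args;
  args.implicit_args_ = &stack[3];               // __ add(r2, sp, 3 * kPointerSize)
  args.values_ = args.implicit_args_ + argc;     // __ add(ip, r2, argc * kPointerSize)
  args.length_ = argc;

  std::cout << "implicit_args_ at slot " << (args.implicit_args_ - &stack[0])
            << ", values_ at slot " << (args.values_ - &stack[0])
            << ", length " << args.length_ << "\n";
}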
@@ -845,7 +898,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
__ CallExternalReference(
ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
masm->isolate()),
- 5);
+ 6);
// Restore the name_ register.
__ pop(name_);
// Leave the internal frame.
@@ -1204,7 +1257,9 @@ void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
} else {
__ Move(scratch3, Handle<Object>(callback->data()));
}
- __ Push(reg, scratch3, name_reg);
+ __ Push(reg, scratch3);
+ __ mov(scratch3, Operand(ExternalReference::isolate_address()));
+ __ Push(scratch3, name_reg);
__ mov(r0, sp); // r0 = Handle<String>
const int kApiStackSpace = 1;
@@ -1216,7 +1271,7 @@ void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
__ str(scratch2, MemOperand(sp, 1 * kPointerSize));
__ add(r1, sp, Operand(1 * kPointerSize)); // r1 = AccessorInfo&
- const int kStackUnwindSpace = 4;
+ const int kStackUnwindSpace = 5;
Address getter_address = v8::ToCData<Address>(callback->getter());
ApiFunction fun(getter_address);
ExternalReference ref =
@@ -1252,8 +1307,9 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
compile_followup_inline = true;
} else if (lookup->type() == CALLBACKS &&
lookup->GetCallbackObject()->IsAccessorInfo()) {
- compile_followup_inline =
- AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL;
+ AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
+ compile_followup_inline = callback->getter() != NULL &&
+ callback->IsCompatibleReceiver(*object);
}
}
@@ -1344,20 +1400,19 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
if (!receiver.is(holder_reg)) {
ASSERT(scratch1.is(holder_reg));
__ Push(receiver, holder_reg);
- __ ldr(scratch3,
- FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
- __ Push(scratch3, scratch2, name_reg);
} else {
__ push(receiver);
- __ ldr(scratch3,
- FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
- __ Push(holder_reg, scratch3, scratch2, name_reg);
+ __ push(holder_reg);
}
+ __ ldr(scratch3,
+ FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
+ __ mov(scratch1, Operand(ExternalReference::isolate_address()));
+ __ Push(scratch3, scratch1, scratch2, name_reg);
ExternalReference ref =
ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
masm()->isolate());
- __ TailCallExternalReference(ref, 5, 1);
+ __ TailCallExternalReference(ref, 6, 1);
}
} else { // !compile_followup_inline
// Call the runtime system to load the interceptor.
@@ -1371,7 +1426,7 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
ExternalReference ref =
ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
masm()->isolate());
- __ TailCallExternalReference(ref, 5, 1);
+ __ TailCallExternalReference(ref, 6, 1);
}
}
@@ -1575,16 +1630,29 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ jmp(&fast_object);
// In case of fast smi-only, convert to fast object, otherwise bail out.
__ bind(&not_fast_object);
- __ CheckFastSmiOnlyElements(r3, r7, &call_builtin);
+ __ CheckFastSmiElements(r3, r7, &call_builtin);
// receiver: the array being pushed onto
// r3: map
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ Label try_holey_map;
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
FAST_ELEMENTS,
r3,
r7,
+ &try_holey_map);
+ __ mov(r2, receiver);
+ ElementsTransitionGenerator::
+ GenerateMapChangeElementsTransition(masm());
+ __ jmp(&fast_object);
+
+ __ bind(&try_holey_map);
+ __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS,
+ FAST_HOLEY_ELEMENTS,
+ r3,
+ r7,
&call_builtin);
__ mov(r2, receiver);
- ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm());
+ ElementsTransitionGenerator::
+ GenerateMapChangeElementsTransition(masm());
__ bind(&fast_object);
} else {
__ CheckFastObjectElements(r3, r3, &call_builtin);
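The push stub now has to preserve holeyness when it leaves smi-only storage: packed smi arrays transition to packed object arrays, and holey smi arrays to holey object arrays, which is why there are two LoadTransitionedArrayMapConditional attempts above. The sketch below writes that rule out directly; the enum constants are the ones used in this file, but TransitionSmiToObject is an illustrative stand-in, not the real map lookup.

// Illustrative only: the enum names come from the switch statements in this
// file, but the helper stands in for the map lookups done by
// LoadTransitionedArrayMapConditional.
#include <cassert>

enum ElementsKind {
  FAST_SMI_ELEMENTS,
  FAST_HOLEY_SMI_ELEMENTS,
  FAST_ELEMENTS,
  FAST_HOLEY_ELEMENTS,
  FAST_DOUBLE_ELEMENTS,
  FAST_HOLEY_DOUBLE_ELEMENTS
};

// The stub tries the packed transition first and falls back to the holey one.
ElementsKind TransitionSmiToObject(ElementsKind kind) {
  switch (kind) {
    case FAST_SMI_ELEMENTS:       return FAST_ELEMENTS;
    case FAST_HOLEY_SMI_ELEMENTS: return FAST_HOLEY_ELEMENTS;
    default:                      return kind;  // already object/double
  }
}

int main() {
  assert(TransitionSmiToObject(FAST_SMI_ELEMENTS) == FAST_ELEMENTS);
  assert(TransitionSmiToObject(FAST_HOLEY_SMI_ELEMENTS) == FAST_HOLEY_ELEMENTS);
  assert(TransitionSmiToObject(FAST_DOUBLE_ELEMENTS) == FAST_DOUBLE_ELEMENTS);
  return 0;
}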
@@ -1739,7 +1807,7 @@ Handle<Code> CallStubCompiler::CompileArrayPopCall(
// We can't address the last element in one operation. Compute the more
// expensive shift first, and use an offset later on.
__ add(elements, elements, Operand(r4, LSL, kPointerSizeLog2 - kSmiTagSize));
- __ ldr(r0, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
+ __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
__ cmp(r0, r6);
__ b(eq, &call_builtin);
@@ -1747,7 +1815,7 @@ Handle<Code> CallStubCompiler::CompileArrayPopCall(
__ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
// Fill with the hole.
- __ str(r6, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
+ __ str(r6, FieldMemOperand(elements, FixedArray::kHeaderSize));
__ Drop(argc + 1);
__ Ret();
@@ -2539,7 +2607,13 @@ Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object,
// -----------------------------------
Label miss;
- GenerateStoreField(masm(), object, index, transition, r1, r2, r3, &miss);
+ GenerateStoreField(masm(),
+ object,
+ index,
+ transition,
+ name,
+ r1, r2, r3, r4,
+ &miss);
__ bind(&miss);
Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
__ Jump(ic, RelocInfo::CODE_TARGET);
@@ -2594,6 +2668,51 @@ Handle<Code> StoreStubCompiler::CompileStoreCallback(
}
+Handle<Code> StoreStubCompiler::CompileStoreViaSetter(
+ Handle<JSObject> receiver,
+ Handle<JSFunction> setter,
+ Handle<String> name) {
+ // ----------- S t a t e -------------
+ // -- r0 : value
+ // -- r1 : receiver
+ // -- r2 : name
+ // -- lr : return address
+ // -----------------------------------
+ Label miss;
+
+ // Check that the map of the object hasn't changed.
+ __ CheckMap(r1, r3, Handle<Map>(receiver->map()), &miss, DO_SMI_CHECK,
+ ALLOW_ELEMENT_TRANSITION_MAPS);
+
+ {
+ FrameScope scope(masm(), StackFrame::INTERNAL);
+
+ // Save value register, so we can restore it later.
+ __ push(r0);
+
+ // Call the JavaScript setter with the receiver and the value on the stack.
+ __ Push(r1, r0);
+ ParameterCount actual(1);
+ __ InvokeFunction(setter, actual, CALL_FUNCTION, NullCallWrapper(),
+ CALL_AS_METHOD);
+
+ // We have to return the passed value, not the return value of the setter.
+ __ pop(r0);
+
+ // Restore context register.
+ __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+ }
+ __ Ret();
+
+ __ bind(&miss);
+ Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(CALLBACKS, name);
+}
+
+
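CompileStoreViaSetter saves r0 around the call because the store has to produce the value that was assigned, not whatever the setter returned. A plain C++ analogue of that contract, with Setter and StoreViaSetter as made-up names:

// Made-up names; only the contract matters: the store returns the value that
// was assigned, regardless of what the setter itself returns.
#include <functional>
#include <iostream>

using Setter = std::function<int(int /*receiver*/, int /*value*/)>;

int StoreViaSetter(int receiver, int value, const Setter& setter) {
  setter(receiver, value);  // the setter's return value is deliberately ignored
  return value;             // mirrors __ pop(r0) restoring the pushed value
}

int main() {
  Setter s = [](int, int v) { return v * 100; };  // returns something else
  std::cout << StoreViaSetter(0, 7, s) << "\n";   // prints 7, not 700
}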
Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
Handle<JSObject> receiver,
Handle<String> name) {
@@ -2761,6 +2880,44 @@ Handle<Code> LoadStubCompiler::CompileLoadCallback(
}
+Handle<Code> LoadStubCompiler::CompileLoadViaGetter(
+ Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSObject> holder,
+ Handle<JSFunction> getter) {
+ // ----------- S t a t e -------------
+ // -- r0 : receiver
+ // -- r2 : name
+ // -- lr : return address
+ // -----------------------------------
+ Label miss;
+
+ // Check that the maps haven't changed.
+ __ JumpIfSmi(r0, &miss);
+ CheckPrototypes(receiver, r0, holder, r3, r4, r1, name, &miss);
+
+ {
+ FrameScope scope(masm(), StackFrame::INTERNAL);
+
+ // Call the JavaScript getter with the receiver on the stack.
+ __ push(r0);
+ ParameterCount actual(0);
+ __ InvokeFunction(getter, actual, CALL_FUNCTION, NullCallWrapper(),
+ CALL_AS_METHOD);
+
+ // Restore context register.
+ __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+ }
+ __ Ret();
+
+ __ bind(&miss);
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(CALLBACKS, name);
+}
+
+
Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object,
Handle<JSObject> holder,
Handle<JSFunction> value,
@@ -3085,7 +3242,13 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object,
// r3 is used as scratch register. r1 and r2 keep their values if a jump to
// the miss label is generated.
- GenerateStoreField(masm(), object, index, transition, r2, r1, r3, &miss);
+ GenerateStoreField(masm(),
+ object,
+ index,
+ transition,
+ name,
+ r2, r1, r3, r4,
+ &miss);
__ bind(&miss);
__ DecrementCounter(counters->keyed_store_field(), 1, r3, r4);
@@ -3366,8 +3529,11 @@ static bool IsElementTypeSigned(ElementsKind elements_kind) {
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -3377,6 +3543,44 @@ static bool IsElementTypeSigned(ElementsKind elements_kind) {
}
+static void GenerateSmiKeyCheck(MacroAssembler* masm,
+ Register key,
+ Register scratch0,
+ Register scratch1,
+ DwVfpRegister double_scratch0,
+ Label* fail) {
+ if (CpuFeatures::IsSupported(VFP3)) {
+ CpuFeatures::Scope scope(VFP3);
+ Label key_ok;
+ // Check for smi or a smi inside a heap number. We convert the heap
+ // number and check if the conversion is exact and fits into the smi
+ // range.
+ __ JumpIfSmi(key, &key_ok);
+ __ CheckMap(key,
+ scratch0,
+ Heap::kHeapNumberMapRootIndex,
+ fail,
+ DONT_DO_SMI_CHECK);
+ __ sub(ip, key, Operand(kHeapObjectTag));
+ __ vldr(double_scratch0, ip, HeapNumber::kValueOffset);
+ __ EmitVFPTruncate(kRoundToZero,
+ double_scratch0.low(),
+ double_scratch0,
+ scratch0,
+ scratch1,
+ kCheckForInexactConversion);
+ __ b(ne, fail);
+ __ vmov(scratch0, double_scratch0.low());
+ __ TrySmiTag(scratch0, fail, scratch1);
+ __ mov(key, scratch0);
+ __ bind(&key_ok);
+ } else {
+ // Check that the key is a smi.
+ __ JumpIfNotSmi(key, fail);
+ }
+}
+
+
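GenerateSmiKeyCheck widens the keyed stubs to accept a heap number whose value converts exactly to a smi, not just a smi key. A portable approximation of that predicate is sketched below, assuming the 31-bit smi range of 32-bit targets; KeyFitsInSmi is an invented name and the VFP truncate-and-compare path is replaced by std::trunc.

// Standalone approximation: the key must have no fractional part (the
// kRoundToZero + kCheckForInexactConversion path above) and fit in the
// 31-bit smi range.
#include <cmath>
#include <cstdint>
#include <iostream>

bool KeyFitsInSmi(double key, int32_t* out) {
  double truncated = std::trunc(key);
  if (truncated != key) return false;    // inexact conversion: fail to the slow path
  const double kSmiMin = -1073741824.0;  // -2^30
  const double kSmiMax = 1073741823.0;   //  2^30 - 1
  if (truncated < kSmiMin || truncated > kSmiMax) return false;
  *out = static_cast<int32_t>(truncated);
  return true;
}

int main() {
  int32_t v;
  std::cout << KeyFitsInSmi(7.0, &v) << " "    // 1: usable as a smi key
            << KeyFitsInSmi(7.5, &v) << " "    // 0: fractional part
            << KeyFitsInSmi(2e9, &v) << "\n";  // 0: outside the smi range
}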
void KeyedLoadStubCompiler::GenerateLoadExternalArray(
MacroAssembler* masm,
ElementsKind elements_kind) {
@@ -3393,8 +3597,8 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
- // Check that the key is a smi.
- __ JumpIfNotSmi(key, &miss_force_generic);
+ // Check that the key is a smi or a heap number convertible to a smi.
+ GenerateSmiKeyCheck(masm, key, r4, r5, d1, &miss_force_generic);
__ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
// r3: elements array
@@ -3453,8 +3657,11 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
}
break;
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -3724,8 +3931,8 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
- // Check that the key is a smi.
- __ JumpIfNotSmi(key, &miss_force_generic);
+ // Check that the key is a smi or a heap number convertible to a smi.
+ GenerateSmiKeyCheck(masm, key, r4, r5, d1, &miss_force_generic);
__ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
@@ -3794,8 +4001,11 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
}
break;
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -3858,8 +4068,11 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -3998,8 +4211,11 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -4050,8 +4266,8 @@ void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
- // Check that the key is a smi.
- __ JumpIfNotSmi(r0, &miss_force_generic);
+ // Check that the key is a smi or a heap number convertible to a smi.
+ GenerateSmiKeyCheck(masm, r0, r4, r5, d1, &miss_force_generic);
// Get the elements array.
__ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
@@ -4102,8 +4318,8 @@ void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
- // Check that the key is a smi.
- __ JumpIfNotSmi(key_reg, &miss_force_generic);
+ // Check that the key is a smi or a heap number convertible to a smi.
+ GenerateSmiKeyCheck(masm, key_reg, r4, r5, d1, &miss_force_generic);
// Get the elements array.
__ ldr(elements_reg,
@@ -4178,10 +4394,10 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
- // Check that the key is a smi.
- __ JumpIfNotSmi(key_reg, &miss_force_generic);
+ // Check that the key is a smi or a heap number convertible to a smi.
+ GenerateSmiKeyCheck(masm, key_reg, r4, r5, d1, &miss_force_generic);
- if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
+ if (IsFastSmiElementsKind(elements_kind)) {
__ JumpIfNotSmi(value_reg, &transition_elements_kind);
}
@@ -4209,7 +4425,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
DONT_DO_SMI_CHECK);
__ bind(&finish_store);
- if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
+ if (IsFastSmiElementsKind(elements_kind)) {
__ add(scratch,
elements_reg,
Operand(FixedArray::kHeaderSize - kHeapObjectTag));
@@ -4219,7 +4435,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
Operand(key_reg, LSL, kPointerSizeLog2 - kSmiTagSize));
__ str(value_reg, MemOperand(scratch));
} else {
- ASSERT(elements_kind == FAST_ELEMENTS);
+ ASSERT(IsFastObjectElementsKind(elements_kind));
__ add(scratch,
elements_reg,
Operand(FixedArray::kHeaderSize - kHeapObjectTag));
@@ -4345,7 +4561,9 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
- __ JumpIfNotSmi(key_reg, &miss_force_generic);
+
+ // Check that the key is a smi or a heap number convertible to a smi.
+ GenerateSmiKeyCheck(masm, key_reg, r4, r5, d1, &miss_force_generic);
__ ldr(elements_reg,
FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
diff --git a/deps/v8/src/array.js b/deps/v8/src/array.js
index 00a4fee5cd..a1cc5b6a7d 100644
--- a/deps/v8/src/array.js
+++ b/deps/v8/src/array.js
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -465,15 +465,19 @@ function ArrayPush() {
}
+// Returns an array containing the array elements of the object followed
+// by the array elements of each argument in order. See ECMA-262,
+// section 15.4.4.7.
function ArrayConcat(arg1) { // length == 1
if (IS_NULL_OR_UNDEFINED(this) && !IS_UNDETECTABLE(this)) {
throw MakeTypeError("called_on_null_or_undefined",
["Array.prototype.concat"]);
}
+ var array = ToObject(this);
var arg_count = %_ArgumentsLength();
var arrays = new InternalArray(1 + arg_count);
- arrays[0] = this;
+ arrays[0] = array;
for (var i = 0; i < arg_count; i++) {
arrays[i + 1] = %_Arguments(i);
}
@@ -1027,13 +1031,28 @@ function ArrayFilter(f, receiver) {
var result = new $Array();
var accumulator = new InternalArray();
var accumulator_length = 0;
- for (var i = 0; i < length; i++) {
- if (i in array) {
- var element = array[i];
- if (%_CallFunction(receiver, element, i, array, f)) {
- accumulator[accumulator_length++] = element;
+ if (%DebugCallbackSupportsStepping(f)) {
+ for (var i = 0; i < length; i++) {
+ if (i in array) {
+ var element = array[i];
+ // Prepare break slots for debugger step in.
+ %DebugPrepareStepInIfStepping(f);
+ if (%_CallFunction(receiver, element, i, array, f)) {
+ accumulator[accumulator_length++] = element;
+ }
+ }
+ }
+ } else {
+ // This is a duplicate of the previous loop sans debug stepping.
+ for (var i = 0; i < length; i++) {
+ if (i in array) {
+ var element = array[i];
+ if (%_CallFunction(receiver, element, i, array, f)) {
+ accumulator[accumulator_length++] = element;
+ }
}
}
+ // End of duplicate.
}
%MoveArrayContents(accumulator, result);
return result;
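Each of these array builtins now asks %DebugCallbackSupportsStepping once and then runs one of two otherwise identical loops, so %DebugPrepareStepInIfStepping is only paid per iteration when a debugger may actually step into the callback. The same hoist-and-duplicate shape in ordinary C++, with the two runtime hooks replaced by placeholder names:

// Placeholder names for the %Debug* runtime calls; only the loop-duplication
// shape is the point.
#include <functional>
#include <iostream>
#include <vector>

static bool debugger_attached = false;   // stand-in for the support check
static void PrepareStepIn() { /* per-call debugger bookkeeping */ }

std::vector<int> Filter(const std::vector<int>& in,
                        const std::function<bool(int)>& f) {
  std::vector<int> out;
  if (debugger_attached) {
    for (int x : in) {                   // slow loop: pays the hook every call
      PrepareStepIn();
      if (f(x)) out.push_back(x);
    }
  } else {
    for (int x : in) {                   // duplicate of the loop sans the hook
      if (f(x)) out.push_back(x);
    }
  }
  return out;
}

int main() {
  auto odd = [](int x) { return x % 2 != 0; };
  std::cout << Filter({1, 2, 3, 4, 5}, odd).size() << "\n";  // prints 3
}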
@@ -1059,12 +1078,24 @@ function ArrayForEach(f, receiver) {
} else if (!IS_SPEC_OBJECT(receiver)) {
receiver = ToObject(receiver);
}
-
- for (var i = 0; i < length; i++) {
- if (i in array) {
- var element = array[i];
- %_CallFunction(receiver, element, i, array, f);
+ if (%DebugCallbackSupportsStepping(f)) {
+ for (var i = 0; i < length; i++) {
+ if (i in array) {
+ var element = array[i];
+ // Prepare break slots for debugger step in.
+ %DebugPrepareStepInIfStepping(f);
+ %_CallFunction(receiver, element, i, array, f);
+ }
}
+ } else {
+ // This is a duplicate of the previous loop sans debug stepping.
+ for (var i = 0; i < length; i++) {
+ if (i in array) {
+ var element = array[i];
+ %_CallFunction(receiver, element, i, array, f);
+ }
+ }
+ // End of duplicate.
}
}
@@ -1091,11 +1122,24 @@ function ArraySome(f, receiver) {
receiver = ToObject(receiver);
}
- for (var i = 0; i < length; i++) {
- if (i in array) {
- var element = array[i];
- if (%_CallFunction(receiver, element, i, array, f)) return true;
+ if (%DebugCallbackSupportsStepping(f)) {
+ for (var i = 0; i < length; i++) {
+ if (i in array) {
+ var element = array[i];
+ // Prepare break slots for debugger step in.
+ %DebugPrepareStepInIfStepping(f);
+ if (%_CallFunction(receiver, element, i, array, f)) return true;
+ }
+ }
+ } else {
+ // This is a duplicate of the previous loop sans debug stepping.
+ for (var i = 0; i < length; i++) {
+ if (i in array) {
+ var element = array[i];
+ if (%_CallFunction(receiver, element, i, array, f)) return true;
+ }
}
+ // End of duplicate.
}
return false;
}
@@ -1121,11 +1165,24 @@ function ArrayEvery(f, receiver) {
receiver = ToObject(receiver);
}
- for (var i = 0; i < length; i++) {
- if (i in array) {
- var element = array[i];
- if (!%_CallFunction(receiver, element, i, array, f)) return false;
+ if (%DebugCallbackSupportsStepping(f)) {
+ for (var i = 0; i < length; i++) {
+ if (i in array) {
+ var element = array[i];
+ // Prepare break slots for debugger step in.
+ %DebugPrepareStepInIfStepping(f);
+ if (!%_CallFunction(receiver, element, i, array, f)) return false;
+ }
+ }
+ } else {
+ // This is a duplicate of the previous loop sans debug stepping.
+ for (var i = 0; i < length; i++) {
+ if (i in array) {
+ var element = array[i];
+ if (!%_CallFunction(receiver, element, i, array, f)) return false;
+ }
}
+ // End of duplicate.
}
return true;
}
@@ -1152,11 +1209,24 @@ function ArrayMap(f, receiver) {
var result = new $Array();
var accumulator = new InternalArray(length);
- for (var i = 0; i < length; i++) {
- if (i in array) {
- var element = array[i];
- accumulator[i] = %_CallFunction(receiver, element, i, array, f);
+ if (%DebugCallbackSupportsStepping(f)) {
+ for (var i = 0; i < length; i++) {
+ if (i in array) {
+ var element = array[i];
+ // Prepare break slots for debugger step in.
+ %DebugPrepareStepInIfStepping(f);
+ accumulator[i] = %_CallFunction(receiver, element, i, array, f);
+ }
}
+ } else {
+ // This is a duplicate of the previous loop sans debug stepping.
+ for (var i = 0; i < length; i++) {
+ if (i in array) {
+ var element = array[i];
+ accumulator[i] = %_CallFunction(receiver, element, i, array, f);
+ }
+ }
+ // End of duplicate.
}
%MoveArrayContents(accumulator, result);
return result;
@@ -1311,11 +1381,27 @@ function ArrayReduce(callback, current) {
}
var receiver = %GetDefaultReceiver(callback);
- for (; i < length; i++) {
- if (i in array) {
- var element = array[i];
- current = %_CallFunction(receiver, current, element, i, array, callback);
+
+ if (%DebugCallbackSupportsStepping(callback)) {
+ for (; i < length; i++) {
+ if (i in array) {
+ var element = array[i];
+ // Prepare break slots for debugger step in.
+ %DebugPrepareStepInIfStepping(callback);
+ current =
+ %_CallFunction(receiver, current, element, i, array, callback);
+ }
+ }
+ } else {
+ // This is a duplicate of the previous loop sans debug stepping.
+ for (; i < length; i++) {
+ if (i in array) {
+ var element = array[i];
+ current =
+ %_CallFunction(receiver, current, element, i, array, callback);
+ }
}
+ // End of duplicate.
}
return current;
}
@@ -1348,11 +1434,27 @@ function ArrayReduceRight(callback, current) {
}
var receiver = %GetDefaultReceiver(callback);
- for (; i >= 0; i--) {
- if (i in array) {
- var element = array[i];
- current = %_CallFunction(receiver, current, element, i, array, callback);
+
+ if (%DebugCallbackSupportsStepping(callback)) {
+ for (; i >= 0; i--) {
+ if (i in array) {
+ var element = array[i];
+ // Prepare break slots for debugger step in.
+ %DebugPrepareStepInIfStepping(callback);
+ current =
+ %_CallFunction(receiver, current, element, i, array, callback);
+ }
+ }
+ } else {
+ // This is a duplicate of the previous loop sans debug stepping.
+ for (; i >= 0; i--) {
+ if (i in array) {
+ var element = array[i];
+ current =
+ %_CallFunction(receiver, current, element, i, array, callback);
+ }
}
+ // End of duplicate.
}
return current;
}
diff --git a/deps/v8/src/assembler.cc b/deps/v8/src/assembler.cc
index 4944202f07..d4c49ddd45 100644
--- a/deps/v8/src/assembler.cc
+++ b/deps/v8/src/assembler.cc
@@ -99,21 +99,7 @@ struct DoubleConstant BASE_EMBEDDED {
double the_hole_nan;
};
-struct InitializeDoubleConstants {
- static void Construct(DoubleConstant* double_constants) {
- double_constants->min_int = kMinInt;
- double_constants->one_half = 0.5;
- double_constants->minus_zero = -0.0;
- double_constants->uint8_max_value = 255;
- double_constants->zero = 0.0;
- double_constants->canonical_non_hole_nan = OS::nan_value();
- double_constants->the_hole_nan = BitCast<double>(kHoleNanInt64);
- double_constants->negative_infinity = -V8_INFINITY;
- }
-};
-
-static LazyInstance<DoubleConstant, InitializeDoubleConstants>::type
- double_constants = LAZY_INSTANCE_INITIALIZER;
+static DoubleConstant double_constants;
const char* const RelocInfo::kFillerCommentString = "DEOPTIMIZATION PADDING";
@@ -726,6 +712,18 @@ void RelocInfo::Verify() {
// -----------------------------------------------------------------------------
// Implementation of ExternalReference
+void ExternalReference::SetUp() {
+ double_constants.min_int = kMinInt;
+ double_constants.one_half = 0.5;
+ double_constants.minus_zero = -0.0;
+ double_constants.uint8_max_value = 255;
+ double_constants.zero = 0.0;
+ double_constants.canonical_non_hole_nan = OS::nan_value();
+ double_constants.the_hole_nan = BitCast<double>(kHoleNanInt64);
+ double_constants.negative_infinity = -V8_INFINITY;
+}
+
+
ExternalReference::ExternalReference(Builtins::CFunctionId id, Isolate* isolate)
: address_(Redirect(isolate, Builtins::c_function_address(id))) {}
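The LazyInstance wrapper around the double constants is gone; ExternalReference::SetUp() now fills a plain static struct once during initialization, so the address_of_* accessors below become simple address-of expressions. A minimal sketch of that pattern, reusing the struct and SetUp names only as illustrations:

// Minimal sketch: a plain static POD that an explicit SetUp() populates once,
// instead of a lazily constructed wrapper every accessor must go through.
#include <iostream>
#include <limits>

struct DoubleConstants {
  double one_half;
  double minus_zero;
  double negative_infinity;
};

static DoubleConstants double_constants;  // zero-initialized static storage

void SetUp() {  // called once, early, before any code needs the addresses
  double_constants.one_half = 0.5;
  double_constants.minus_zero = -0.0;
  double_constants.negative_infinity = -std::numeric_limits<double>::infinity();
}

const double* AddressOfOneHalf() { return &double_constants.one_half; }

int main() {
  SetUp();
  std::cout << *AddressOfOneHalf() << "\n";  // 0.5, at a stable static address
}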
@@ -957,51 +955,66 @@ ExternalReference ExternalReference::scheduled_exception_address(
}
+ExternalReference ExternalReference::address_of_pending_message_obj(
+ Isolate* isolate) {
+ return ExternalReference(isolate->pending_message_obj_address());
+}
+
+
+ExternalReference ExternalReference::address_of_has_pending_message(
+ Isolate* isolate) {
+ return ExternalReference(isolate->has_pending_message_address());
+}
+
+
+ExternalReference ExternalReference::address_of_pending_message_script(
+ Isolate* isolate) {
+ return ExternalReference(isolate->pending_message_script_address());
+}
+
+
ExternalReference ExternalReference::address_of_min_int() {
- return ExternalReference(reinterpret_cast<void*>(
- &double_constants.Pointer()->min_int));
+ return ExternalReference(reinterpret_cast<void*>(&double_constants.min_int));
}
ExternalReference ExternalReference::address_of_one_half() {
- return ExternalReference(reinterpret_cast<void*>(
- &double_constants.Pointer()->one_half));
+ return ExternalReference(reinterpret_cast<void*>(&double_constants.one_half));
}
ExternalReference ExternalReference::address_of_minus_zero() {
- return ExternalReference(reinterpret_cast<void*>(
- &double_constants.Pointer()->minus_zero));
+ return ExternalReference(
+ reinterpret_cast<void*>(&double_constants.minus_zero));
}
ExternalReference ExternalReference::address_of_zero() {
- return ExternalReference(reinterpret_cast<void*>(
- &double_constants.Pointer()->zero));
+ return ExternalReference(reinterpret_cast<void*>(&double_constants.zero));
}
ExternalReference ExternalReference::address_of_uint8_max_value() {
- return ExternalReference(reinterpret_cast<void*>(
- &double_constants.Pointer()->uint8_max_value));
+ return ExternalReference(
+ reinterpret_cast<void*>(&double_constants.uint8_max_value));
}
ExternalReference ExternalReference::address_of_negative_infinity() {
- return ExternalReference(reinterpret_cast<void*>(
- &double_constants.Pointer()->negative_infinity));
+ return ExternalReference(
+ reinterpret_cast<void*>(&double_constants.negative_infinity));
}
ExternalReference ExternalReference::address_of_canonical_non_hole_nan() {
- return ExternalReference(reinterpret_cast<void*>(
- &double_constants.Pointer()->canonical_non_hole_nan));
+ return ExternalReference(
+ reinterpret_cast<void*>(&double_constants.canonical_non_hole_nan));
}
ExternalReference ExternalReference::address_of_the_hole_nan() {
- return ExternalReference(reinterpret_cast<void*>(
- &double_constants.Pointer()->the_hole_nan));
+ return ExternalReference(
+ reinterpret_cast<void*>(&double_constants.the_hole_nan));
}
@@ -1138,6 +1151,12 @@ ExternalReference ExternalReference::math_log_double_function(
}
+ExternalReference ExternalReference::page_flags(Page* page) {
+ return ExternalReference(reinterpret_cast<Address>(page) +
+ MemoryChunk::kFlagsOffset);
+}
+
+
// Helper function to compute x^y, where y is known to be an
// integer. Uses binary decomposition to limit the number of
// multiplications; see the discussion in "Hacker's Delight" by Henry
@@ -1158,6 +1177,20 @@ double power_double_int(double x, int y) {
double power_double_double(double x, double y) {
+#ifdef __MINGW64_VERSION_MAJOR
+ // MinGW64 has a custom implementation for pow that handles certain
+ // special cases differently; compensate for those cases here.
+ if ((x == 0.0 || isinf(x)) && isfinite(y)) {
+ double f;
+ if (modf(y, &f) != 0.0) return ((x == 0.0) ^ (y > 0)) ? V8_INFINITY : 0;
+ }
+
+ if (x == 2.0) {
+ int y_int = static_cast<int>(y);
+ if (y == y_int) return ldexp(1.0, y_int);
+ }
+#endif
+
// The checks for special cases can be dropped in ia32 because they have
// already been done in generated code before bailing out here.
if (isnan(y) || ((x == 1 || x == -1) && isinf(y))) return OS::nan_value();
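The new #ifdef block precomputes the results MinGW64's pow would get wrong: a zero or infinite base raised to a non-integral finite exponent, and exact powers of two via ldexp. The workaround's arithmetic, reproduced with the same C library calls so the values can be checked in isolation (kInfinity stands in for V8_INFINITY):

// Same arithmetic as the MinGW64 workaround, using std::isinf, std::isfinite,
// std::modf and std::ldexp; kInfinity stands in for V8_INFINITY.
#include <cmath>
#include <iostream>
#include <limits>

const double kInfinity = std::numeric_limits<double>::infinity();

double PowerWorkaround(double x, double y) {
  if ((x == 0.0 || std::isinf(x)) && std::isfinite(y)) {
    double integral_part;
    if (std::modf(y, &integral_part) != 0.0) {
      // Zero base with positive exponent gives 0, otherwise infinity; the
      // infinite-base cases mirror that.
      return ((x == 0.0) ^ (y > 0)) ? kInfinity : 0;
    }
  }
  if (x == 2.0) {
    int y_int = static_cast<int>(y);
    if (y == y_int) return std::ldexp(1.0, y_int);  // exact powers of two
  }
  return std::pow(x, y);  // everything else falls through to the library
}

int main() {
  std::cout << PowerWorkaround(0.0, -0.5) << " "   // inf
            << PowerWorkaround(2.0, 10.0) << " "   // 1024
            << PowerWorkaround(3.0, 2.0) << "\n";  // 9
}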
diff --git a/deps/v8/src/assembler.h b/deps/v8/src/assembler.h
index f960b58691..619c69c4b2 100644
--- a/deps/v8/src/assembler.h
+++ b/deps/v8/src/assembler.h
@@ -539,6 +539,8 @@ class ExternalReference BASE_EMBEDDED {
DIRECT_GETTER_CALL
};
+ static void SetUp();
+
typedef void* ExternalReferenceRedirector(void* original, Type type);
ExternalReference(Builtins::CFunctionId id, Isolate* isolate);
@@ -638,6 +640,9 @@ class ExternalReference BASE_EMBEDDED {
static ExternalReference handle_scope_level_address();
static ExternalReference scheduled_exception_address(Isolate* isolate);
+ static ExternalReference address_of_pending_message_obj(Isolate* isolate);
+ static ExternalReference address_of_has_pending_message(Isolate* isolate);
+ static ExternalReference address_of_pending_message_script(Isolate* isolate);
// Static variables containing common double constants.
static ExternalReference address_of_min_int();
@@ -654,6 +659,8 @@ class ExternalReference BASE_EMBEDDED {
static ExternalReference math_tan_double_function(Isolate* isolate);
static ExternalReference math_log_double_function(Isolate* isolate);
+ static ExternalReference page_flags(Page* page);
+
Address address() const {return reinterpret_cast<Address>(address_);}
#ifdef ENABLE_DEBUGGER_SUPPORT
diff --git a/deps/v8/src/ast.cc b/deps/v8/src/ast.cc
index 4b6ae680a4..0970253c29 100644
--- a/deps/v8/src/ast.cc
+++ b/deps/v8/src/ast.cc
@@ -242,8 +242,11 @@ bool IsEqualNumber(void* first, void* second) {
}
-void ObjectLiteral::CalculateEmitStore() {
- ZoneHashMap table(Literal::Match);
+void ObjectLiteral::CalculateEmitStore(Zone* zone) {
+ ZoneAllocationPolicy allocator(zone);
+
+ ZoneHashMap table(Literal::Match, ZoneHashMap::kDefaultHashMapCapacity,
+ allocator);
for (int i = properties()->length() - 1; i >= 0; i--) {
ObjectLiteral::Property* property = properties()->at(i);
Literal* literal = property->key();
@@ -252,23 +255,23 @@ void ObjectLiteral::CalculateEmitStore() {
// If the key of a computed property is in the table, do not emit
// a store for the property later.
if (property->kind() == ObjectLiteral::Property::COMPUTED &&
- table.Lookup(literal, hash, false) != NULL) {
+ table.Lookup(literal, hash, false, allocator) != NULL) {
property->set_emit_store(false);
} else {
// Add key to the table.
- table.Lookup(literal, hash, true);
+ table.Lookup(literal, hash, true, allocator);
}
}
}
-void TargetCollector::AddTarget(Label* target) {
+void TargetCollector::AddTarget(Label* target, Zone* zone) {
// Add the label to the collector, but discard duplicates.
int length = targets_.length();
for (int i = 0; i < length; i++) {
if (targets_[i] == target) return;
}
- targets_.Add(target);
+ targets_.Add(target, zone);
}
@@ -397,7 +400,8 @@ bool FunctionDeclaration::IsInlineable() const {
// ----------------------------------------------------------------------------
// Recording of type feedback
-void Property::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
+void Property::RecordTypeFeedback(TypeFeedbackOracle* oracle,
+ Zone* zone) {
// Record type feedback from the oracle in the AST.
is_uninitialized_ = oracle->LoadIsUninitialized(this);
if (is_uninitialized_) return;
@@ -421,15 +425,17 @@ void Property::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
} else if (oracle->LoadIsBuiltin(this, Builtins::kKeyedLoadIC_String)) {
is_string_access_ = true;
} else if (is_monomorphic_) {
- receiver_types_.Add(oracle->LoadMonomorphicReceiverType(this));
+ receiver_types_.Add(oracle->LoadMonomorphicReceiverType(this),
+ zone);
} else if (oracle->LoadIsMegamorphicWithTypeInfo(this)) {
- receiver_types_.Reserve(kMaxKeyedPolymorphism);
+ receiver_types_.Reserve(kMaxKeyedPolymorphism, zone);
oracle->CollectKeyedReceiverTypes(this->id(), &receiver_types_);
}
}
-void Assignment::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
+void Assignment::RecordTypeFeedback(TypeFeedbackOracle* oracle,
+ Zone* zone) {
Property* prop = target()->AsProperty();
ASSERT(prop != NULL);
is_monomorphic_ = oracle->StoreIsMonomorphicNormal(this);
@@ -441,22 +447,23 @@ void Assignment::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
oracle->StoreReceiverTypes(this, name, &receiver_types_);
} else if (is_monomorphic_) {
// Record receiver type for monomorphic keyed stores.
- receiver_types_.Add(oracle->StoreMonomorphicReceiverType(this));
+ receiver_types_.Add(oracle->StoreMonomorphicReceiverType(this), zone);
} else if (oracle->StoreIsMegamorphicWithTypeInfo(this)) {
- receiver_types_.Reserve(kMaxKeyedPolymorphism);
+ receiver_types_.Reserve(kMaxKeyedPolymorphism, zone);
oracle->CollectKeyedReceiverTypes(this->id(), &receiver_types_);
}
}
-void CountOperation::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
+void CountOperation::RecordTypeFeedback(TypeFeedbackOracle* oracle,
+ Zone* zone) {
is_monomorphic_ = oracle->StoreIsMonomorphicNormal(this);
receiver_types_.Clear();
if (is_monomorphic_) {
// Record receiver type for monomorphic keyed stores.
- receiver_types_.Add(oracle->StoreMonomorphicReceiverType(this));
+ receiver_types_.Add(oracle->StoreMonomorphicReceiverType(this), zone);
} else if (oracle->StoreIsMegamorphicWithTypeInfo(this)) {
- receiver_types_.Reserve(kMaxKeyedPolymorphism);
+ receiver_types_.Reserve(kMaxKeyedPolymorphism, zone);
oracle->CollectKeyedReceiverTypes(this->id(), &receiver_types_);
}
}
@@ -507,7 +514,6 @@ bool Call::ComputeTarget(Handle<Map> type, Handle<String> name) {
// We don't know the target.
return false;
case MAP_TRANSITION:
- case ELEMENTS_TRANSITION:
case CONSTANT_TRANSITION:
case NULL_DESCRIPTOR:
// Perhaps something interesting is up in the prototype chain...
@@ -784,7 +790,7 @@ bool RegExpCapture::IsAnchoredAtEnd() {
// output formats are alike.
class RegExpUnparser: public RegExpVisitor {
public:
- RegExpUnparser();
+ explicit RegExpUnparser(Zone* zone);
void VisitCharacterRange(CharacterRange that);
SmartArrayPointer<const char> ToString() { return stream_.ToCString(); }
#define MAKE_CASE(Name) virtual void* Visit##Name(RegExp##Name*, void* data);
@@ -794,10 +800,11 @@ class RegExpUnparser: public RegExpVisitor {
StringStream* stream() { return &stream_; }
HeapStringAllocator alloc_;
StringStream stream_;
+ Zone* zone_;
};
-RegExpUnparser::RegExpUnparser() : stream_(&alloc_) {
+RegExpUnparser::RegExpUnparser(Zone* zone) : stream_(&alloc_), zone_(zone) {
}
@@ -837,9 +844,9 @@ void* RegExpUnparser::VisitCharacterClass(RegExpCharacterClass* that,
if (that->is_negated())
stream()->Add("^");
stream()->Add("[");
- for (int i = 0; i < that->ranges()->length(); i++) {
+ for (int i = 0; i < that->ranges(zone_)->length(); i++) {
if (i > 0) stream()->Add(" ");
- VisitCharacterRange(that->ranges()->at(i));
+ VisitCharacterRange(that->ranges(zone_)->at(i));
}
stream()->Add("]");
return NULL;
@@ -941,8 +948,8 @@ void* RegExpUnparser::VisitEmpty(RegExpEmpty* that, void* data) {
}
-SmartArrayPointer<const char> RegExpTree::ToString() {
- RegExpUnparser unparser;
+SmartArrayPointer<const char> RegExpTree::ToString(Zone* zone) {
+ RegExpUnparser unparser(zone);
Accept(&unparser, NULL);
return unparser.ToString();
}
@@ -962,6 +969,14 @@ RegExpDisjunction::RegExpDisjunction(ZoneList<RegExpTree*>* alternatives)
}
+static int IncreaseBy(int previous, int increase) {
+ if (RegExpTree::kInfinity - previous < increase) {
+ return RegExpTree::kInfinity;
+ } else {
+ return previous + increase;
+ }
+}
+
RegExpAlternative::RegExpAlternative(ZoneList<RegExpTree*>* nodes)
: nodes_(nodes) {
ASSERT(nodes->length() > 1);
@@ -969,13 +984,10 @@ RegExpAlternative::RegExpAlternative(ZoneList<RegExpTree*>* nodes)
max_match_ = 0;
for (int i = 0; i < nodes->length(); i++) {
RegExpTree* node = nodes->at(i);
- min_match_ += node->min_match();
+ int node_min_match = node->min_match();
+ min_match_ = IncreaseBy(min_match_, node_min_match);
int node_max_match = node->max_match();
- if (kInfinity - max_match_ < node_max_match) {
- max_match_ = kInfinity;
- } else {
- max_match_ += node->max_match();
- }
+ max_match_ = IncreaseBy(max_match_, node_max_match);
}
}
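IncreaseBy replaces the open-coded overflow check so that both min_match_ and max_match_ saturate at RegExpTree::kInfinity instead of wrapping. A self-contained check of that saturation, with INT_MAX standing in for kInfinity:

// Same saturating addition as IncreaseBy above; INT_MAX stands in for
// RegExpTree::kInfinity.
#include <cassert>
#include <climits>

const int kInfinity = INT_MAX;

int IncreaseBy(int previous, int increase) {
  if (kInfinity - previous < increase) return kInfinity;  // would overflow
  return previous + increase;
}

int main() {
  assert(IncreaseBy(10, 5) == 15);
  assert(IncreaseBy(kInfinity - 1, 7) == kInfinity);  // clamps, never wraps
  assert(IncreaseBy(kInfinity, 1) == kInfinity);
  return 0;
}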
@@ -993,138 +1005,78 @@ CaseClause::CaseClause(Isolate* isolate,
}
-#define INCREASE_NODE_COUNT(NodeType) \
+#define REGULAR_NODE(NodeType) \
void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
increase_node_count(); \
}
+#define DONT_OPTIMIZE_NODE(NodeType) \
+ void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
+ increase_node_count(); \
+ add_flag(kDontOptimize); \
+ add_flag(kDontInline); \
+ add_flag(kDontSelfOptimize); \
+ }
+#define DONT_INLINE_NODE(NodeType) \
+ void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
+ increase_node_count(); \
+ add_flag(kDontInline); \
+ }
+#define DONT_SELFOPTIMIZE_NODE(NodeType) \
+ void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
+ increase_node_count(); \
+ add_flag(kDontSelfOptimize); \
+ }
-INCREASE_NODE_COUNT(VariableDeclaration)
-INCREASE_NODE_COUNT(FunctionDeclaration)
-INCREASE_NODE_COUNT(ModuleDeclaration)
-INCREASE_NODE_COUNT(ImportDeclaration)
-INCREASE_NODE_COUNT(ExportDeclaration)
-INCREASE_NODE_COUNT(ModuleLiteral)
-INCREASE_NODE_COUNT(ModuleVariable)
-INCREASE_NODE_COUNT(ModulePath)
-INCREASE_NODE_COUNT(ModuleUrl)
-INCREASE_NODE_COUNT(Block)
-INCREASE_NODE_COUNT(ExpressionStatement)
-INCREASE_NODE_COUNT(EmptyStatement)
-INCREASE_NODE_COUNT(IfStatement)
-INCREASE_NODE_COUNT(ContinueStatement)
-INCREASE_NODE_COUNT(BreakStatement)
-INCREASE_NODE_COUNT(ReturnStatement)
-INCREASE_NODE_COUNT(Conditional)
-INCREASE_NODE_COUNT(Literal)
-INCREASE_NODE_COUNT(ObjectLiteral)
-INCREASE_NODE_COUNT(Assignment)
-INCREASE_NODE_COUNT(Throw)
-INCREASE_NODE_COUNT(Property)
-INCREASE_NODE_COUNT(UnaryOperation)
-INCREASE_NODE_COUNT(CountOperation)
-INCREASE_NODE_COUNT(BinaryOperation)
-INCREASE_NODE_COUNT(CompareOperation)
-INCREASE_NODE_COUNT(ThisFunction)
-INCREASE_NODE_COUNT(Call)
-INCREASE_NODE_COUNT(CallNew)
-
-#undef INCREASE_NODE_COUNT
-
-
-void AstConstructionVisitor::VisitWithStatement(WithStatement* node) {
- increase_node_count();
- add_flag(kDontOptimize);
- add_flag(kDontInline);
-}
-
-
-void AstConstructionVisitor::VisitSwitchStatement(SwitchStatement* node) {
- increase_node_count();
- add_flag(kDontInline);
-}
-
-
-void AstConstructionVisitor::VisitDoWhileStatement(DoWhileStatement* node) {
- increase_node_count();
- add_flag(kDontSelfOptimize);
-}
-
-
-void AstConstructionVisitor::VisitWhileStatement(WhileStatement* node) {
- increase_node_count();
- add_flag(kDontSelfOptimize);
-}
-
-
-void AstConstructionVisitor::VisitForStatement(ForStatement* node) {
- increase_node_count();
- add_flag(kDontSelfOptimize);
-}
-
-
-void AstConstructionVisitor::VisitForInStatement(ForInStatement* node) {
- increase_node_count();
- add_flag(kDontSelfOptimize);
-}
-
-
-void AstConstructionVisitor::VisitTryCatchStatement(TryCatchStatement* node) {
- increase_node_count();
- add_flag(kDontOptimize);
- add_flag(kDontInline);
-}
-
-
-void AstConstructionVisitor::VisitTryFinallyStatement(
- TryFinallyStatement* node) {
- increase_node_count();
- add_flag(kDontOptimize);
- add_flag(kDontInline);
-}
-
-
-void AstConstructionVisitor::VisitDebuggerStatement(DebuggerStatement* node) {
- increase_node_count();
- add_flag(kDontOptimize);
- add_flag(kDontInline);
-}
-
-
-void AstConstructionVisitor::VisitFunctionLiteral(FunctionLiteral* node) {
- increase_node_count();
- add_flag(kDontInline);
-}
-
-
-void AstConstructionVisitor::VisitSharedFunctionInfoLiteral(
- SharedFunctionInfoLiteral* node) {
- increase_node_count();
- add_flag(kDontOptimize);
- add_flag(kDontInline);
-}
-
-
-void AstConstructionVisitor::VisitVariableProxy(VariableProxy* node) {
- increase_node_count();
- // In theory, we'd have to add:
- // if(node->var()->IsLookupSlot()) { add_flag(kDontInline); }
- // However, node->var() is usually not bound yet at VariableProxy creation
- // time, and LOOKUP variables only result from constructs that cannot
- // be inlined anyway.
-}
-
-
-void AstConstructionVisitor::VisitRegExpLiteral(RegExpLiteral* node) {
- increase_node_count();
- add_flag(kDontInline); // TODO(1322): Allow materialized literals.
-}
-
-
-void AstConstructionVisitor::VisitArrayLiteral(ArrayLiteral* node) {
- increase_node_count();
- add_flag(kDontInline); // TODO(1322): Allow materialized literals.
-}
-
+REGULAR_NODE(VariableDeclaration)
+REGULAR_NODE(FunctionDeclaration)
+REGULAR_NODE(Block)
+REGULAR_NODE(ExpressionStatement)
+REGULAR_NODE(EmptyStatement)
+REGULAR_NODE(IfStatement)
+REGULAR_NODE(ContinueStatement)
+REGULAR_NODE(BreakStatement)
+REGULAR_NODE(ReturnStatement)
+REGULAR_NODE(SwitchStatement)
+REGULAR_NODE(Conditional)
+REGULAR_NODE(Literal)
+REGULAR_NODE(ObjectLiteral)
+REGULAR_NODE(Assignment)
+REGULAR_NODE(Throw)
+REGULAR_NODE(Property)
+REGULAR_NODE(UnaryOperation)
+REGULAR_NODE(CountOperation)
+REGULAR_NODE(BinaryOperation)
+REGULAR_NODE(CompareOperation)
+REGULAR_NODE(ThisFunction)
+REGULAR_NODE(Call)
+REGULAR_NODE(CallNew)
+// In theory, for VariableProxy we'd have to add:
+// if (node->var()->IsLookupSlot()) add_flag(kDontInline);
+// But node->var() is usually not bound yet at VariableProxy creation time, and
+// LOOKUP variables only result from constructs that cannot be inlined anyway.
+REGULAR_NODE(VariableProxy)
+
+DONT_OPTIMIZE_NODE(ModuleDeclaration)
+DONT_OPTIMIZE_NODE(ImportDeclaration)
+DONT_OPTIMIZE_NODE(ExportDeclaration)
+DONT_OPTIMIZE_NODE(ModuleLiteral)
+DONT_OPTIMIZE_NODE(ModuleVariable)
+DONT_OPTIMIZE_NODE(ModulePath)
+DONT_OPTIMIZE_NODE(ModuleUrl)
+DONT_OPTIMIZE_NODE(WithStatement)
+DONT_OPTIMIZE_NODE(TryCatchStatement)
+DONT_OPTIMIZE_NODE(TryFinallyStatement)
+DONT_OPTIMIZE_NODE(DebuggerStatement)
+DONT_OPTIMIZE_NODE(SharedFunctionInfoLiteral)
+
+DONT_INLINE_NODE(FunctionLiteral)
+DONT_INLINE_NODE(RegExpLiteral) // TODO(1322): Allow materialized literals.
+DONT_INLINE_NODE(ArrayLiteral) // TODO(1322): Allow materialized literals.
+
+DONT_SELFOPTIMIZE_NODE(DoWhileStatement)
+DONT_SELFOPTIMIZE_NODE(WhileStatement)
+DONT_SELFOPTIMIZE_NODE(ForStatement)
+DONT_SELFOPTIMIZE_NODE(ForInStatement)
void AstConstructionVisitor::VisitCallRuntime(CallRuntime* node) {
increase_node_count();
@@ -1142,6 +1094,11 @@ void AstConstructionVisitor::VisitCallRuntime(CallRuntime* node) {
}
}
+#undef REGULAR_NODE
+#undef DONT_OPTIMIZE_NODE
+#undef DONT_INLINE_NODE
+#undef DONT_SELFOPTIMIZE_NODE
+
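The hand-written visitor bodies collapse into four macros that bump the node count and tag the function with combinations of kDontOptimize, kDontInline and kDontSelfOptimize. A toy version of the same macro-generated visitor table; only the flag names are taken from the source:

// Toy visitor; only the flag names come from the source, the rest is
// illustrative.
#include <iostream>

enum Flag { kDontOptimize = 1, kDontInline = 2, kDontSelfOptimize = 4 };

struct ToyVisitor {
  int node_count = 0;
  int flags = 0;
  void increase_node_count() { ++node_count; }
  void add_flag(Flag f) { flags |= f; }

#define REGULAR_NODE(NodeType) \
  void Visit##NodeType() { increase_node_count(); }
#define DONT_SELFOPTIMIZE_NODE(NodeType) \
  void Visit##NodeType() { increase_node_count(); add_flag(kDontSelfOptimize); }

  REGULAR_NODE(Block)
  DONT_SELFOPTIMIZE_NODE(ForStatement)

#undef REGULAR_NODE
#undef DONT_SELFOPTIMIZE_NODE
};

int main() {
  ToyVisitor v;
  v.VisitBlock();
  v.VisitForStatement();  // a loop disables self-optimization for the function
  std::cout << "nodes=" << v.node_count << " flags=" << v.flags << "\n";  // 2, 4
}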
Handle<String> Literal::ToString() {
if (handle_->IsString()) return Handle<String>::cast(handle_);
diff --git a/deps/v8/src/ast.h b/deps/v8/src/ast.h
index b827302ebd..02ece7fe61 100644
--- a/deps/v8/src/ast.h
+++ b/deps/v8/src/ast.h
@@ -266,16 +266,17 @@ class Statement: public AstNode {
class SmallMapList {
public:
SmallMapList() {}
- explicit SmallMapList(int capacity) : list_(capacity) {}
+ SmallMapList(int capacity, Zone* zone) : list_(capacity, zone) {}
- void Reserve(int capacity) { list_.Reserve(capacity); }
+ void Reserve(int capacity, Zone* zone) { list_.Reserve(capacity, zone); }
void Clear() { list_.Clear(); }
+ void Sort() { list_.Sort(); }
bool is_empty() const { return list_.is_empty(); }
int length() const { return list_.length(); }
- void Add(Handle<Map> handle) {
- list_.Add(handle.location());
+ void Add(Handle<Map> handle, Zone* zone) {
+ list_.Add(handle.location(), zone);
}
Handle<Map> at(int i) const {
@@ -415,13 +416,15 @@ class Block: public BreakableStatement {
public:
DECLARE_NODE_TYPE(Block)
- void AddStatement(Statement* statement) { statements_.Add(statement); }
+ void AddStatement(Statement* statement, Zone* zone) {
+ statements_.Add(statement, zone);
+ }
ZoneList<Statement*>* statements() { return &statements_; }
bool is_initializer_block() const { return is_initializer_block_; }
- Scope* block_scope() const { return block_scope_; }
- void set_block_scope(Scope* block_scope) { block_scope_ = block_scope; }
+ Scope* scope() const { return scope_; }
+ void set_scope(Scope* scope) { scope_ = scope; }
protected:
template<class> friend class AstNodeFactory;
@@ -429,17 +432,18 @@ class Block: public BreakableStatement {
Block(Isolate* isolate,
ZoneStringList* labels,
int capacity,
- bool is_initializer_block)
+ bool is_initializer_block,
+ Zone* zone)
: BreakableStatement(isolate, labels, TARGET_FOR_NAMED_ONLY),
- statements_(capacity),
+ statements_(capacity, zone),
is_initializer_block_(is_initializer_block),
- block_scope_(NULL) {
+ scope_(NULL) {
}
private:
ZoneList<Statement*> statements_;
bool is_initializer_block_;
- Scope* block_scope_;
+ Scope* scope_;
};
@@ -594,7 +598,7 @@ class Module: public AstNode {
Interface* interface() const { return interface_; }
protected:
- Module() : interface_(Interface::NewModule()) {}
+ explicit Module(Zone* zone) : interface_(Interface::NewModule(zone)) {}
explicit Module(Interface* interface) : interface_(interface) {}
private:
@@ -607,6 +611,7 @@ class ModuleLiteral: public Module {
DECLARE_NODE_TYPE(ModuleLiteral)
Block* body() const { return body_; }
+ Handle<Context> context() const { return context_; }
protected:
template<class> friend class AstNodeFactory;
@@ -618,6 +623,7 @@ class ModuleLiteral: public Module {
private:
Block* body_;
+ Handle<Context> context_;
};
@@ -647,8 +653,9 @@ class ModulePath: public Module {
protected:
template<class> friend class AstNodeFactory;
- ModulePath(Module* module, Handle<String> name)
- : module_(module),
+ ModulePath(Module* module, Handle<String> name, Zone* zone)
+ : Module(zone),
+ module_(module),
name_(name) {
}
@@ -667,7 +674,8 @@ class ModuleUrl: public Module {
protected:
template<class> friend class AstNodeFactory;
- explicit ModuleUrl(Handle<String> url) : url_(url) {
+ ModuleUrl(Handle<String> url, Zone* zone)
+ : Module(zone), url_(url) {
}
private:
@@ -1095,12 +1103,12 @@ class IfStatement: public Statement {
// stack in the compiler; this should probably be reworked.
class TargetCollector: public AstNode {
public:
- TargetCollector() : targets_(0) { }
+ explicit TargetCollector(Zone* zone) : targets_(0, zone) { }
// Adds a jump target to the collector. The collector stores a pointer not
// a copy of the target to make binding work, so make sure not to pass in
// references to something on the stack.
- void AddTarget(Label* target);
+ void AddTarget(Label* target, Zone* zone);
// Virtual behaviour. TargetCollectors are never part of the AST.
virtual void Accept(AstVisitor* v) { UNREACHABLE(); }
@@ -1358,7 +1366,7 @@ class ObjectLiteral: public MaterializedLiteral {
// Mark all computed expressions that are bound to a key that
// is shadowed by a later occurrence of the same key. For the
// marked expressions, no store code is emitted.
- void CalculateEmitStore();
+ void CalculateEmitStore(Zone* zone);
enum Flags {
kNoFlags = 0,
@@ -1523,7 +1531,7 @@ class Property: public Expression {
bool IsFunctionPrototype() const { return is_function_prototype_; }
// Type feedback information.
- void RecordTypeFeedback(TypeFeedbackOracle* oracle);
+ void RecordTypeFeedback(TypeFeedbackOracle* oracle, Zone* zone);
virtual bool IsMonomorphic() { return is_monomorphic_; }
virtual SmallMapList* GetReceiverTypes() { return &receiver_types_; }
bool IsArrayLength() { return is_array_length_; }
@@ -1796,7 +1804,7 @@ class CountOperation: public Expression {
virtual void MarkAsStatement() { is_prefix_ = true; }
- void RecordTypeFeedback(TypeFeedbackOracle* oracle);
+ void RecordTypeFeedback(TypeFeedbackOracle* oracle, Zone* zone);
virtual bool IsMonomorphic() { return is_monomorphic_; }
virtual SmallMapList* GetReceiverTypes() { return &receiver_types_; }
@@ -1949,7 +1957,7 @@ class Assignment: public Expression {
void mark_block_end() { block_end_ = true; }
// Type feedback information.
- void RecordTypeFeedback(TypeFeedbackOracle* oracle);
+ void RecordTypeFeedback(TypeFeedbackOracle* oracle, Zone* zone);
virtual bool IsMonomorphic() { return is_monomorphic_; }
virtual SmallMapList* GetReceiverTypes() { return &receiver_types_; }
@@ -2208,8 +2216,8 @@ class RegExpTree: public ZoneObject {
// Returns the interval of registers used for captures within this
// expression.
virtual Interval CaptureRegisters() { return Interval::Empty(); }
- virtual void AppendToText(RegExpText* text);
- SmartArrayPointer<const char> ToString();
+ virtual void AppendToText(RegExpText* text, Zone* zone);
+ SmartArrayPointer<const char> ToString(Zone* zone);
#define MAKE_ASTYPE(Name) \
virtual RegExp##Name* As##Name(); \
virtual bool Is##Name();
@@ -2294,7 +2302,7 @@ class CharacterSet BASE_EMBEDDED {
explicit CharacterSet(ZoneList<CharacterRange>* ranges)
: ranges_(ranges),
standard_set_type_(0) {}
- ZoneList<CharacterRange>* ranges();
+ ZoneList<CharacterRange>* ranges(Zone* zone);
uc16 standard_set_type() { return standard_set_type_; }
void set_standard_set_type(uc16 special_set_type) {
standard_set_type_ = special_set_type;
@@ -2325,11 +2333,11 @@ class RegExpCharacterClass: public RegExpTree {
virtual bool IsTextElement() { return true; }
virtual int min_match() { return 1; }
virtual int max_match() { return 1; }
- virtual void AppendToText(RegExpText* text);
+ virtual void AppendToText(RegExpText* text, Zone* zone);
CharacterSet character_set() { return set_; }
// TODO(lrn): Remove need for complex version if is_standard that
// recognizes a mangled standard set and just do { return set_.is_special(); }
- bool is_standard();
+ bool is_standard(Zone* zone);
// Returns a value representing the standard character set if is_standard()
// returns true.
// Currently used values are:
@@ -2342,7 +2350,7 @@ class RegExpCharacterClass: public RegExpTree {
// . : non-unicode non-newline
// * : All characters
uc16 standard_type() { return set_.standard_set_type(); }
- ZoneList<CharacterRange>* ranges() { return set_.ranges(); }
+ ZoneList<CharacterRange>* ranges(Zone* zone) { return set_.ranges(zone); }
bool is_negated() { return is_negated_; }
private:
@@ -2362,7 +2370,7 @@ class RegExpAtom: public RegExpTree {
virtual bool IsTextElement() { return true; }
virtual int min_match() { return data_.length(); }
virtual int max_match() { return data_.length(); }
- virtual void AppendToText(RegExpText* text);
+ virtual void AppendToText(RegExpText* text, Zone* zone);
Vector<const uc16> data() { return data_; }
int length() { return data_.length(); }
private:
@@ -2372,7 +2380,7 @@ class RegExpAtom: public RegExpTree {
class RegExpText: public RegExpTree {
public:
- RegExpText() : elements_(2), length_(0) {}
+ explicit RegExpText(Zone* zone) : elements_(2, zone), length_(0) {}
virtual void* Accept(RegExpVisitor* visitor, void* data);
virtual RegExpNode* ToNode(RegExpCompiler* compiler,
RegExpNode* on_success);
@@ -2381,9 +2389,9 @@ class RegExpText: public RegExpTree {
virtual bool IsTextElement() { return true; }
virtual int min_match() { return length_; }
virtual int max_match() { return length_; }
- virtual void AppendToText(RegExpText* text);
- void AddElement(TextElement elm) {
- elements_.Add(elm);
+ virtual void AppendToText(RegExpText* text, Zone* zone);
+ void AddElement(TextElement elm, Zone* zone) {
+ elements_.Add(elm, zone);
length_ += elm.length();
}
ZoneList<TextElement>* elements() { return &elements_; }
@@ -2691,20 +2699,21 @@ class AstNodeFactory BASE_EMBEDDED {
}
ModulePath* NewModulePath(Module* origin, Handle<String> name) {
- ModulePath* module = new(zone_) ModulePath(origin, name);
+ ModulePath* module = new(zone_) ModulePath(origin, name, zone_);
VISIT_AND_RETURN(ModulePath, module)
}
ModuleUrl* NewModuleUrl(Handle<String> url) {
- ModuleUrl* module = new(zone_) ModuleUrl(url);
+ ModuleUrl* module = new(zone_) ModuleUrl(url, zone_);
VISIT_AND_RETURN(ModuleUrl, module)
}
Block* NewBlock(ZoneStringList* labels,
int capacity,
- bool is_initializer_block) {
+ bool is_initializer_block,
+ Zone* zone) {
Block* block = new(zone_) Block(
- isolate_, labels, capacity, is_initializer_block);
+ isolate_, labels, capacity, is_initializer_block, zone);
VISIT_AND_RETURN(Block, block)
}
diff --git a/deps/v8/src/bootstrapper.cc b/deps/v8/src/bootstrapper.cc
index 0e95b4b839..33cbb8149f 100644
--- a/deps/v8/src/bootstrapper.cc
+++ b/deps/v8/src/bootstrapper.cc
@@ -484,8 +484,8 @@ Handle<JSFunction> Genesis::CreateEmptyFunction(Isolate* isolate) {
global_context()->set_initial_object_prototype(*prototype);
SetPrototype(object_fun, prototype);
- object_function_map->
- set_instance_descriptors(heap->empty_descriptor_array());
+ object_function_map->set_instance_descriptors(
+ heap->empty_descriptor_array());
}
// Allocate the empty function as the prototype for function ECMAScript
@@ -516,12 +516,10 @@ Handle<JSFunction> Genesis::CreateEmptyFunction(Isolate* isolate) {
function_instance_map_writable_prototype_->set_prototype(*empty_function);
// Allocate the function map first and then patch the prototype later
- Handle<Map> empty_fm = factory->CopyMapDropDescriptors(
- function_without_prototype_map);
- empty_fm->set_instance_descriptors(
- function_without_prototype_map->instance_descriptors());
- empty_fm->set_prototype(global_context()->object_function()->prototype());
- empty_function->set_map(*empty_fm);
+ Handle<Map> empty_function_map = CreateFunctionMap(DONT_ADD_PROTOTYPE);
+ empty_function_map->set_prototype(
+ global_context()->object_function()->prototype());
+ empty_function->set_map(*empty_function_map);
return empty_function;
}
@@ -1011,7 +1009,7 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
proto_map->set_prototype(global_context()->initial_object_prototype());
Handle<JSObject> proto = factory->NewJSObjectFromMap(proto_map);
proto->InObjectPropertyAtPut(JSRegExp::kSourceFieldIndex,
- heap->empty_string());
+ heap->query_colon_symbol());
proto->InObjectPropertyAtPut(JSRegExp::kGlobalFieldIndex,
heap->false_value());
proto->InObjectPropertyAtPut(JSRegExp::kIgnoreCaseFieldIndex,
@@ -1094,7 +1092,7 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
// Check the state of the object.
ASSERT(result->HasFastProperties());
- ASSERT(result->HasFastElements());
+ ASSERT(result->HasFastObjectElements());
#endif
}
@@ -1187,7 +1185,7 @@ bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
// Check the state of the object.
ASSERT(result->HasFastProperties());
- ASSERT(result->HasFastElements());
+ ASSERT(result->HasFastObjectElements());
#endif
}
@@ -1634,10 +1632,11 @@ bool Genesis::InstallNatives() {
// through a common bottleneck that would make the SMI_ONLY -> FAST_ELEMENT
// transition easy to trap. Moreover, they rarely are smi-only.
MaybeObject* maybe_map =
- array_function->initial_map()->CopyDropTransitions();
+ array_function->initial_map()->CopyDropTransitions(
+ DescriptorArray::MAY_BE_SHARED);
Map* new_map;
- if (!maybe_map->To<Map>(&new_map)) return false;
- new_map->set_elements_kind(FAST_ELEMENTS);
+ if (!maybe_map->To(&new_map)) return false;
+ new_map->set_elements_kind(FAST_HOLEY_ELEMENTS);
array_function->set_initial_map(new_map);
// Make "length" magic on instances.
@@ -2094,14 +2093,10 @@ bool Genesis::InstallJSBuiltins(Handle<JSBuiltinsObject> builtins) {
Handle<JSFunction> function
= Handle<JSFunction>(JSFunction::cast(function_object));
builtins->set_javascript_builtin(id, *function);
- Handle<SharedFunctionInfo> shared
- = Handle<SharedFunctionInfo>(function->shared());
- if (!SharedFunctionInfo::EnsureCompiled(shared, CLEAR_EXCEPTION)) {
+ if (!JSFunction::CompileLazy(function, CLEAR_EXCEPTION)) {
return false;
}
- // Set the code object on the function object.
- function->ReplaceCode(function->shared()->code());
- builtins->set_javascript_builtin_code(id, shared->code());
+ builtins->set_javascript_builtin_code(id, function->shared()->code());
}
return true;
}
@@ -2159,7 +2154,7 @@ void Genesis::TransferNamedProperties(Handle<JSObject> from,
Handle<DescriptorArray> descs =
Handle<DescriptorArray>(from->map()->instance_descriptors());
for (int i = 0; i < descs->number_of_descriptors(); i++) {
- PropertyDetails details = PropertyDetails(descs->GetDetails(i));
+ PropertyDetails details = descs->GetDetails(i);
switch (details.type()) {
case FIELD: {
HandleScope inner;
@@ -2197,7 +2192,6 @@ void Genesis::TransferNamedProperties(Handle<JSObject> from,
break;
}
case MAP_TRANSITION:
- case ELEMENTS_TRANSITION:
case CONSTANT_TRANSITION:
case NULL_DESCRIPTOR:
// Ignore non-properties.
diff --git a/deps/v8/src/builtins.cc b/deps/v8/src/builtins.cc
index 01e88f5593..64ec3d9fcc 100644
--- a/deps/v8/src/builtins.cc
+++ b/deps/v8/src/builtins.cc
@@ -200,9 +200,12 @@ static MaybeObject* ArrayCodeGenericCommon(Arguments* args,
array->set_elements(heap->empty_fixed_array());
if (!FLAG_smi_only_arrays) {
Context* global_context = isolate->context()->global_context();
- if (array->GetElementsKind() == FAST_SMI_ONLY_ELEMENTS &&
- !global_context->object_js_array_map()->IsUndefined()) {
- array->set_map(Map::cast(global_context->object_js_array_map()));
+ if (array->GetElementsKind() == GetInitialFastElementsKind() &&
+ !global_context->js_array_maps()->IsUndefined()) {
+ FixedArray* map_array =
+ FixedArray::cast(global_context->js_array_maps());
+ array->set_map(Map::cast(map_array->
+ get(TERMINAL_FAST_ELEMENTS_KIND)));
}
}
} else {
@@ -222,6 +225,13 @@ static MaybeObject* ArrayCodeGenericCommon(Arguments* args,
{ MaybeObject* maybe_obj = heap->AllocateFixedArrayWithHoles(len);
if (!maybe_obj->ToObject(&fixed_array)) return maybe_obj;
}
+ ElementsKind elements_kind = array->GetElementsKind();
+ if (!IsFastHoleyElementsKind(elements_kind)) {
+ elements_kind = GetHoleyElementsKind(elements_kind);
+ MaybeObject* maybe_array =
+ array->TransitionElementsKind(elements_kind);
+ if (maybe_array->IsFailure()) return maybe_array;
+ }
// We do not use SetContent to skip the unnecessary elements type check.
array->set_elements(FixedArray::cast(fixed_array));
array->set_length(Smi::cast(obj));
@@ -250,7 +260,7 @@ static MaybeObject* ArrayCodeGenericCommon(Arguments* args,
// Allocate an appropriately typed elements array.
MaybeObject* maybe_elms;
ElementsKind elements_kind = array->GetElementsKind();
- if (elements_kind == FAST_DOUBLE_ELEMENTS) {
+ if (IsFastDoubleElementsKind(elements_kind)) {
maybe_elms = heap->AllocateUninitializedFixedDoubleArray(
number_of_elements);
} else {
@@ -261,13 +271,15 @@ static MaybeObject* ArrayCodeGenericCommon(Arguments* args,
// Fill in the content
switch (array->GetElementsKind()) {
- case FAST_SMI_ONLY_ELEMENTS: {
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_SMI_ELEMENTS: {
FixedArray* smi_elms = FixedArray::cast(elms);
for (int index = 0; index < number_of_elements; index++) {
smi_elms->set(index, (*args)[index+1], SKIP_WRITE_BARRIER);
}
break;
}
+ case FAST_HOLEY_ELEMENTS:
case FAST_ELEMENTS: {
AssertNoAllocation no_gc;
WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
@@ -277,6 +289,7 @@ static MaybeObject* ArrayCodeGenericCommon(Arguments* args,
}
break;
}
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case FAST_DOUBLE_ELEMENTS: {
FixedDoubleArray* double_elms = FixedDoubleArray::cast(elms);
for (int index = 0; index < number_of_elements; index++) {
@@ -412,7 +425,7 @@ static inline MaybeObject* EnsureJSArrayWithWritableFastElements(
HeapObject* elms = array->elements();
Map* map = elms->map();
if (map == heap->fixed_array_map()) {
- if (args == NULL || array->HasFastElements()) return elms;
+ if (args == NULL || array->HasFastObjectElements()) return elms;
if (array->HasFastDoubleElements()) {
ASSERT(elms == heap->empty_fixed_array());
MaybeObject* maybe_transition =
@@ -422,7 +435,7 @@ static inline MaybeObject* EnsureJSArrayWithWritableFastElements(
}
} else if (map == heap->fixed_cow_array_map()) {
MaybeObject* maybe_writable_result = array->EnsureWritableFastElements();
- if (args == NULL || array->HasFastElements() ||
+ if (args == NULL || array->HasFastObjectElements() ||
maybe_writable_result->IsFailure()) {
return maybe_writable_result;
}
@@ -516,8 +529,8 @@ BUILTIN(ArrayPush) {
}
FixedArray* new_elms = FixedArray::cast(obj);
- CopyObjectToObjectElements(elms, FAST_ELEMENTS, 0,
- new_elms, FAST_ELEMENTS, 0, len);
+ ElementsKind kind = array->GetElementsKind();
+ CopyObjectToObjectElements(elms, kind, 0, new_elms, kind, 0, len);
FillWithHoles(heap, new_elms, new_length, capacity);
elms = new_elms;
@@ -588,7 +601,7 @@ BUILTIN(ArrayShift) {
}
FixedArray* elms = FixedArray::cast(elms_obj);
JSArray* array = JSArray::cast(receiver);
- ASSERT(array->HasFastTypeElements());
+ ASSERT(array->HasFastSmiOrObjectElements());
int len = Smi::cast(array->length())->value();
if (len == 0) return heap->undefined_value();
@@ -630,7 +643,7 @@ BUILTIN(ArrayUnshift) {
}
FixedArray* elms = FixedArray::cast(elms_obj);
JSArray* array = JSArray::cast(receiver);
- ASSERT(array->HasFastTypeElements());
+ ASSERT(array->HasFastSmiOrObjectElements());
int len = Smi::cast(array->length())->value();
int to_add = args.length() - 1;
@@ -652,8 +665,8 @@ BUILTIN(ArrayUnshift) {
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
FixedArray* new_elms = FixedArray::cast(obj);
- CopyObjectToObjectElements(elms, FAST_ELEMENTS, 0,
- new_elms, FAST_ELEMENTS, to_add, len);
+ ElementsKind kind = array->GetElementsKind();
+ CopyObjectToObjectElements(elms, kind, 0, new_elms, kind, to_add, len);
FillWithHoles(heap, new_elms, new_length, capacity);
elms = new_elms;
array->set_elements(elms);
@@ -682,7 +695,7 @@ BUILTIN(ArraySlice) {
int len = -1;
if (receiver->IsJSArray()) {
JSArray* array = JSArray::cast(receiver);
- if (!array->HasFastTypeElements() ||
+ if (!array->HasFastSmiOrObjectElements() ||
!IsJSArrayFastElementMovingAllowed(heap, array)) {
return CallJsBuiltin(isolate, "ArraySlice", args);
}
@@ -698,7 +711,7 @@ BUILTIN(ArraySlice) {
bool is_arguments_object_with_fast_elements =
receiver->IsJSObject()
&& JSObject::cast(receiver)->map() == arguments_map
- && JSObject::cast(receiver)->HasFastTypeElements();
+ && JSObject::cast(receiver)->HasFastSmiOrObjectElements();
if (!is_arguments_object_with_fast_elements) {
return CallJsBuiltin(isolate, "ArraySlice", args);
}
@@ -763,9 +776,9 @@ BUILTIN(ArraySlice) {
JSArray* result_array;
if (!maybe_array->To(&result_array)) return maybe_array;
- CopyObjectToObjectElements(elms, FAST_ELEMENTS, k,
+ CopyObjectToObjectElements(elms, elements_kind, k,
FixedArray::cast(result_array->elements()),
- FAST_ELEMENTS, 0, result_len);
+ elements_kind, 0, result_len);
return result_array;
}
@@ -786,7 +799,7 @@ BUILTIN(ArraySplice) {
}
FixedArray* elms = FixedArray::cast(elms_obj);
JSArray* array = JSArray::cast(receiver);
- ASSERT(array->HasFastTypeElements());
+ ASSERT(array->HasFastSmiOrObjectElements());
int len = Smi::cast(array->length())->value();
@@ -837,9 +850,9 @@ BUILTIN(ArraySplice) {
{
// Fill newly created array.
- CopyObjectToObjectElements(elms, FAST_ELEMENTS, actual_start,
+ CopyObjectToObjectElements(elms, elements_kind, actual_start,
FixedArray::cast(result_array->elements()),
- FAST_ELEMENTS, 0, actual_delete_count);
+ elements_kind, 0, actual_delete_count);
}
int item_count = (n_arguments > 1) ? (n_arguments - 2) : 0;
@@ -888,12 +901,13 @@ BUILTIN(ArraySplice) {
{
// Copy the part before actual_start as is.
- CopyObjectToObjectElements(elms, FAST_ELEMENTS, 0,
- new_elms, FAST_ELEMENTS, 0, actual_start);
+ ElementsKind kind = array->GetElementsKind();
+ CopyObjectToObjectElements(elms, kind, 0,
+ new_elms, kind, 0, actual_start);
const int to_copy = len - actual_delete_count - actual_start;
- CopyObjectToObjectElements(elms, FAST_ELEMENTS,
+ CopyObjectToObjectElements(elms, kind,
actual_start + actual_delete_count,
- new_elms, FAST_ELEMENTS,
+ new_elms, kind,
actual_start + item_count, to_copy);
}
@@ -940,11 +954,12 @@ BUILTIN(ArrayConcat) {
// and calculating total length.
int n_arguments = args.length();
int result_len = 0;
- ElementsKind elements_kind = FAST_SMI_ONLY_ELEMENTS;
+ ElementsKind elements_kind = GetInitialFastElementsKind();
for (int i = 0; i < n_arguments; i++) {
Object* arg = args[i];
- if (!arg->IsJSArray() || !JSArray::cast(arg)->HasFastTypeElements()
- || JSArray::cast(arg)->GetPrototype() != array_proto) {
+ if (!arg->IsJSArray() ||
+ !JSArray::cast(arg)->HasFastSmiOrObjectElements() ||
+ JSArray::cast(arg)->GetPrototype() != array_proto) {
return CallJsBuiltin(isolate, "ArrayConcat", args);
}
@@ -961,8 +976,18 @@ BUILTIN(ArrayConcat) {
return CallJsBuiltin(isolate, "ArrayConcat", args);
}
- if (!JSArray::cast(arg)->HasFastSmiOnlyElements()) {
- elements_kind = FAST_ELEMENTS;
+ if (!JSArray::cast(arg)->HasFastSmiElements()) {
+ if (IsFastSmiElementsKind(elements_kind)) {
+ if (IsFastHoleyElementsKind(elements_kind)) {
+ elements_kind = FAST_HOLEY_ELEMENTS;
+ } else {
+ elements_kind = FAST_ELEMENTS;
+ }
+ }
+ }
+
+ if (JSArray::cast(arg)->HasFastHoleyElements()) {
+ elements_kind = GetHoleyElementsKind(elements_kind);
}
}
@@ -982,8 +1007,8 @@ BUILTIN(ArrayConcat) {
JSArray* array = JSArray::cast(args[i]);
int len = Smi::cast(array->length())->value();
FixedArray* elms = FixedArray::cast(array->elements());
- CopyObjectToObjectElements(elms, FAST_ELEMENTS, 0,
- result_elms, FAST_ELEMENTS,
+ CopyObjectToObjectElements(elms, elements_kind, 0,
+ result_elms, elements_kind,
start_pos, len);
start_pos += len;
}
@@ -1103,7 +1128,7 @@ MUST_USE_RESULT static MaybeObject* HandleApiCallHelper(
CustomArguments custom(isolate);
v8::ImplementationUtilities::PrepareArgumentsData(custom.end(),
- data_obj, *function, raw_holder);
+ isolate, data_obj, *function, raw_holder);
v8::Arguments new_args = v8::ImplementationUtilities::NewArguments(
custom.end(),
@@ -1143,68 +1168,6 @@ BUILTIN(HandleApiCallConstruct) {
}
-#ifdef DEBUG
-
-static void VerifyTypeCheck(Handle<JSObject> object,
- Handle<JSFunction> function) {
- ASSERT(function->shared()->IsApiFunction());
- FunctionTemplateInfo* info = function->shared()->get_api_func_data();
- if (info->signature()->IsUndefined()) return;
- SignatureInfo* signature = SignatureInfo::cast(info->signature());
- Object* receiver_type = signature->receiver();
- if (receiver_type->IsUndefined()) return;
- FunctionTemplateInfo* type = FunctionTemplateInfo::cast(receiver_type);
- ASSERT(object->IsInstanceOf(type));
-}
-
-#endif
-
-
-BUILTIN(FastHandleApiCall) {
- ASSERT(!CalledAsConstructor(isolate));
- Heap* heap = isolate->heap();
- const bool is_construct = false;
-
- // We expect four more arguments: callback, function, call data, and holder.
- const int args_length = args.length() - 4;
- ASSERT(args_length >= 0);
-
- Object* callback_obj = args[args_length];
-
- v8::Arguments new_args = v8::ImplementationUtilities::NewArguments(
- &args[args_length + 1],
- &args[0] - 1,
- args_length - 1,
- is_construct);
-
-#ifdef DEBUG
- VerifyTypeCheck(Utils::OpenHandle(*new_args.Holder()),
- Utils::OpenHandle(*new_args.Callee()));
-#endif
- HandleScope scope(isolate);
- Object* result;
- v8::Handle<v8::Value> value;
- {
- // Leaving JavaScript.
- VMState state(isolate, EXTERNAL);
- ExternalCallbackScope call_scope(isolate,
- v8::ToCData<Address>(callback_obj));
- v8::InvocationCallback callback =
- v8::ToCData<v8::InvocationCallback>(callback_obj);
-
- value = callback(new_args);
- }
- if (value.IsEmpty()) {
- result = heap->undefined_value();
- } else {
- result = *reinterpret_cast<Object**>(*value);
- }
-
- RETURN_IF_SCHEDULED_EXCEPTION(isolate);
- return result;
-}
-
-
// Helper function to handle calls to non-function objects created through the
// API. The object can be called as either a constructor (using new) or just as
// a function (without new).
@@ -1243,7 +1206,7 @@ MUST_USE_RESULT static MaybeObject* HandleApiCallAsFunctionOrConstructor(
CustomArguments custom(isolate);
v8::ImplementationUtilities::PrepareArgumentsData(custom.end(),
- call_data->data(), constructor, obj);
+ isolate, call_data->data(), constructor, obj);
v8::Arguments new_args = v8::ImplementationUtilities::NewArguments(
custom.end(),
&args[0] - 1,
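
The ArrayConcat hunk above widens the accumulated elements kind as it scans the arguments: smi-only kinds become object kinds when any argument holds non-smis, and packed kinds become holey when any argument has holes; transitions only move toward more general kinds. A minimal standalone sketch of that widening rule, with illustrative (non-V8) helper names:

    #include <cassert>

    // Illustrative subset of the fast elements kinds used in the hunk above.
    enum ElementsKind {
      FAST_SMI_ELEMENTS,
      FAST_HOLEY_SMI_ELEMENTS,
      FAST_ELEMENTS,
      FAST_HOLEY_ELEMENTS
    };

    static bool IsSmiKind(ElementsKind k) {
      return k == FAST_SMI_ELEMENTS || k == FAST_HOLEY_SMI_ELEMENTS;
    }

    static bool IsHoleyKind(ElementsKind k) {
      return k == FAST_HOLEY_SMI_ELEMENTS || k == FAST_HOLEY_ELEMENTS;
    }

    // Widen 'result' so it can also describe an argument with the given
    // properties: smi-only -> object when the argument holds non-smis,
    // packed -> holey when the argument has holes. Never narrows.
    static ElementsKind Merge(ElementsKind result,
                              bool arg_has_non_smi,
                              bool arg_is_holey) {
      if (arg_has_non_smi && IsSmiKind(result)) {
        result = IsHoleyKind(result) ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
      }
      if (arg_is_holey && !IsHoleyKind(result)) {
        result = IsSmiKind(result) ? FAST_HOLEY_SMI_ELEMENTS
                                   : FAST_HOLEY_ELEMENTS;
      }
      return result;
    }

    int main() {
      ElementsKind k = FAST_SMI_ELEMENTS;
      k = Merge(k, /*arg_has_non_smi=*/true, /*arg_is_holey=*/false);
      assert(k == FAST_ELEMENTS);
      k = Merge(k, /*arg_has_non_smi=*/false, /*arg_is_holey=*/true);
      assert(k == FAST_HOLEY_ELEMENTS);
      return 0;
    }
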
diff --git a/deps/v8/src/builtins.h b/deps/v8/src/builtins.h
index f079139d45..3ea33938eb 100644
--- a/deps/v8/src/builtins.h
+++ b/deps/v8/src/builtins.h
@@ -56,7 +56,6 @@ enum BuiltinExtraArguments {
V(ArrayConcat, NO_EXTRA_ARGUMENTS) \
\
V(HandleApiCall, NEEDS_CALLED_FUNCTION) \
- V(FastHandleApiCall, NO_EXTRA_ARGUMENTS) \
V(HandleApiCallConstruct, NEEDS_CALLED_FUNCTION) \
V(HandleApiCallAsFunction, NO_EXTRA_ARGUMENTS) \
V(HandleApiCallAsConstructor, NO_EXTRA_ARGUMENTS) \
diff --git a/deps/v8/src/bytecodes-irregexp.h b/deps/v8/src/bytecodes-irregexp.h
index b13efb36f8..c7cc66e527 100644
--- a/deps/v8/src/bytecodes-irregexp.h
+++ b/deps/v8/src/bytecodes-irregexp.h
@@ -72,24 +72,23 @@ V(AND_CHECK_4_CHARS, 27, 16) /* bc8 pad24 uint32 uint32 addr32 */ \
V(AND_CHECK_CHAR, 28, 12) /* bc8 pad8 uint16 uint32 addr32 */ \
V(AND_CHECK_NOT_4_CHARS, 29, 16) /* bc8 pad24 uint32 uint32 addr32 */ \
V(AND_CHECK_NOT_CHAR, 30, 12) /* bc8 pad8 uint16 uint32 addr32 */ \
-V(MINUS_AND_CHECK_NOT_CHAR, 31, 12) /* bc8 pad8 uc16 uc16 addr32 */ \
-V(CHECK_LT, 32, 8) /* bc8 pad8 uc16 addr32 */ \
-V(CHECK_GT, 33, 8) /* bc8 pad8 uc16 addr32 */ \
-V(CHECK_NOT_BACK_REF, 34, 8) /* bc8 reg_idx24 addr32 */ \
-V(CHECK_NOT_BACK_REF_NO_CASE, 35, 8) /* bc8 reg_idx24 addr32 */ \
-V(CHECK_NOT_REGS_EQUAL, 36, 12) /* bc8 regidx24 reg_idx32 addr32 */ \
-V(LOOKUP_MAP1, 37, 12) /* bc8 pad8 start16 bit_map_addr32 addr32 */ \
-V(LOOKUP_MAP2, 38, 96) /* bc8 pad8 start16 half_nibble_map_addr32* */ \
-V(LOOKUP_MAP8, 39, 96) /* bc8 pad8 start16 byte_map addr32* */ \
-V(LOOKUP_HI_MAP8, 40, 96) /* bc8 start24 byte_map_addr32 addr32* */ \
-V(CHECK_REGISTER_LT, 41, 12) /* bc8 reg_idx24 value32 addr32 */ \
-V(CHECK_REGISTER_GE, 42, 12) /* bc8 reg_idx24 value32 addr32 */ \
-V(CHECK_REGISTER_EQ_POS, 43, 8) /* bc8 reg_idx24 addr32 */ \
-V(CHECK_AT_START, 44, 8) /* bc8 pad24 addr32 */ \
-V(CHECK_NOT_AT_START, 45, 8) /* bc8 pad24 addr32 */ \
-V(CHECK_GREEDY, 46, 8) /* bc8 pad24 addr32 */ \
-V(ADVANCE_CP_AND_GOTO, 47, 8) /* bc8 offset24 addr32 */ \
-V(SET_CURRENT_POSITION_FROM_END, 48, 4) /* bc8 idx24 */
+V(MINUS_AND_CHECK_NOT_CHAR, 31, 12) /* bc8 pad8 uc16 uc16 uc16 addr32 */ \
+V(CHECK_CHAR_IN_RANGE, 32, 12) /* bc8 pad24 uc16 uc16 addr32 */ \
+V(CHECK_CHAR_NOT_IN_RANGE, 33, 12) /* bc8 pad24 uc16 uc16 addr32 */ \
+V(CHECK_BIT_IN_TABLE, 34, 24) /* bc8 pad24 addr32 bits128 */ \
+V(CHECK_LT, 35, 8) /* bc8 pad8 uc16 addr32 */ \
+V(CHECK_GT, 36, 8) /* bc8 pad8 uc16 addr32 */ \
+V(CHECK_NOT_BACK_REF, 37, 8) /* bc8 reg_idx24 addr32 */ \
+V(CHECK_NOT_BACK_REF_NO_CASE, 38, 8) /* bc8 reg_idx24 addr32 */ \
+V(CHECK_NOT_REGS_EQUAL, 39, 12) /* bc8 regidx24 reg_idx32 addr32 */ \
+V(CHECK_REGISTER_LT, 40, 12) /* bc8 reg_idx24 value32 addr32 */ \
+V(CHECK_REGISTER_GE, 41, 12) /* bc8 reg_idx24 value32 addr32 */ \
+V(CHECK_REGISTER_EQ_POS, 42, 8) /* bc8 reg_idx24 addr32 */ \
+V(CHECK_AT_START, 43, 8) /* bc8 pad24 addr32 */ \
+V(CHECK_NOT_AT_START, 44, 8) /* bc8 pad24 addr32 */ \
+V(CHECK_GREEDY, 45, 8) /* bc8 pad24 addr32 */ \
+V(ADVANCE_CP_AND_GOTO, 46, 8) /* bc8 offset24 addr32 */ \
+V(SET_CURRENT_POSITION_FROM_END, 47, 4) /* bc8 idx24 */
#define DECLARE_BYTECODES(name, code, length) \
static const int BC_##name = code;
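
For context on the renumbered table above, each V(name, code, length) entry is fed through the DECLARE_BYTECODES X-macro shown at the end of the hunk to produce a BC_ constant; a hand-expanded sketch for two of the new entries (the macro list name below is illustrative, not V8's):

    // Hand-written illustration of what DECLARE_BYTECODES generates for two
    // of the newly inserted entries; real code applies it to the full table.
    #define ILLUSTRATIVE_BYTECODES(V)   \
      V(CHECK_CHAR_IN_RANGE, 32, 12)    \
      V(CHECK_BIT_IN_TABLE, 34, 24)

    #define DECLARE_BYTECODES(name, code, length) \
      static const int BC_##name = code;
    ILLUSTRATIVE_BYTECODES(DECLARE_BYTECODES)
    #undef DECLARE_BYTECODES
    #undef ILLUSTRATIVE_BYTECODES

    // The second field is the bytecode value, the third its size in bytes.
    static_assert(BC_CHECK_CHAR_IN_RANGE == 32, "renumbered value");
    static_assert(BC_CHECK_BIT_IN_TABLE == 34, "renumbered value");
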
diff --git a/deps/v8/src/code-stubs.cc b/deps/v8/src/code-stubs.cc
index 11016c8238..8f316606c2 100644
--- a/deps/v8/src/code-stubs.cc
+++ b/deps/v8/src/code-stubs.cc
@@ -73,21 +73,12 @@ SmartArrayPointer<const char> CodeStub::GetName() {
void CodeStub::RecordCodeGeneration(Code* code, MacroAssembler* masm) {
- code->set_major_key(MajorKey());
-
Isolate* isolate = masm->isolate();
SmartArrayPointer<const char> name = GetName();
PROFILE(isolate, CodeCreateEvent(Logger::STUB_TAG, code, *name));
GDBJIT(AddCode(GDBJITInterface::STUB, *name, code));
Counters* counters = isolate->counters();
counters->total_stubs_code_size()->Increment(code->instruction_size());
-
-#ifdef ENABLE_DISASSEMBLER
- if (FLAG_print_code_stubs) {
- code->Disassemble(*name);
- PrintF("\n");
- }
-#endif
}
@@ -125,8 +116,16 @@ Handle<Code> CodeStub::GetCode() {
GetICState());
Handle<Code> new_object = factory->NewCode(
desc, flags, masm.CodeObject(), NeedsImmovableCode());
- RecordCodeGeneration(*new_object, &masm);
+ new_object->set_major_key(MajorKey());
FinishCode(new_object);
+ RecordCodeGeneration(*new_object, &masm);
+
+#ifdef ENABLE_DISASSEMBLER
+ if (FLAG_print_code_stubs) {
+ new_object->Disassemble(*GetName());
+ PrintF("\n");
+ }
+#endif
if (UseSpecialCache()) {
AddToSpecialCache(new_object);
@@ -263,10 +262,13 @@ void JSEntryStub::FinishCode(Handle<Code> code) {
void KeyedLoadElementStub::Generate(MacroAssembler* masm) {
switch (elements_kind_) {
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
KeyedLoadStubCompiler::GenerateLoadFastElement(masm);
break;
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(masm);
break;
case EXTERNAL_BYTE_ELEMENTS:
@@ -293,7 +295,9 @@ void KeyedLoadElementStub::Generate(MacroAssembler* masm) {
void KeyedStoreElementStub::Generate(MacroAssembler* masm) {
switch (elements_kind_) {
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS: {
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS: {
KeyedStoreStubCompiler::GenerateStoreFastElement(masm,
is_js_array_,
elements_kind_,
@@ -301,6 +305,7 @@ void KeyedStoreElementStub::Generate(MacroAssembler* masm) {
}
break;
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(masm,
is_js_array_,
grow_mode_);
@@ -431,24 +436,32 @@ bool ToBooleanStub::Types::CanBeUndetectable() const {
void ElementsTransitionAndStoreStub::Generate(MacroAssembler* masm) {
Label fail;
+ ASSERT(!IsFastHoleyElementsKind(from_) || IsFastHoleyElementsKind(to_));
if (!FLAG_trace_elements_transitions) {
- if (to_ == FAST_ELEMENTS) {
- if (from_ == FAST_SMI_ONLY_ELEMENTS) {
- ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm);
- } else if (from_ == FAST_DOUBLE_ELEMENTS) {
+ if (IsFastSmiOrObjectElementsKind(to_)) {
+ if (IsFastSmiOrObjectElementsKind(from_)) {
+ ElementsTransitionGenerator::
+ GenerateMapChangeElementsTransition(masm);
+ } else if (IsFastDoubleElementsKind(from_)) {
+ ASSERT(!IsFastSmiElementsKind(to_));
ElementsTransitionGenerator::GenerateDoubleToObject(masm, &fail);
} else {
UNREACHABLE();
}
KeyedStoreStubCompiler::GenerateStoreFastElement(masm,
is_jsarray_,
- FAST_ELEMENTS,
+ to_,
grow_mode_);
- } else if (from_ == FAST_SMI_ONLY_ELEMENTS && to_ == FAST_DOUBLE_ELEMENTS) {
- ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &fail);
+ } else if (IsFastSmiElementsKind(from_) &&
+ IsFastDoubleElementsKind(to_)) {
+ ElementsTransitionGenerator::GenerateSmiToDouble(masm, &fail);
KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(masm,
is_jsarray_,
grow_mode_);
+ } else if (IsFastDoubleElementsKind(from_)) {
+ ASSERT(to_ == FAST_HOLEY_DOUBLE_ELEMENTS);
+ ElementsTransitionGenerator::
+ GenerateMapChangeElementsTransition(masm);
} else {
UNREACHABLE();
}
diff --git a/deps/v8/src/code-stubs.h b/deps/v8/src/code-stubs.h
index b67e961ac7..5c8717838f 100644
--- a/deps/v8/src/code-stubs.h
+++ b/deps/v8/src/code-stubs.h
@@ -498,6 +498,7 @@ class ICCompareStub: public CodeStub {
virtual void FinishCode(Handle<Code> code) {
code->set_compare_state(state_);
+ code->set_compare_operation(op_);
}
virtual CodeStub::Major MajorKey() { return CompareIC; }
diff --git a/deps/v8/src/codegen.h b/deps/v8/src/codegen.h
index 50d70f265d..08a777f2ad 100644
--- a/deps/v8/src/codegen.h
+++ b/deps/v8/src/codegen.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -95,8 +95,8 @@ UnaryMathFunction CreateSqrtFunction();
class ElementsTransitionGenerator : public AllStatic {
public:
- static void GenerateSmiOnlyToObject(MacroAssembler* masm);
- static void GenerateSmiOnlyToDouble(MacroAssembler* masm, Label* fail);
+ static void GenerateMapChangeElementsTransition(MacroAssembler* masm);
+ static void GenerateSmiToDouble(MacroAssembler* masm, Label* fail);
static void GenerateDoubleToObject(MacroAssembler* masm, Label* fail);
private:
diff --git a/deps/v8/src/compiler-intrinsics.h b/deps/v8/src/compiler-intrinsics.h
index 3b9c59ea53..b73e8ac750 100644
--- a/deps/v8/src/compiler-intrinsics.h
+++ b/deps/v8/src/compiler-intrinsics.h
@@ -40,6 +40,9 @@ class CompilerIntrinsics {
// Returns number of zero bits following most significant 1 bit.
// Undefined for zero value.
INLINE(static int CountLeadingZeros(uint32_t value));
+
+ // Returns the number of bits set.
+ INLINE(static int CountSetBits(uint32_t value));
};
#ifdef __GNUC__
@@ -51,6 +54,10 @@ int CompilerIntrinsics::CountLeadingZeros(uint32_t value) {
return __builtin_clz(value);
}
+int CompilerIntrinsics::CountSetBits(uint32_t value) {
+ return __builtin_popcount(value);
+}
+
#elif defined(_MSC_VER)
#pragma intrinsic(_BitScanForward)
@@ -68,6 +75,16 @@ int CompilerIntrinsics::CountLeadingZeros(uint32_t value) {
return 31 - static_cast<int>(result);
}
+int CompilerIntrinsics::CountSetBits(uint32_t value) {
+ // Manually count set bits.
+ value = ((value >> 1) & 0x55555555) + (value & 0x55555555);
+ value = ((value >> 2) & 0x33333333) + (value & 0x33333333);
+ value = ((value >> 4) & 0x0f0f0f0f) + (value & 0x0f0f0f0f);
+ value = ((value >> 8) & 0x00ff00ff) + (value & 0x00ff00ff);
+ value = ((value >> 16) & 0x0000ffff) + (value & 0x0000ffff);
+ return value;
+}
+
#else
#error Unsupported compiler
#endif
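
The MSVC fallback added above is the classic parallel (SWAR) popcount: adjacent 1-bit fields are summed into 2-bit fields, then 4-, 8-, 16- and finally 32-bit fields, so after five steps the word holds the number of set bits. A standalone sketch of the same reduction with a couple of spot checks (not V8 code):

    #include <cassert>
    #include <cstdint>

    // Parallel bit count: each step folds pairs of fields into wider fields.
    static int CountSetBits(uint32_t value) {
      value = ((value >> 1) & 0x55555555u) + (value & 0x55555555u);   // 2-bit sums
      value = ((value >> 2) & 0x33333333u) + (value & 0x33333333u);   // 4-bit sums
      value = ((value >> 4) & 0x0f0f0f0fu) + (value & 0x0f0f0f0fu);   // 8-bit sums
      value = ((value >> 8) & 0x00ff00ffu) + (value & 0x00ff00ffu);   // 16-bit sums
      value = ((value >> 16) & 0x0000ffffu) + (value & 0x0000ffffu);  // final sum
      return static_cast<int>(value);
    }

    int main() {
      assert(CountSetBits(0x00000000u) == 0);
      assert(CountSetBits(0xffffffffu) == 32);
      assert(CountSetBits(0xdeadbeefu) == 24);
    #ifdef __GNUC__
      // On GCC/Clang this matches the __builtin_popcount path used above.
      assert(CountSetBits(0x12345678u) == __builtin_popcount(0x12345678u));
    #endif
      return 0;
    }
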
diff --git a/deps/v8/src/compiler.cc b/deps/v8/src/compiler.cc
index ecac5cba69..d44718bc0f 100644
--- a/deps/v8/src/compiler.cc
+++ b/deps/v8/src/compiler.cc
@@ -294,8 +294,9 @@ static bool MakeCrankshaftCode(CompilationInfo* info) {
}
Handle<Context> global_context(info->closure()->context()->global_context());
- TypeFeedbackOracle oracle(code, global_context, info->isolate());
- HGraphBuilder builder(info, &oracle);
+ TypeFeedbackOracle oracle(code, global_context, info->isolate(),
+ info->isolate()->zone());
+ HGraphBuilder builder(info, &oracle, info->isolate()->zone());
HPhase phase(HPhase::kTotal);
HGraph* graph = builder.CreateGraph();
if (info->isolate()->has_pending_exception()) {
@@ -304,7 +305,7 @@ static bool MakeCrankshaftCode(CompilationInfo* info) {
}
if (graph != NULL) {
- Handle<Code> optimized_code = graph->Compile(info);
+ Handle<Code> optimized_code = graph->Compile(info, graph->zone());
if (!optimized_code.is_null()) {
info->SetCode(optimized_code);
FinishOptimization(info->closure(), start);
@@ -346,7 +347,8 @@ bool Compiler::MakeCodeForLiveEdit(CompilationInfo* info) {
// the compilation info is set if compilation succeeded.
bool succeeded = MakeCode(info);
if (!info->shared_info().is_null()) {
- Handle<ScopeInfo> scope_info = ScopeInfo::Create(info->scope());
+ Handle<ScopeInfo> scope_info = ScopeInfo::Create(info->scope(),
+ info->isolate()->zone());
info->shared_info()->set_scope_info(*scope_info);
}
return succeeded;
@@ -420,7 +422,7 @@ static Handle<SharedFunctionInfo> MakeFunctionInfo(CompilationInfo* info) {
lit->name(),
lit->materialized_literal_count(),
info->code(),
- ScopeInfo::Create(info->scope()));
+ ScopeInfo::Create(info->scope(), info->isolate()->zone()));
ASSERT_EQ(RelocInfo::kNoPosition, lit->function_token_position());
Compiler::SetFunctionInfo(result, lit, true, script);
@@ -462,7 +464,7 @@ static Handle<SharedFunctionInfo> MakeFunctionInfo(CompilationInfo* info) {
script, Debugger::NO_AFTER_COMPILE_FLAGS);
#endif
- live_edit_tracker.RecordFunctionInfo(result, lit);
+ live_edit_tracker.RecordFunctionInfo(result, lit, isolate->zone());
return result;
}
@@ -651,7 +653,8 @@ bool Compiler::CompileLazy(CompilationInfo* info) {
// info initialization is important since set_scope_info might
// trigger a GC, causing the ASSERT below to be invalid if the code
// was flushed. By setting the code object last we avoid this.
- Handle<ScopeInfo> scope_info = ScopeInfo::Create(info->scope());
+ Handle<ScopeInfo> scope_info =
+ ScopeInfo::Create(info->scope(), info->isolate()->zone());
shared->set_scope_info(*scope_info);
shared->set_code(*code);
if (!function.is_null()) {
@@ -728,7 +731,7 @@ Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal,
} else if ((V8::UseCrankshaft() && MakeCrankshaftCode(&info)) ||
(!V8::UseCrankshaft() && FullCodeGenerator::MakeCode(&info))) {
ASSERT(!info.code().is_null());
- scope_info = ScopeInfo::Create(info.scope());
+ scope_info = ScopeInfo::Create(info.scope(), info.isolate()->zone());
} else {
return Handle<SharedFunctionInfo>::null();
}
@@ -747,7 +750,7 @@ Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal,
// the resulting function.
SetExpectedNofPropertiesFromEstimate(result,
literal->expected_property_count());
- live_edit_tracker.RecordFunctionInfo(result, literal);
+ live_edit_tracker.RecordFunctionInfo(result, literal, info.isolate()->zone());
return result;
}
diff --git a/deps/v8/src/contexts.h b/deps/v8/src/contexts.h
index af5cb036c6..d154b82ca0 100644
--- a/deps/v8/src/contexts.h
+++ b/deps/v8/src/contexts.h
@@ -106,9 +106,7 @@ enum BindingFlags {
V(OBJECT_FUNCTION_INDEX, JSFunction, object_function) \
V(INTERNAL_ARRAY_FUNCTION_INDEX, JSFunction, internal_array_function) \
V(ARRAY_FUNCTION_INDEX, JSFunction, array_function) \
- V(SMI_JS_ARRAY_MAP_INDEX, Object, smi_js_array_map) \
- V(DOUBLE_JS_ARRAY_MAP_INDEX, Object, double_js_array_map) \
- V(OBJECT_JS_ARRAY_MAP_INDEX, Object, object_js_array_map) \
+ V(JS_ARRAY_MAPS_INDEX, Object, js_array_maps) \
V(DATE_FUNCTION_INDEX, JSFunction, date_function) \
V(JSON_OBJECT_INDEX, JSObject, json_object) \
V(REGEXP_FUNCTION_INDEX, JSFunction, regexp_function) \
@@ -248,9 +246,7 @@ class Context: public FixedArray {
OBJECT_FUNCTION_INDEX,
INTERNAL_ARRAY_FUNCTION_INDEX,
ARRAY_FUNCTION_INDEX,
- SMI_JS_ARRAY_MAP_INDEX,
- DOUBLE_JS_ARRAY_MAP_INDEX,
- OBJECT_JS_ARRAY_MAP_INDEX,
+ JS_ARRAY_MAPS_INDEX,
DATE_FUNCTION_INDEX,
JSON_OBJECT_INDEX,
REGEXP_FUNCTION_INDEX,
@@ -373,18 +369,6 @@ class Context: public FixedArray {
Object* OptimizedFunctionsListHead();
void ClearOptimizedFunctions();
- static int GetContextMapIndexFromElementsKind(
- ElementsKind elements_kind) {
- if (elements_kind == FAST_DOUBLE_ELEMENTS) {
- return Context::DOUBLE_JS_ARRAY_MAP_INDEX;
- } else if (elements_kind == FAST_ELEMENTS) {
- return Context::OBJECT_JS_ARRAY_MAP_INDEX;
- } else {
- ASSERT(elements_kind == FAST_SMI_ONLY_ELEMENTS);
- return Context::SMI_JS_ARRAY_MAP_INDEX;
- }
- }
-
#define GLOBAL_CONTEXT_FIELD_ACCESSORS(index, type, name) \
void set_##name(type* value) { \
ASSERT(IsGlobalContext()); \
@@ -397,7 +381,7 @@ class Context: public FixedArray {
GLOBAL_CONTEXT_FIELDS(GLOBAL_CONTEXT_FIELD_ACCESSORS)
#undef GLOBAL_CONTEXT_FIELD_ACCESSORS
- // Lookup the the slot called name, starting with the current context.
+ // Lookup the slot called name, starting with the current context.
// There are three possibilities:
//
// 1) result->IsContext():
diff --git a/deps/v8/src/conversions-inl.h b/deps/v8/src/conversions-inl.h
index b098a1c29c..77b260f036 100644
--- a/deps/v8/src/conversions-inl.h
+++ b/deps/v8/src/conversions-inl.h
@@ -228,9 +228,7 @@ double InternalStringToIntDouble(UnicodeCache* unicode_cache,
}
ASSERT(number != 0);
- // The double could be constructed faster from number (mantissa), exponent
- // and sign. Assuming it's a rare case more simple code is used.
- return static_cast<double>(negative ? -number : number) * pow(2.0, exponent);
+ return ldexp(static_cast<double>(negative ? -number : number), exponent);
}
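
The conversions-inl.h change replaces the pow(2.0, exponent) multiplication with ldexp, which scales a double by a power of two directly from its exponent field. A tiny illustration of the equivalence (not V8 code):

    #include <cassert>
    #include <cmath>

    int main() {
      // ldexp(m, e) == m * 2^e, computed by adjusting the exponent
      // rather than by a general pow() call.
      assert(std::ldexp(3.0, 4) == 48.0);    // 3 * 2^4
      assert(std::ldexp(5.0, -3) == 0.625);  // 5 / 2^3
      assert(std::ldexp(-7.0, 10) == -7.0 * std::pow(2.0, 10));
      return 0;
    }
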
diff --git a/deps/v8/src/d8.cc b/deps/v8/src/d8.cc
index 45781cf0d4..7a01d55148 100644
--- a/deps/v8/src/d8.cc
+++ b/deps/v8/src/d8.cc
@@ -26,7 +26,8 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#ifdef USING_V8_SHARED // Defined when linking against shared lib on Windows.
+// Defined when linking against shared lib on Windows.
+#if defined(USING_V8_SHARED) && !defined(V8_SHARED)
#define V8_SHARED
#endif
@@ -315,151 +316,143 @@ static size_t convertToUint(Local<Value> value_in, TryCatch* try_catch) {
}
-const char kArrayBufferReferencePropName[] = "_is_array_buffer_";
-const char kArrayBufferMarkerPropName[] = "_array_buffer_ref_";
+const char kArrayBufferMarkerPropName[] = "d8::_is_array_buffer_";
-Handle<Value> Shell::CreateExternalArray(const Arguments& args,
- ExternalArrayType type,
- size_t element_size) {
- TryCatch try_catch;
- bool is_array_buffer_construct = element_size == 0;
- if (is_array_buffer_construct) {
- type = v8::kExternalByteArray;
- element_size = 1;
+Handle<Value> Shell::CreateExternalArrayBuffer(int32_t length) {
+ static const int32_t kMaxSize = 0x7fffffff;
+ // Make sure the total size fits into a (signed) int.
+ if (length < 0 || length > kMaxSize) {
+ return ThrowException(String::New("ArrayBuffer exceeds maximum size (2G)"));
+ }
+ uint8_t* data = new uint8_t[length];
+ if (data == NULL) {
+ return ThrowException(String::New("Memory allocation failed."));
}
- ASSERT(element_size == 1 || element_size == 2 || element_size == 4 ||
- element_size == 8);
+ memset(data, 0, length);
+
+ Handle<Object> buffer = Object::New();
+ buffer->SetHiddenValue(String::New(kArrayBufferMarkerPropName), True());
+ Persistent<Object> persistent_array = Persistent<Object>::New(buffer);
+ persistent_array.MakeWeak(data, ExternalArrayWeakCallback);
+ persistent_array.MarkIndependent();
+ V8::AdjustAmountOfExternalAllocatedMemory(length);
+
+ buffer->SetIndexedPropertiesToExternalArrayData(
+ data, v8::kExternalByteArray, length);
+ buffer->Set(String::New("byteLength"), Int32::New(length), ReadOnly);
+
+ return buffer;
+}
+
+
+Handle<Value> Shell::CreateExternalArrayBuffer(const Arguments& args) {
if (args.Length() == 0) {
return ThrowException(
- String::New("Array constructor must have at least one "
- "parameter."));
+ String::New("ArrayBuffer constructor must have one parameter."));
}
- bool first_arg_is_array_buffer =
- args[0]->IsObject() &&
- args[0]->ToObject()->Get(
- String::New(kArrayBufferMarkerPropName))->IsTrue();
+ TryCatch try_catch;
+ int32_t length = convertToUint(args[0], &try_catch);
+ if (try_catch.HasCaught()) return try_catch.Exception();
+
+ return CreateExternalArrayBuffer(length);
+}
+
+
+Handle<Value> Shell::CreateExternalArray(const Arguments& args,
+ ExternalArrayType type,
+ int32_t element_size) {
+ TryCatch try_catch;
+ ASSERT(element_size == 1 || element_size == 2 ||
+ element_size == 4 || element_size == 8);
+
// Currently, only the following constructors are supported:
// TypedArray(unsigned long length)
// TypedArray(ArrayBuffer buffer,
// optional unsigned long byteOffset,
// optional unsigned long length)
- if (args.Length() > 3) {
+ Handle<Object> buffer;
+ int32_t length;
+ int32_t byteLength;
+ int32_t byteOffset;
+ if (args.Length() == 0) {
return ThrowException(
- String::New("Array constructor from ArrayBuffer must "
- "have 1-3 parameters."));
+ String::New("Array constructor must have at least one parameter."));
}
-
- Local<Value> length_value = (args.Length() < 3)
- ? (first_arg_is_array_buffer
- ? args[0]->ToObject()->Get(String::New("length"))
- : args[0])
- : args[2];
- size_t length = convertToUint(length_value, &try_catch);
- if (try_catch.HasCaught()) return try_catch.Exception();
-
- void* data = NULL;
- size_t offset = 0;
-
- Handle<Object> array = Object::New();
- if (first_arg_is_array_buffer) {
- Handle<Object> derived_from = args[0]->ToObject();
- data = derived_from->GetIndexedPropertiesExternalArrayData();
-
- size_t array_buffer_length = convertToUint(
- derived_from->Get(String::New("length")),
- &try_catch);
+ if (args[0]->IsObject() &&
+ !args[0]->ToObject()->GetHiddenValue(
+ String::New(kArrayBufferMarkerPropName)).IsEmpty()) {
+ buffer = args[0]->ToObject();
+ int32_t bufferLength =
+ convertToUint(buffer->Get(String::New("byteLength")), &try_catch);
if (try_catch.HasCaught()) return try_catch.Exception();
- if (data == NULL && array_buffer_length != 0) {
- return ThrowException(
- String::New("ArrayBuffer doesn't have data"));
- }
-
- if (args.Length() > 1) {
- offset = convertToUint(args[1], &try_catch);
+ if (args.Length() < 2 || args[1]->IsUndefined()) {
+ byteOffset = 0;
+ } else {
+ byteOffset = convertToUint(args[1], &try_catch);
if (try_catch.HasCaught()) return try_catch.Exception();
-
- // The given byteOffset must be a multiple of the element size of the
- // specific type, otherwise an exception is raised.
- if (offset % element_size != 0) {
+ if (byteOffset > bufferLength) {
+ return ThrowException(String::New("byteOffset out of bounds"));
+ }
+ if (byteOffset % element_size != 0) {
return ThrowException(
- String::New("offset must be multiple of element_size"));
+ String::New("byteOffset must be multiple of element_size"));
}
}
- if (offset > array_buffer_length) {
- return ThrowException(
- String::New("byteOffset must be less than ArrayBuffer length."));
- }
-
- if (args.Length() == 2) {
- // If length is not explicitly specified, the length of the ArrayBuffer
- // minus the byteOffset must be a multiple of the element size of the
- // specific type, or an exception is raised.
- length = array_buffer_length - offset;
- }
-
- if (args.Length() != 3) {
- if (length % element_size != 0) {
+ if (args.Length() < 3 || args[2]->IsUndefined()) {
+ byteLength = bufferLength - byteOffset;
+ length = byteLength / element_size;
+ if (byteLength % element_size != 0) {
return ThrowException(
- String::New("ArrayBuffer length minus the byteOffset must be a "
- "multiple of the element size"));
+ String::New("buffer size must be multiple of element_size"));
+ }
+ } else {
+ length = convertToUint(args[2], &try_catch);
+ if (try_catch.HasCaught()) return try_catch.Exception();
+ byteLength = length * element_size;
+ if (byteOffset + byteLength > bufferLength) {
+ return ThrowException(String::New("length out of bounds"));
}
- length /= element_size;
- }
-
- // If a given byteOffset and length references an area beyond the end of
- // the ArrayBuffer an exception is raised.
- if (offset + (length * element_size) > array_buffer_length) {
- return ThrowException(
- String::New("length references an area beyond the end of the "
- "ArrayBuffer"));
}
-
- // Hold a reference to the ArrayBuffer so its buffer doesn't get collected.
- array->Set(String::New(kArrayBufferReferencePropName), args[0], ReadOnly);
- }
-
- if (is_array_buffer_construct) {
- array->Set(String::New(kArrayBufferMarkerPropName), True(), ReadOnly);
+ } else {
+ length = convertToUint(args[0], &try_catch);
+ byteLength = length * element_size;
+ byteOffset = 0;
+ Handle<Value> result = CreateExternalArrayBuffer(byteLength);
+ if (!result->IsObject()) return result;
+ buffer = result->ToObject();
}
- Persistent<Object> persistent_array = Persistent<Object>::New(array);
- persistent_array.MakeWeak(data, ExternalArrayWeakCallback);
- persistent_array.MarkIndependent();
- if (data == NULL && length != 0) {
- data = calloc(length, element_size);
- if (data == NULL) {
- return ThrowException(String::New("Memory allocation failed."));
- }
- }
+ void* data = buffer->GetIndexedPropertiesExternalArrayData();
+ ASSERT(data != NULL);
+ Handle<Object> array = Object::New();
array->SetIndexedPropertiesToExternalArrayData(
- reinterpret_cast<uint8_t*>(data) + offset, type,
- static_cast<int>(length));
- array->Set(String::New("length"),
- Int32::New(static_cast<int32_t>(length)), ReadOnly);
- array->Set(String::New("BYTES_PER_ELEMENT"),
- Int32::New(static_cast<int32_t>(element_size)));
+ static_cast<uint8_t*>(data) + byteOffset, type, length);
+ array->Set(String::New("byteLength"), Int32::New(byteLength), ReadOnly);
+ array->Set(String::New("byteOffset"), Int32::New(byteOffset), ReadOnly);
+ array->Set(String::New("length"), Int32::New(length), ReadOnly);
+ array->Set(String::New("BYTES_PER_ELEMENT"), Int32::New(element_size));
+ array->Set(String::New("buffer"), buffer, ReadOnly);
+
return array;
}
void Shell::ExternalArrayWeakCallback(Persistent<Value> object, void* data) {
HandleScope scope;
- Handle<String> prop_name = String::New(kArrayBufferReferencePropName);
- Handle<Object> converted_object = object->ToObject();
- Local<Value> prop_value = converted_object->Get(prop_name);
- if (data != NULL && !prop_value->IsObject()) {
- free(data);
- }
+ int32_t length =
+ object->ToObject()->Get(String::New("byteLength"))->Uint32Value();
+ V8::AdjustAmountOfExternalAllocatedMemory(-length);
+ delete[] static_cast<uint8_t*>(data);
object.Dispose();
}
Handle<Value> Shell::ArrayBuffer(const Arguments& args) {
- return CreateExternalArray(args, v8::kExternalByteArray, 0);
+ return CreateExternalArrayBuffer(args);
}
@@ -806,8 +799,8 @@ Handle<ObjectTemplate> Shell::CreateGlobalTemplate() {
global_template->Set(String::New("print"), FunctionTemplate::New(Print));
global_template->Set(String::New("write"), FunctionTemplate::New(Write));
global_template->Set(String::New("read"), FunctionTemplate::New(Read));
- global_template->Set(String::New("readbinary"),
- FunctionTemplate::New(ReadBinary));
+ global_template->Set(String::New("readbuffer"),
+ FunctionTemplate::New(ReadBuffer));
global_template->Set(String::New("readline"),
FunctionTemplate::New(ReadLine));
global_template->Set(String::New("load"), FunctionTemplate::New(Load));
@@ -977,8 +970,8 @@ void Shell::OnExit() {
printf("+--------------------------------------------+-------------+\n");
delete [] counters;
}
- if (counters_file_ != NULL)
- delete counters_file_;
+ delete counters_file_;
+ delete counter_map_;
}
#endif // V8_SHARED
@@ -1026,20 +1019,30 @@ static char* ReadChars(const char* name, int* size_out) {
}
-Handle<Value> Shell::ReadBinary(const Arguments& args) {
+Handle<Value> Shell::ReadBuffer(const Arguments& args) {
+ ASSERT(sizeof(char) == sizeof(uint8_t)); // NOLINT
String::Utf8Value filename(args[0]);
- int size;
+ int length;
if (*filename == NULL) {
return ThrowException(String::New("Error loading file"));
}
- char* chars = ReadChars(*filename, &size);
- if (chars == NULL) {
+
+ uint8_t* data = reinterpret_cast<uint8_t*>(ReadChars(*filename, &length));
+ if (data == NULL) {
return ThrowException(String::New("Error reading file"));
}
- // We skip checking the string for UTF8 characters and use it raw as
- // backing store for the external string with 8-bit characters.
- BinaryResource* resource = new BinaryResource(chars, size);
- return String::NewExternal(resource);
+ Handle<Object> buffer = Object::New();
+ buffer->SetHiddenValue(String::New(kArrayBufferMarkerPropName), True());
+ Persistent<Object> persistent_buffer = Persistent<Object>::New(buffer);
+ persistent_buffer.MakeWeak(data, ExternalArrayWeakCallback);
+ persistent_buffer.MarkIndependent();
+ V8::AdjustAmountOfExternalAllocatedMemory(length);
+
+ buffer->SetIndexedPropertiesToExternalArrayData(
+ data, kExternalUnsignedByteArray, length);
+ buffer->Set(String::New("byteLength"),
+ Int32::New(static_cast<int32_t>(length)), ReadOnly);
+ return buffer;
}
@@ -1203,7 +1206,7 @@ void SourceGroup::Execute() {
Handle<String> SourceGroup::ReadFile(const char* name) {
int size;
- const char* chars = ReadChars(name, &size);
+ char* chars = ReadChars(name, &size);
if (chars == NULL) return Handle<String>();
Handle<String> result = String::New(chars, size);
delete[] chars;
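
The rewritten typed-array constructor in d8.cc derives a view's byteOffset, byteLength and element count from the backing ArrayBuffer and rejects misaligned or out-of-range values. A minimal sketch of the same bounds arithmetic, detached from the V8 API (all names illustrative):

    #include <cassert>
    #include <cstdint>

    // Mirror of the checks in the hunk above: the offset and total size must
    // stay inside the buffer, and both must be multiples of the element size
    // when the length is derived from the remaining bytes.
    static bool ComputeViewExtent(int32_t buffer_byte_length,
                                  int32_t byte_offset,
                                  int32_t element_size,
                                  int32_t requested_length,  // -1: rest of buffer
                                  int32_t* out_length,
                                  int32_t* out_byte_length) {
      if (byte_offset < 0 || byte_offset > buffer_byte_length) return false;
      if (byte_offset % element_size != 0) return false;
      if (requested_length < 0) {
        int32_t rest = buffer_byte_length - byte_offset;
        if (rest % element_size != 0) return false;
        *out_byte_length = rest;
        *out_length = rest / element_size;
      } else {
        *out_byte_length = requested_length * element_size;
        if (byte_offset + *out_byte_length > buffer_byte_length) return false;
        *out_length = requested_length;
      }
      return true;
    }

    int main() {
      int32_t length = 0, byte_length = 0;
      // A 4-byte-per-element view over a 32-byte buffer, starting 8 bytes in.
      assert(ComputeViewExtent(32, 8, 4, -1, &length, &byte_length));
      assert(length == 6 && byte_length == 24);
      // An explicit length that would run past the buffer is rejected.
      assert(!ComputeViewExtent(32, 8, 4, 7, &length, &byte_length));
      return 0;
    }
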
diff --git a/deps/v8/src/d8.h b/deps/v8/src/d8.h
index c872f90958..2789c6db3e 100644
--- a/deps/v8/src/d8.h
+++ b/deps/v8/src/d8.h
@@ -307,7 +307,7 @@ class Shell : public i::AllStatic {
static Handle<Value> EnableProfiler(const Arguments& args);
static Handle<Value> DisableProfiler(const Arguments& args);
static Handle<Value> Read(const Arguments& args);
- static Handle<Value> ReadBinary(const Arguments& args);
+ static Handle<Value> ReadBuffer(const Arguments& args);
static Handle<String> ReadFromStdin();
static Handle<Value> ReadLine(const Arguments& args) {
return ReadFromStdin();
@@ -383,9 +383,11 @@ class Shell : public i::AllStatic {
static void RunShell();
static bool SetOptions(int argc, char* argv[]);
static Handle<ObjectTemplate> CreateGlobalTemplate();
+ static Handle<Value> CreateExternalArrayBuffer(int32_t size);
+ static Handle<Value> CreateExternalArrayBuffer(const Arguments& args);
static Handle<Value> CreateExternalArray(const Arguments& args,
ExternalArrayType type,
- size_t element_size);
+ int32_t element_size);
static void ExternalArrayWeakCallback(Persistent<Value> object, void* data);
};
diff --git a/deps/v8/src/d8.js b/deps/v8/src/d8.js
index bf269230b8..819135add4 100644
--- a/deps/v8/src/d8.js
+++ b/deps/v8/src/d8.js
@@ -2174,7 +2174,7 @@ function DebugResponseDetails(response) {
}
var current_line = from_line + num;
- spacer = maxdigits - (1 + Math.floor(log10(current_line)));
+ var spacer = maxdigits - (1 + Math.floor(log10(current_line)));
if (current_line == Debug.State.currentSourceLine + 1) {
for (var i = 0; i < maxdigits; i++) {
result += '>';
diff --git a/deps/v8/src/dateparser-inl.h b/deps/v8/src/dateparser-inl.h
index 32f0f9ea8f..a5c7143bdd 100644
--- a/deps/v8/src/dateparser-inl.h
+++ b/deps/v8/src/dateparser-inl.h
@@ -148,6 +148,9 @@ bool DateParser::Parse(Vector<Char> str,
} else {
// Garbage words are illegal if a number has been read.
if (has_read_number) return false;
+ // The first number has to be separated from garbage words by
+ // whitespace or other separators.
+ if (scanner.Peek().IsNumber()) return false;
}
} else if (token.IsAsciiSign() && (tz.IsUTC() || !time.IsEmpty())) {
// Parse UTC offset (only after UTC or time).
diff --git a/deps/v8/src/debug-agent.cc b/deps/v8/src/debug-agent.cc
index bdc7a578ac..e856222775 100644
--- a/deps/v8/src/debug-agent.cc
+++ b/deps/v8/src/debug-agent.cc
@@ -1,4 +1,4 @@
-// Copyright 2009 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -157,7 +157,9 @@ void DebuggerAgent::OnSessionClosed(DebuggerAgentSession* session) {
ScopedLock with(session_access_);
ASSERT(session == session_);
if (session == session_) {
- CloseSession();
+ session_->Shutdown();
+ delete session_;
+ session_ = NULL;
}
}
@@ -247,7 +249,7 @@ SmartArrayPointer<char> DebuggerAgentUtil::ReceiveMessage(const Socket* conn) {
while (!(c == '\n' && prev_c == '\r')) {
prev_c = c;
received = conn->Receive(&c, 1);
- if (received <= 0) {
+ if (received == 0) {
PrintF("Error %d\n", Socket::LastError());
return SmartArrayPointer<char>();
}
@@ -323,41 +325,41 @@ bool DebuggerAgentUtil::SendConnectMessage(const Socket* conn,
const char* embedding_host) {
static const int kBufferSize = 80;
char buffer[kBufferSize]; // Sending buffer.
+ bool ok;
int len;
- int r;
// Send the header.
len = OS::SNPrintF(Vector<char>(buffer, kBufferSize),
"Type: connect\r\n");
- r = conn->Send(buffer, len);
- if (r != len) return false;
+ ok = conn->Send(buffer, len);
+ if (!ok) return false;
len = OS::SNPrintF(Vector<char>(buffer, kBufferSize),
"V8-Version: %s\r\n", v8::V8::GetVersion());
- r = conn->Send(buffer, len);
- if (r != len) return false;
+ ok = conn->Send(buffer, len);
+ if (!ok) return false;
len = OS::SNPrintF(Vector<char>(buffer, kBufferSize),
"Protocol-Version: 1\r\n");
- r = conn->Send(buffer, len);
- if (r != len) return false;
+ ok = conn->Send(buffer, len);
+ if (!ok) return false;
if (embedding_host != NULL) {
len = OS::SNPrintF(Vector<char>(buffer, kBufferSize),
"Embedding-Host: %s\r\n", embedding_host);
- r = conn->Send(buffer, len);
- if (r != len) return false;
+ ok = conn->Send(buffer, len);
+ if (!ok) return false;
}
len = OS::SNPrintF(Vector<char>(buffer, kBufferSize),
"%s: 0\r\n", kContentLength);
- r = conn->Send(buffer, len);
- if (r != len) return false;
+ ok = conn->Send(buffer, len);
+ if (!ok) return false;
// Terminate header with empty line.
len = OS::SNPrintF(Vector<char>(buffer, kBufferSize), "\r\n");
- r = conn->Send(buffer, len);
- if (r != len) return false;
+ ok = conn->Send(buffer, len);
+ if (!ok) return false;
// No body for connect message.
@@ -397,7 +399,7 @@ bool DebuggerAgentUtil::SendMessage(const Socket* conn,
uint16_t character = message[i];
buffer_position +=
unibrow::Utf8::Encode(buffer + buffer_position, character, previous);
- ASSERT(buffer_position < kBufferSize);
+ ASSERT(buffer_position <= kBufferSize);
// Send buffer if full or last character is encoded.
if (kBufferSize - buffer_position <
@@ -454,7 +456,7 @@ int DebuggerAgentUtil::ReceiveAll(const Socket* conn, char* data, int len) {
int total_received = 0;
while (total_received < len) {
int received = conn->Receive(data + total_received, len - total_received);
- if (received <= 0) {
+ if (received == 0) {
return total_received;
}
total_received += received;
diff --git a/deps/v8/src/debug-debugger.js b/deps/v8/src/debug-debugger.js
index 802f6224c4..91838e8ad0 100644
--- a/deps/v8/src/debug-debugger.js
+++ b/deps/v8/src/debug-debugger.js
@@ -1957,7 +1957,7 @@ DebugCommandProcessor.prototype.frameForScopeRequest_ = function(request) {
if (request.arguments && !IS_UNDEFINED(request.arguments.frameNumber)) {
frame_index = request.arguments.frameNumber;
if (frame_index < 0 || this.exec_state_.frameCount() <= frame_index) {
- return response.failed('Invalid frame number');
+ throw new Error('Invalid frame number');
}
return this.exec_state_.frame(frame_index);
} else {
@@ -1966,20 +1966,44 @@ DebugCommandProcessor.prototype.frameForScopeRequest_ = function(request) {
};
-DebugCommandProcessor.prototype.scopesRequest_ = function(request, response) {
- // No frames no scopes.
- if (this.exec_state_.frameCount() == 0) {
- return response.failed('No scopes');
+// Gets scope host object from request. It is either a function
+// ('functionHandle' argument must be specified) or a stack frame
+// ('frameNumber' may be specified and the current frame is taken by default).
+DebugCommandProcessor.prototype.scopeHolderForScopeRequest_ =
+ function(request) {
+ if (request.arguments && "functionHandle" in request.arguments) {
+ if (!IS_NUMBER(request.arguments.functionHandle)) {
+ throw new Error('Function handle must be a number');
+ }
+ var function_mirror = LookupMirror(request.arguments.functionHandle);
+ if (!function_mirror) {
+ throw new Error('Failed to find function object by handle');
+ }
+ if (!function_mirror.isFunction()) {
+ throw new Error('Value of non-function type is found by handle');
+ }
+ return function_mirror;
+ } else {
+ // No frames no scopes.
+ if (this.exec_state_.frameCount() == 0) {
+ throw new Error('No scopes');
+ }
+
+ // Get the frame for which the scopes are requested.
+ var frame = this.frameForScopeRequest_(request);
+ return frame;
}
+}
- // Get the frame for which the scopes are requested.
- var frame = this.frameForScopeRequest_(request);
- // Fill all scopes for this frame.
- var total_scopes = frame.scopeCount();
+DebugCommandProcessor.prototype.scopesRequest_ = function(request, response) {
+ var scope_holder = this.scopeHolderForScopeRequest_(request);
+
+ // Fill all scopes for this frame or function.
+ var total_scopes = scope_holder.scopeCount();
var scopes = [];
for (var i = 0; i < total_scopes; i++) {
- scopes.push(frame.scope(i));
+ scopes.push(scope_holder.scope(i));
}
response.body = {
fromScope: 0,
@@ -1991,24 +2015,19 @@ DebugCommandProcessor.prototype.scopesRequest_ = function(request, response) {
DebugCommandProcessor.prototype.scopeRequest_ = function(request, response) {
- // No frames no scopes.
- if (this.exec_state_.frameCount() == 0) {
- return response.failed('No scopes');
- }
-
- // Get the frame for which the scope is requested.
- var frame = this.frameForScopeRequest_(request);
+ // Get the frame or function for which the scope is requested.
+ var scope_holder = this.scopeHolderForScopeRequest_(request);
// With no scope argument just return top scope.
var scope_index = 0;
if (request.arguments && !IS_UNDEFINED(request.arguments.number)) {
scope_index = %ToNumber(request.arguments.number);
- if (scope_index < 0 || frame.scopeCount() <= scope_index) {
+ if (scope_index < 0 || scope_holder.scopeCount() <= scope_index) {
return response.failed('Invalid scope number');
}
}
- response.body = frame.scope(scope_index);
+ response.body = scope_holder.scope(scope_index);
};
diff --git a/deps/v8/src/debug.cc b/deps/v8/src/debug.cc
index f8a1ecf4f9..543ce9f24d 100644
--- a/deps/v8/src/debug.cc
+++ b/deps/v8/src/debug.cc
@@ -892,6 +892,16 @@ void Debug::Iterate(ObjectVisitor* v) {
}
+void Debug::PutValuesOnStackAndDie(int start,
+ Address c_entry_fp,
+ Address last_fp,
+ Address larger_fp,
+ int count,
+ int end) {
+ OS::Abort();
+}
+
+
Object* Debug::Break(Arguments args) {
Heap* heap = isolate_->heap();
HandleScope scope(isolate_);
@@ -984,11 +994,34 @@ Object* Debug::Break(Arguments args) {
// Count frames until target frame
int count = 0;
JavaScriptFrameIterator it(isolate_);
- while (!it.done() && it.frame()->fp() != thread_local_.last_fp_) {
+ while (!it.done() && it.frame()->fp() < thread_local_.last_fp_) {
count++;
it.Advance();
}
+ // Catch the cases that would lead to crashes and capture
+ // - C entry FP at which to start stack crawl.
+ // - FP of the frame at which we plan to stop stepping out (last FP).
+ // - current FP that's larger than last FP.
+ // - Counter for the number of steps to step out.
+ if (it.done()) {
+ // We crawled the entire stack, never reaching last_fp_.
+ PutValuesOnStackAndDie(0xBEEEEEEE,
+ frame->fp(),
+ thread_local_.last_fp_,
+ NULL,
+ count,
+ 0xFEEEEEEE);
+ } else if (it.frame()->fp() != thread_local_.last_fp_) {
+ // We crawled over last_fp_, without getting a match.
+ PutValuesOnStackAndDie(0xBEEEEEEE,
+ frame->fp(),
+ thread_local_.last_fp_,
+ it.frame()->fp(),
+ count,
+ 0xFEEEEEEE);
+ }
+
// If we found original frame
if (it.frame()->fp() == thread_local_.last_fp_) {
if (step_count > 1) {
@@ -1418,7 +1451,7 @@ void Debug::PrepareStep(StepAction step_action, int step_count) {
// Remember source position and frame to handle step next.
thread_local_.last_statement_position_ =
debug_info->code()->SourceStatementPosition(frame->pc());
- thread_local_.last_fp_ = frame->fp();
+ thread_local_.last_fp_ = frame->UnpaddedFP();
} else {
// If there's restarter frame on top of the stack, just get the pointer
// to function which is going to be restarted.
@@ -1487,7 +1520,7 @@ void Debug::PrepareStep(StepAction step_action, int step_count) {
// propagated on the next Debug::Break.
thread_local_.last_statement_position_ =
debug_info->code()->SourceStatementPosition(frame->pc());
- thread_local_.last_fp_ = frame->fp();
+ thread_local_.last_fp_ = frame->UnpaddedFP();
}
// Step in or Step in min
@@ -1522,7 +1555,7 @@ bool Debug::StepNextContinue(BreakLocationIterator* break_location_iterator,
// Continue if we are still on the same frame and in the same statement.
int current_statement_position =
break_location_iterator->code()->SourceStatementPosition(frame->pc());
- return thread_local_.last_fp_ == frame->fp() &&
+ return thread_local_.last_fp_ == frame->UnpaddedFP() &&
thread_local_.last_statement_position_ == current_statement_position;
}
@@ -1723,7 +1756,7 @@ void Debug::ClearOneShot() {
void Debug::ActivateStepIn(StackFrame* frame) {
ASSERT(!StepOutActive());
- thread_local_.step_into_fp_ = frame->fp();
+ thread_local_.step_into_fp_ = frame->UnpaddedFP();
}
@@ -1734,7 +1767,7 @@ void Debug::ClearStepIn() {
void Debug::ActivateStepOut(StackFrame* frame) {
ASSERT(!StepInActive());
- thread_local_.step_out_fp_ = frame->fp();
+ thread_local_.step_out_fp_ = frame->UnpaddedFP();
}
@@ -1751,20 +1784,19 @@ void Debug::ClearStepNext() {
// Helper function to compile full code for debugging. This code will
-// have debug break slots and deoptimization
-// information. Deoptimization information is required in case that an
-// optimized version of this function is still activated on the
-// stack. It will also make sure that the full code is compiled with
-// the same flags as the previous version - that is flags which can
-// change the code generated. The current method of mapping from
-// already compiled full code without debug break slots to full code
-// with debug break slots depends on the generated code is otherwise
-// exactly the same.
-static bool CompileFullCodeForDebugging(Handle<SharedFunctionInfo> shared,
+// have debug break slots and deoptimization information. Deoptimization
+// information is required in case that an optimized version of this
+// function is still activated on the stack. It will also make sure that
+// the full code is compiled with the same flags as the previous version,
+// that is flags which can change the code generated. The current method
+// of mapping from already compiled full code without debug break slots
+// to full code with debug break slots depends on the generated code being
+// otherwise exactly the same.
+static bool CompileFullCodeForDebugging(Handle<JSFunction> function,
Handle<Code> current_code) {
ASSERT(!current_code->has_debug_break_slots());
- CompilationInfo info(shared);
+ CompilationInfo info(function);
info.MarkCompilingForDebugging(current_code);
ASSERT(!info.shared_info()->is_compiled());
ASSERT(!info.isolate()->has_pending_exception());
@@ -1776,7 +1808,7 @@ static bool CompileFullCodeForDebugging(Handle<SharedFunctionInfo> shared,
info.isolate()->clear_pending_exception();
#if DEBUG
if (result) {
- Handle<Code> new_code(shared->code());
+ Handle<Code> new_code(function->shared()->code());
ASSERT(new_code->has_debug_break_slots());
ASSERT(current_code->is_compiled_optimizable() ==
new_code->is_compiled_optimizable());
@@ -1857,13 +1889,6 @@ static void RedirectActivationsToRecompiledCodeOnThread(
// break slots.
debug_break_slot_count++;
}
- if (frame_code->has_self_optimization_header() &&
- !new_code->has_self_optimization_header()) {
- delta -= FullCodeGenerator::self_optimization_header_size();
- } else {
- ASSERT(frame_code->has_self_optimization_header() ==
- new_code->has_self_optimization_header());
- }
int debug_break_slot_bytes =
debug_break_slot_count * Assembler::kDebugBreakSlotLength;
if (FLAG_trace_deopt) {
@@ -1987,6 +2012,7 @@ void Debug::PrepareForBreakPoints() {
// patch the return address to run in the new compiled code.
for (int i = 0; i < active_functions.length(); i++) {
Handle<JSFunction> function = active_functions[i];
+ Handle<SharedFunctionInfo> shared(function->shared());
if (function->code()->kind() == Code::FUNCTION &&
function->code()->has_debug_break_slots()) {
@@ -1994,7 +2020,6 @@ void Debug::PrepareForBreakPoints() {
continue;
}
- Handle<SharedFunctionInfo> shared(function->shared());
// If recompilation is not possible just skip it.
if (shared->is_toplevel() ||
!shared->allows_lazy_compilation() ||
@@ -2014,7 +2039,7 @@ void Debug::PrepareForBreakPoints() {
isolate_->debugger()->force_debugger_active();
isolate_->debugger()->set_force_debugger_active(true);
ASSERT(current_code->kind() == Code::FUNCTION);
- CompileFullCodeForDebugging(shared, current_code);
+ CompileFullCodeForDebugging(function, current_code);
isolate_->debugger()->set_force_debugger_active(
prev_force_debugger_active);
if (!shared->is_compiled()) {
@@ -2234,6 +2259,13 @@ void Debug::FramesHaveBeenDropped(StackFrame::Id new_break_frame_id,
}
+const int Debug::FramePaddingLayout::kInitialSize = 1;
+
+
+// Any even value bigger than kInitialSize as needed for stack scanning.
+const int Debug::FramePaddingLayout::kPaddingValue = kInitialSize + 1;
+
+
bool Debug::IsDebugGlobal(GlobalObject* global) {
return IsLoaded() && global == debug_context()->global();
}
diff --git a/deps/v8/src/debug.h b/deps/v8/src/debug.h
index 474b90bd21..d9c966c37f 100644
--- a/deps/v8/src/debug.h
+++ b/deps/v8/src/debug.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -232,6 +232,12 @@ class Debug {
void PreemptionWhileInDebugger();
void Iterate(ObjectVisitor* v);
+ NO_INLINE(void PutValuesOnStackAndDie(int start,
+ Address c_entry_fp,
+ Address last_fp,
+ Address larger_fp,
+ int count,
+ int end));
Object* Break(Arguments args);
void SetBreakPoint(Handle<SharedFunctionInfo> shared,
Handle<Object> break_point_object,
@@ -245,6 +251,8 @@ class Debug {
bool IsBreakOnException(ExceptionBreakType type);
void PrepareStep(StepAction step_action, int step_count);
void ClearStepping();
+ void ClearStepOut();
+ bool IsStepping() { return thread_local_.step_count_ > 0; }
bool StepNextContinue(BreakLocationIterator* break_location_iterator,
JavaScriptFrame* frame);
static Handle<DebugInfo> GetDebugInfo(Handle<SharedFunctionInfo> shared);
@@ -455,6 +463,50 @@ class Debug {
// Architecture-specific constant.
static const bool kFrameDropperSupported;
+ /**
+ * Defines the layout of a stack frame that supports padding. This is a
+ * regular internal frame with a flexible stack structure. LiveEdit can shift
+ * its lower part up the stack, taking up the 'padding' space when additional
+ * stack memory is required.
+ * Such a frame is expected immediately above the topmost JavaScript frame.
+ *
+ * Stack Layout:
+ * --- Top
+ * LiveEdit routine frames
+ * ---
+ * C frames of debug handler
+ * ---
+ * ...
+ * ---
+ * An internal frame that has n padding words:
+ * - any number of words as needed by code -- upper part of frame
+ * - padding size: a Smi storing n -- current size of padding
+ * - padding: n words filled with kPaddingValue in form of Smi
+ * - 3 context/type words of a regular InternalFrame
+ * - fp
+ * ---
+ * Topmost JavaScript frame
+ * ---
+ * ...
+ * --- Bottom
+ */
+ class FramePaddingLayout : public AllStatic {
+ public:
+ // Architecture-specific constant.
+ static const bool kIsSupported;
+
+ // The size of the frame base, including fp. Padding words start right above
+ // the base.
+ static const int kFrameBaseSize = 4;
+
+ // The number of words reserved on the stack for LiveEdit's use.
+ // Normally equals 1. Stored on the stack as a Smi.
+ static const int kInitialSize;
+ // The value that padding words are filled with (as a Smi). Going from
+ // bottom to top, the first word not holding this value is the counter word.
+ static const int kPaddingValue;
+ };
+
private:
explicit Debug(Isolate* isolate);
~Debug();
@@ -464,7 +516,6 @@ class Debug {
void ActivateStepIn(StackFrame* frame);
void ClearStepIn();
void ActivateStepOut(StackFrame* frame);
- void ClearStepOut();
void ClearStepNext();
// Returns whether the compile succeeded.
void RemoveDebugInfo(Handle<DebugInfo> debug_info);
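
A compact way to read the FramePaddingLayout contract documented above: padding words carry kPaddingValue as a Smi, the word above them is a Smi counter holding the current padding size, and a scanner walking up from the frame base skips marker words until it reaches that counter. The mock below replays the scan with plain ints standing in for tagged slots; the tagging helpers and the slot vector are inventions of the sketch, and only kInitialSize and kPaddingValue mirror the constants defined in debug.cc above.

#include <cassert>
#include <vector>

// Plain ints stand in for tagged words: a real Smi is pointer-sized with the
// payload in the upper bits, but (value << 1) captures the tagging well
// enough for this sketch.
static int ToSmi(int value) { return value << 1; }
static int FromSmi(int smi) { return smi >> 1; }

static const int kInitialSize = 1;                  // as in FramePaddingLayout
static const int kPaddingValue = kInitialSize + 1;

// Walk up from the frame base, skip every word equal to the padding marker,
// and return the value of the first other word, which is the padding counter.
static int ReadPaddingCounter(const std::vector<int>& slots, size_t base) {
  size_t i = base;
  while (slots[i] == ToSmi(kPaddingValue)) ++i;
  return FromSmi(slots[i]);
}

int main() {
  // Bottom to top: one frame base word, n marker words, then the counter.
  const int n = 3;
  std::vector<int> slots;
  slots.push_back(0);                                 // frame base word
  for (int i = 0; i < n; ++i) slots.push_back(ToSmi(kPaddingValue));
  slots.push_back(ToSmi(n));                          // current padding size
  assert(ReadPaddingCounter(slots, 1) == n);
  return 0;
}
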
diff --git a/deps/v8/src/deoptimizer.cc b/deps/v8/src/deoptimizer.cc
index 2a30ddd3da..3debf55cd6 100644
--- a/deps/v8/src/deoptimizer.cc
+++ b/deps/v8/src/deoptimizer.cc
@@ -354,6 +354,7 @@ Deoptimizer::Deoptimizer(Isolate* isolate,
bailout_type_(type),
from_(from),
fp_to_sp_delta_(fp_to_sp_delta),
+ has_alignment_padding_(0),
input_(NULL),
output_count_(0),
jsframe_count_(0),
@@ -378,6 +379,7 @@ Deoptimizer::Deoptimizer(Isolate* isolate,
reinterpret_cast<intptr_t>(from),
fp_to_sp_delta - (2 * kPointerSize));
}
+ function->shared()->increment_deopt_count();
// Find the optimized code.
if (type == EAGER) {
ASSERT(from == NULL);
@@ -593,12 +595,14 @@ void Deoptimizer::DoComputeOutputFrames() {
PrintF("[deoptimizing: end 0x%08" V8PRIxPTR " ",
reinterpret_cast<intptr_t>(function));
function->PrintName();
- PrintF(" => node=%u, pc=0x%08" V8PRIxPTR ", state=%s, took %0.3f ms]\n",
+ PrintF(" => node=%u, pc=0x%08" V8PRIxPTR ", state=%s, alignment=%s,"
+ " took %0.3f ms]\n",
node_id,
output_[index]->GetPc(),
FullCodeGenerator::State2String(
static_cast<FullCodeGenerator::State>(
output_[index]->GetState()->value())),
+ has_alignment_padding_ ? "with padding" : "no padding",
ms);
}
}
@@ -769,7 +773,7 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
if (FLAG_trace_deopt) {
PrintF(" 0x%08" V8PRIxPTR ": ",
output_[frame_index]->GetTop() + output_offset);
- PrintF("[top + %d] <- 0x%08" V8PRIxPTR " ; [esp + %d] ",
+ PrintF("[top + %d] <- 0x%08" V8PRIxPTR " ; [sp + %d] ",
output_offset,
input_value,
input_offset);
@@ -789,7 +793,7 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
if (FLAG_trace_deopt) {
PrintF(" 0x%08" V8PRIxPTR ": ",
output_[frame_index]->GetTop() + output_offset);
- PrintF("[top + %d] <- %" V8PRIdPTR " ; [esp + %d] (%s)\n",
+ PrintF("[top + %d] <- %" V8PRIdPTR " ; [sp + %d] (%s)\n",
output_offset,
value,
input_offset,
@@ -815,7 +819,7 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
input_->GetOffsetFromSlotIndex(input_slot_index);
double value = input_->GetDoubleFrameSlot(input_offset);
if (FLAG_trace_deopt) {
- PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- %e ; [esp + %d]\n",
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- %e ; [sp + %d]\n",
output_[frame_index]->GetTop() + output_offset,
output_offset,
value,
@@ -1290,7 +1294,7 @@ Object* FrameDescription::GetExpression(int index) {
}
-void TranslationBuffer::Add(int32_t value) {
+void TranslationBuffer::Add(int32_t value, Zone* zone) {
// Encode the sign bit in the least significant bit.
bool is_negative = (value < 0);
uint32_t bits = ((is_negative ? -value : value) << 1) |
@@ -1299,7 +1303,7 @@ void TranslationBuffer::Add(int32_t value) {
// each byte to indicate whether or not more bytes follow.
do {
uint32_t next = bits >> 7;
- contents_.Add(((bits << 1) & 0xFF) | (next != 0));
+ contents_.Add(((bits << 1) & 0xFF) | (next != 0), zone);
bits = next;
} while (bits != 0);
}
@@ -1332,76 +1336,76 @@ Handle<ByteArray> TranslationBuffer::CreateByteArray() {
void Translation::BeginConstructStubFrame(int literal_id, unsigned height) {
- buffer_->Add(CONSTRUCT_STUB_FRAME);
- buffer_->Add(literal_id);
- buffer_->Add(height);
+ buffer_->Add(CONSTRUCT_STUB_FRAME, zone());
+ buffer_->Add(literal_id, zone());
+ buffer_->Add(height, zone());
}
void Translation::BeginArgumentsAdaptorFrame(int literal_id, unsigned height) {
- buffer_->Add(ARGUMENTS_ADAPTOR_FRAME);
- buffer_->Add(literal_id);
- buffer_->Add(height);
+ buffer_->Add(ARGUMENTS_ADAPTOR_FRAME, zone());
+ buffer_->Add(literal_id, zone());
+ buffer_->Add(height, zone());
}
void Translation::BeginJSFrame(int node_id, int literal_id, unsigned height) {
- buffer_->Add(JS_FRAME);
- buffer_->Add(node_id);
- buffer_->Add(literal_id);
- buffer_->Add(height);
+ buffer_->Add(JS_FRAME, zone());
+ buffer_->Add(node_id, zone());
+ buffer_->Add(literal_id, zone());
+ buffer_->Add(height, zone());
}
void Translation::StoreRegister(Register reg) {
- buffer_->Add(REGISTER);
- buffer_->Add(reg.code());
+ buffer_->Add(REGISTER, zone());
+ buffer_->Add(reg.code(), zone());
}
void Translation::StoreInt32Register(Register reg) {
- buffer_->Add(INT32_REGISTER);
- buffer_->Add(reg.code());
+ buffer_->Add(INT32_REGISTER, zone());
+ buffer_->Add(reg.code(), zone());
}
void Translation::StoreDoubleRegister(DoubleRegister reg) {
- buffer_->Add(DOUBLE_REGISTER);
- buffer_->Add(DoubleRegister::ToAllocationIndex(reg));
+ buffer_->Add(DOUBLE_REGISTER, zone());
+ buffer_->Add(DoubleRegister::ToAllocationIndex(reg), zone());
}
void Translation::StoreStackSlot(int index) {
- buffer_->Add(STACK_SLOT);
- buffer_->Add(index);
+ buffer_->Add(STACK_SLOT, zone());
+ buffer_->Add(index, zone());
}
void Translation::StoreInt32StackSlot(int index) {
- buffer_->Add(INT32_STACK_SLOT);
- buffer_->Add(index);
+ buffer_->Add(INT32_STACK_SLOT, zone());
+ buffer_->Add(index, zone());
}
void Translation::StoreDoubleStackSlot(int index) {
- buffer_->Add(DOUBLE_STACK_SLOT);
- buffer_->Add(index);
+ buffer_->Add(DOUBLE_STACK_SLOT, zone());
+ buffer_->Add(index, zone());
}
void Translation::StoreLiteral(int literal_id) {
- buffer_->Add(LITERAL);
- buffer_->Add(literal_id);
+ buffer_->Add(LITERAL, zone());
+ buffer_->Add(literal_id, zone());
}
void Translation::StoreArgumentsObject() {
- buffer_->Add(ARGUMENTS_OBJECT);
+ buffer_->Add(ARGUMENTS_OBJECT, zone());
}
void Translation::MarkDuplicate() {
- buffer_->Add(DUPLICATE);
+ buffer_->Add(DUPLICATE, zone());
}
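
TranslationBuffer::Add(), shown above, folds the operand's sign into the least significant bit (the hunk cuts that expression off mid-line; the sketch assumes the usual (is_negative ? 1 : 0) continuation) and then emits the magnitude in 7-bit groups, with the low bit of every output byte flagging that another byte follows; the only behavioural change in the hunk is that the byte list now allocates from an explicitly passed Zone. Below is a standalone round-trip of that encoding, where EncodeValue, DecodeValue and the test values are inventions of the sketch and the decoder mirrors what the matching iterator is expected to do.

#include <cassert>
#include <cstdint>
#include <cstdio>
#include <vector>

// Sign goes into bit 0 of the value, then 7 payload bits per byte with the
// low bit of each byte flagging that another byte follows.
static void EncodeValue(int32_t value, std::vector<uint8_t>* out) {
  bool is_negative = (value < 0);
  uint32_t bits = (static_cast<uint32_t>(is_negative ? -value : value) << 1) |
                  (is_negative ? 1 : 0);
  do {
    uint32_t next = bits >> 7;
    out->push_back(static_cast<uint8_t>(((bits << 1) & 0xFF) | (next != 0)));
    bits = next;
  } while (bits != 0);
}

static int32_t DecodeValue(const std::vector<uint8_t>& in, size_t* pos) {
  uint32_t bits = 0;
  int shift = 0;
  uint8_t byte;
  do {
    byte = in[(*pos)++];
    bits |= static_cast<uint32_t>(byte >> 1) << shift;
    shift += 7;
  } while ((byte & 1) != 0);
  // Undo the sign-in-the-least-significant-bit folding.
  int32_t magnitude = static_cast<int32_t>(bits >> 1);
  return (bits & 1) ? -magnitude : magnitude;
}

int main() {
  const int32_t kValues[] = { 0, 1, -1, 200, -70000 };
  const size_t kCount = sizeof(kValues) / sizeof(kValues[0]);
  std::vector<uint8_t> buffer;
  for (size_t i = 0; i < kCount; ++i) EncodeValue(kValues[i], &buffer);
  size_t pos = 0;
  for (size_t i = 0; i < kCount; ++i) {
    assert(DecodeValue(buffer, &pos) == kValues[i]);
  }
  std::printf("%u values round-tripped in %u bytes\n",
              static_cast<unsigned>(kCount),
              static_cast<unsigned>(buffer.size()));
  return 0;
}
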
diff --git a/deps/v8/src/deoptimizer.h b/deps/v8/src/deoptimizer.h
index 6bc4a51036..9e8a5491a2 100644
--- a/deps/v8/src/deoptimizer.h
+++ b/deps/v8/src/deoptimizer.h
@@ -221,6 +221,10 @@ class Deoptimizer : public Malloced {
}
static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }
+ static int has_alignment_padding_offset() {
+ return OFFSET_OF(Deoptimizer, has_alignment_padding_);
+ }
+
static int GetDeoptimizedCodeCount(Isolate* isolate);
static const int kNotDeoptimizationEntry = -1;
@@ -322,6 +326,7 @@ class Deoptimizer : public Malloced {
BailoutType bailout_type_;
Address from_;
int fp_to_sp_delta_;
+ int has_alignment_padding_;
// Input frame description.
FrameDescription* input_;
@@ -515,10 +520,10 @@ class FrameDescription {
class TranslationBuffer BASE_EMBEDDED {
public:
- TranslationBuffer() : contents_(256) { }
+ explicit TranslationBuffer(Zone* zone) : contents_(256, zone) { }
int CurrentIndex() const { return contents_.length(); }
- void Add(int32_t value);
+ void Add(int32_t value, Zone* zone);
Handle<ByteArray> CreateByteArray();
@@ -569,12 +574,14 @@ class Translation BASE_EMBEDDED {
DUPLICATE
};
- Translation(TranslationBuffer* buffer, int frame_count, int jsframe_count)
+ Translation(TranslationBuffer* buffer, int frame_count, int jsframe_count,
+ Zone* zone)
: buffer_(buffer),
- index_(buffer->CurrentIndex()) {
- buffer_->Add(BEGIN);
- buffer_->Add(frame_count);
- buffer_->Add(jsframe_count);
+ index_(buffer->CurrentIndex()),
+ zone_(zone) {
+ buffer_->Add(BEGIN, zone);
+ buffer_->Add(frame_count, zone);
+ buffer_->Add(jsframe_count, zone);
}
int index() const { return index_; }
@@ -593,6 +600,8 @@ class Translation BASE_EMBEDDED {
void StoreArgumentsObject();
void MarkDuplicate();
+ Zone* zone() const { return zone_; }
+
static int NumberOfOperandsFor(Opcode opcode);
#if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
@@ -602,6 +611,7 @@ class Translation BASE_EMBEDDED {
private:
TranslationBuffer* buffer_;
int index_;
+ Zone* zone_;
};
diff --git a/deps/v8/src/double.h b/deps/v8/src/double.h
index 16a3245e9a..fcf6906af7 100644
--- a/deps/v8/src/double.h
+++ b/deps/v8/src/double.h
@@ -130,12 +130,6 @@ class Double {
return (d64 & kExponentMask) == kExponentMask;
}
- bool IsNan() const {
- uint64_t d64 = AsUint64();
- return ((d64 & kExponentMask) == kExponentMask) &&
- ((d64 & kSignificandMask) != 0);
- }
-
bool IsInfinite() const {
uint64_t d64 = AsUint64();
return ((d64 & kExponentMask) == kExponentMask) &&
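
The deleted Double::IsNan() classified a value purely from its bit pattern: all exponent bits set and a non-zero significand, whereas the IsInfinite() immediately above requires the significand to be zero. A standalone check of that rule against the standard library, using the conventional IEEE-754 masks rather than V8's Double wrapper:

#include <cassert>
#include <cmath>
#include <cstdint>
#include <cstring>
#include <limits>

// Same masks the removed Double::IsNan used: 11 exponent bits and 52
// significand bits of an IEEE-754 double.
static const uint64_t kExponentMask    = 0x7FF0000000000000ULL;
static const uint64_t kSignificandMask = 0x000FFFFFFFFFFFFFULL;

static uint64_t AsUint64(double d) {
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof(bits));
  return bits;
}

// NaN: exponent all ones and a non-zero significand.  Infinity (the check the
// header keeps) has the same exponent but a zero significand.
static bool IsNanBits(double d) {
  uint64_t d64 = AsUint64(d);
  return (d64 & kExponentMask) == kExponentMask &&
         (d64 & kSignificandMask) != 0;
}

int main() {
  assert(IsNanBits(std::numeric_limits<double>::quiet_NaN()));
  assert(!IsNanBits(std::numeric_limits<double>::infinity()));
  assert(!IsNanBits(1.5) && !IsNanBits(0.0));
  double nan = std::numeric_limits<double>::quiet_NaN();
  assert(IsNanBits(nan) == (std::isnan(nan) != 0));
  return 0;
}
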
diff --git a/deps/v8/src/elements-kind.cc b/deps/v8/src/elements-kind.cc
new file mode 100644
index 0000000000..655a23bf1e
--- /dev/null
+++ b/deps/v8/src/elements-kind.cc
@@ -0,0 +1,134 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "elements-kind.h"
+
+#include "api.h"
+#include "elements.h"
+#include "objects.h"
+
+namespace v8 {
+namespace internal {
+
+
+void PrintElementsKind(FILE* out, ElementsKind kind) {
+ ElementsAccessor* accessor = ElementsAccessor::ForKind(kind);
+ PrintF(out, "%s", accessor->name());
+}
+
+
+ElementsKind GetInitialFastElementsKind() {
+ if (FLAG_packed_arrays) {
+ return FAST_SMI_ELEMENTS;
+ } else {
+ return FAST_HOLEY_SMI_ELEMENTS;
+ }
+}
+
+
+struct InitializeFastElementsKindSequence {
+ static void Construct(
+ ElementsKind** fast_elements_kind_sequence_ptr) {
+ ElementsKind* fast_elements_kind_sequence =
+ new ElementsKind[kFastElementsKindCount];
+ *fast_elements_kind_sequence_ptr = fast_elements_kind_sequence;
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == FIRST_FAST_ELEMENTS_KIND);
+ fast_elements_kind_sequence[0] = FAST_SMI_ELEMENTS;
+ fast_elements_kind_sequence[1] = FAST_HOLEY_SMI_ELEMENTS;
+ fast_elements_kind_sequence[2] = FAST_DOUBLE_ELEMENTS;
+ fast_elements_kind_sequence[3] = FAST_HOLEY_DOUBLE_ELEMENTS;
+ fast_elements_kind_sequence[4] = FAST_ELEMENTS;
+ fast_elements_kind_sequence[5] = FAST_HOLEY_ELEMENTS;
+ }
+};
+
+
+static LazyInstance<ElementsKind*,
+ InitializeFastElementsKindSequence>::type
+ fast_elements_kind_sequence = LAZY_INSTANCE_INITIALIZER;
+
+
+ElementsKind GetFastElementsKindFromSequenceIndex(int sequence_number) {
+ ASSERT(sequence_number >= 0 &&
+ sequence_number < kFastElementsKindCount);
+ return fast_elements_kind_sequence.Get()[sequence_number];
+}
+
+int GetSequenceIndexFromFastElementsKind(ElementsKind elements_kind) {
+ for (int i = 0; i < kFastElementsKindCount; ++i) {
+ if (fast_elements_kind_sequence.Get()[i] == elements_kind) {
+ return i;
+ }
+ }
+ UNREACHABLE();
+ return 0;
+}
+
+
+ElementsKind GetNextMoreGeneralFastElementsKind(ElementsKind elements_kind,
+ bool allow_only_packed) {
+ ASSERT(IsFastElementsKind(elements_kind));
+ ASSERT(elements_kind != TERMINAL_FAST_ELEMENTS_KIND);
+ while (true) {
+ int index =
+ GetSequenceIndexFromFastElementsKind(elements_kind) + 1;
+ elements_kind = GetFastElementsKindFromSequenceIndex(index);
+ if (!IsFastHoleyElementsKind(elements_kind) || !allow_only_packed) {
+ return elements_kind;
+ }
+ }
+ UNREACHABLE();
+ return TERMINAL_FAST_ELEMENTS_KIND;
+}
+
+
+bool IsMoreGeneralElementsKindTransition(ElementsKind from_kind,
+ ElementsKind to_kind) {
+ switch (from_kind) {
+ case FAST_SMI_ELEMENTS:
+ return to_kind != FAST_SMI_ELEMENTS;
+ case FAST_HOLEY_SMI_ELEMENTS:
+ return to_kind != FAST_SMI_ELEMENTS &&
+ to_kind != FAST_HOLEY_SMI_ELEMENTS;
+ case FAST_DOUBLE_ELEMENTS:
+ return to_kind != FAST_SMI_ELEMENTS &&
+ to_kind != FAST_HOLEY_SMI_ELEMENTS &&
+ to_kind != FAST_DOUBLE_ELEMENTS;
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
+ return to_kind == FAST_ELEMENTS ||
+ to_kind == FAST_HOLEY_ELEMENTS;
+ case FAST_ELEMENTS:
+ return to_kind == FAST_HOLEY_ELEMENTS;
+ case FAST_HOLEY_ELEMENTS:
+ return false;
+ default:
+ return false;
+ }
+}
+
+
+} } // namespace v8::internal
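
The sequence installed above is the whole story of fast-kind generalization: packed smi, holey smi, packed double, holey double, packed object, holey object, and GetNextMoreGeneralFastElementsKind simply steps along it, skipping holey entries when the caller only accepts packed kinds. The sketch below re-declares a local six-kind enum so it compiles outside V8 and replays that walk; the shortened names are stand-ins for the FAST_* constants.

#include <cassert>

// Local stand-ins for the six fast ElementsKinds, declared in the same order
// as fast_elements_kind_sequence.
enum FastKind {
  PACKED_SMI,
  HOLEY_SMI,
  PACKED_DOUBLE,
  HOLEY_DOUBLE,
  PACKED_OBJECT,
  HOLEY_OBJECT,
  kFastKindCount
};

static const FastKind kSequence[kFastKindCount] = {
  PACKED_SMI, HOLEY_SMI, PACKED_DOUBLE, HOLEY_DOUBLE, PACKED_OBJECT,
  HOLEY_OBJECT
};

static bool IsHoley(FastKind kind) {
  return kind == HOLEY_SMI || kind == HOLEY_DOUBLE || kind == HOLEY_OBJECT;
}

static int SequenceIndex(FastKind kind) {
  for (int i = 0; i < kFastKindCount; ++i) {
    if (kSequence[i] == kind) return i;
  }
  return -1;
}

// Mirrors GetNextMoreGeneralFastElementsKind: step along the sequence and, if
// only packed kinds are acceptable, keep stepping past the holey ones.  As in
// V8, the terminal (most general) kind must not be passed in.
static FastKind NextMoreGeneral(FastKind kind, bool allow_only_packed) {
  for (;;) {
    kind = kSequence[SequenceIndex(kind) + 1];
    if (!IsHoley(kind) || !allow_only_packed) return kind;
  }
}

int main() {
  assert(NextMoreGeneral(PACKED_SMI, false) == HOLEY_SMI);
  assert(NextMoreGeneral(PACKED_SMI, true) == PACKED_DOUBLE);     // skips holey smi
  assert(NextMoreGeneral(PACKED_DOUBLE, true) == PACKED_OBJECT);  // skips holey double
  assert(NextMoreGeneral(HOLEY_DOUBLE, false) == PACKED_OBJECT);
  return 0;
}
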
diff --git a/deps/v8/src/elements-kind.h b/deps/v8/src/elements-kind.h
new file mode 100644
index 0000000000..3be7711a35
--- /dev/null
+++ b/deps/v8/src/elements-kind.h
@@ -0,0 +1,221 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef V8_ELEMENTS_KIND_H_
+#define V8_ELEMENTS_KIND_H_
+
+#include "v8checks.h"
+
+namespace v8 {
+namespace internal {
+
+enum ElementsKind {
+ // The "fast" kind for elements that only contain SMI values. Must be first
+ // to make it possible to efficiently check maps for this kind.
+ FAST_SMI_ELEMENTS,
+ FAST_HOLEY_SMI_ELEMENTS,
+
+ // The "fast" kind for tagged values. Must directly follow the smi-only
+ // kinds to make it possible to efficiently check maps for this and the
+ // FAST_SMI_ELEMENTS kind at once.
+ FAST_ELEMENTS,
+ FAST_HOLEY_ELEMENTS,
+
+ // The "fast" kind for unwrapped, non-tagged double values.
+ FAST_DOUBLE_ELEMENTS,
+ FAST_HOLEY_DOUBLE_ELEMENTS,
+
+ // The "slow" kind.
+ DICTIONARY_ELEMENTS,
+ NON_STRICT_ARGUMENTS_ELEMENTS,
+ // The "fast" kind for external arrays
+ EXTERNAL_BYTE_ELEMENTS,
+ EXTERNAL_UNSIGNED_BYTE_ELEMENTS,
+ EXTERNAL_SHORT_ELEMENTS,
+ EXTERNAL_UNSIGNED_SHORT_ELEMENTS,
+ EXTERNAL_INT_ELEMENTS,
+ EXTERNAL_UNSIGNED_INT_ELEMENTS,
+ EXTERNAL_FLOAT_ELEMENTS,
+ EXTERNAL_DOUBLE_ELEMENTS,
+ EXTERNAL_PIXEL_ELEMENTS,
+
+ // Derived constants from ElementsKind
+ FIRST_ELEMENTS_KIND = FAST_SMI_ELEMENTS,
+ LAST_ELEMENTS_KIND = EXTERNAL_PIXEL_ELEMENTS,
+ FIRST_FAST_ELEMENTS_KIND = FAST_SMI_ELEMENTS,
+ LAST_FAST_ELEMENTS_KIND = FAST_HOLEY_DOUBLE_ELEMENTS,
+ FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND = EXTERNAL_BYTE_ELEMENTS,
+ LAST_EXTERNAL_ARRAY_ELEMENTS_KIND = EXTERNAL_PIXEL_ELEMENTS,
+ TERMINAL_FAST_ELEMENTS_KIND = FAST_HOLEY_ELEMENTS
+};
+
+const int kElementsKindCount = LAST_ELEMENTS_KIND - FIRST_ELEMENTS_KIND + 1;
+const int kFastElementsKindCount = LAST_FAST_ELEMENTS_KIND -
+ FIRST_FAST_ELEMENTS_KIND + 1;
+
+void PrintElementsKind(FILE* out, ElementsKind kind);
+
+ElementsKind GetInitialFastElementsKind();
+
+ElementsKind GetFastElementsKindFromSequenceIndex(int sequence_index);
+
+int GetSequenceIndexFromFastElementsKind(ElementsKind elements_kind);
+
+
+inline bool IsDictionaryElementsKind(ElementsKind kind) {
+ return kind == DICTIONARY_ELEMENTS;
+}
+
+
+inline bool IsExternalArrayElementsKind(ElementsKind kind) {
+ return kind >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
+ kind <= LAST_EXTERNAL_ARRAY_ELEMENTS_KIND;
+}
+
+
+inline bool IsFastElementsKind(ElementsKind kind) {
+ ASSERT(FIRST_FAST_ELEMENTS_KIND == 0);
+ return kind <= FAST_HOLEY_DOUBLE_ELEMENTS;
+}
+
+
+inline bool IsFastDoubleElementsKind(ElementsKind kind) {
+ return kind == FAST_DOUBLE_ELEMENTS ||
+ kind == FAST_HOLEY_DOUBLE_ELEMENTS;
+}
+
+
+inline bool IsFastSmiOrObjectElementsKind(ElementsKind kind) {
+ return kind == FAST_SMI_ELEMENTS ||
+ kind == FAST_HOLEY_SMI_ELEMENTS ||
+ kind == FAST_ELEMENTS ||
+ kind == FAST_HOLEY_ELEMENTS;
+}
+
+
+inline bool IsFastSmiElementsKind(ElementsKind kind) {
+ return kind == FAST_SMI_ELEMENTS ||
+ kind == FAST_HOLEY_SMI_ELEMENTS;
+}
+
+
+inline bool IsFastObjectElementsKind(ElementsKind kind) {
+ return kind == FAST_ELEMENTS ||
+ kind == FAST_HOLEY_ELEMENTS;
+}
+
+
+inline bool IsFastHoleyElementsKind(ElementsKind kind) {
+ return kind == FAST_HOLEY_SMI_ELEMENTS ||
+ kind == FAST_HOLEY_DOUBLE_ELEMENTS ||
+ kind == FAST_HOLEY_ELEMENTS;
+}
+
+
+inline bool IsHoleyElementsKind(ElementsKind kind) {
+ return IsFastHoleyElementsKind(kind) ||
+ kind == DICTIONARY_ELEMENTS;
+}
+
+
+inline bool IsFastPackedElementsKind(ElementsKind kind) {
+ return kind == FAST_SMI_ELEMENTS ||
+ kind == FAST_DOUBLE_ELEMENTS ||
+ kind == FAST_ELEMENTS;
+}
+
+
+inline ElementsKind GetPackedElementsKind(ElementsKind holey_kind) {
+ if (holey_kind == FAST_HOLEY_SMI_ELEMENTS) {
+ return FAST_SMI_ELEMENTS;
+ }
+ if (holey_kind == FAST_HOLEY_DOUBLE_ELEMENTS) {
+ return FAST_DOUBLE_ELEMENTS;
+ }
+ if (holey_kind == FAST_HOLEY_ELEMENTS) {
+ return FAST_ELEMENTS;
+ }
+ return holey_kind;
+}
+
+
+inline ElementsKind GetHoleyElementsKind(ElementsKind packed_kind) {
+ if (packed_kind == FAST_SMI_ELEMENTS) {
+ return FAST_HOLEY_SMI_ELEMENTS;
+ }
+ if (packed_kind == FAST_DOUBLE_ELEMENTS) {
+ return FAST_HOLEY_DOUBLE_ELEMENTS;
+ }
+ if (packed_kind == FAST_ELEMENTS) {
+ return FAST_HOLEY_ELEMENTS;
+ }
+ return packed_kind;
+}
+
+
+inline ElementsKind FastSmiToObjectElementsKind(ElementsKind from_kind) {
+ ASSERT(IsFastSmiElementsKind(from_kind));
+ return (from_kind == FAST_SMI_ELEMENTS)
+ ? FAST_ELEMENTS
+ : FAST_HOLEY_ELEMENTS;
+}
+
+
+inline bool IsSimpleMapChangeTransition(ElementsKind from_kind,
+ ElementsKind to_kind) {
+ return (GetHoleyElementsKind(from_kind) == to_kind) ||
+ (IsFastSmiElementsKind(from_kind) &&
+ IsFastObjectElementsKind(to_kind));
+}
+
+
+bool IsMoreGeneralElementsKindTransition(ElementsKind from_kind,
+ ElementsKind to_kind);
+
+
+inline bool IsTransitionableFastElementsKind(ElementsKind from_kind) {
+ return IsFastElementsKind(from_kind) &&
+ from_kind != TERMINAL_FAST_ELEMENTS_KIND;
+}
+
+
+ElementsKind GetNextMoreGeneralFastElementsKind(ElementsKind elements_kind,
+ bool allow_only_packed);
+
+
+inline bool CanTransitionToMoreGeneralFastElementsKind(
+ ElementsKind elements_kind,
+ bool allow_only_packed) {
+ return IsFastElementsKind(elements_kind) &&
+ (elements_kind != TERMINAL_FAST_ELEMENTS_KIND &&
+ (!allow_only_packed || elements_kind != FAST_ELEMENTS));
+}
+
+
+} } // namespace v8::internal
+
+#endif // V8_ELEMENTS_KIND_H_
diff --git a/deps/v8/src/elements.cc b/deps/v8/src/elements.cc
index aa51ea9b78..f0e1414de4 100644
--- a/deps/v8/src/elements.cc
+++ b/deps/v8/src/elements.cc
@@ -39,8 +39,14 @@
// Inheritance hierarchy:
// - ElementsAccessorBase (abstract)
// - FastElementsAccessor (abstract)
-// - FastObjectElementsAccessor
+// - FastSmiOrObjectElementsAccessor
+// - FastPackedSmiElementsAccessor
+// - FastHoleySmiElementsAccessor
+// - FastPackedObjectElementsAccessor
+// - FastHoleyObjectElementsAccessor
// - FastDoubleElementsAccessor
+// - FastPackedDoubleElementsAccessor
+// - FastHoleyDoubleElementsAccessor
// - ExternalElementsAccessor (abstract)
// - ExternalByteElementsAccessor
// - ExternalUnsignedByteElementsAccessor
@@ -59,15 +65,24 @@ namespace v8 {
namespace internal {
+static const int kPackedSizeNotKnown = -1;
+
+
// First argument in list is the accessor class, the second argument is the
// accessor ElementsKind, and the third is the backing store class. Use the
// fast element handler for smi-only arrays. The implementation is currently
// identical. Note that the order must match that of the ElementsKind enum for
// the |accessor_array[]| below to work.
#define ELEMENTS_LIST(V) \
- V(FastObjectElementsAccessor, FAST_SMI_ONLY_ELEMENTS, FixedArray) \
- V(FastObjectElementsAccessor, FAST_ELEMENTS, FixedArray) \
- V(FastDoubleElementsAccessor, FAST_DOUBLE_ELEMENTS, FixedDoubleArray) \
+ V(FastPackedSmiElementsAccessor, FAST_SMI_ELEMENTS, FixedArray) \
+ V(FastHoleySmiElementsAccessor, FAST_HOLEY_SMI_ELEMENTS, \
+ FixedArray) \
+ V(FastPackedObjectElementsAccessor, FAST_ELEMENTS, FixedArray) \
+ V(FastHoleyObjectElementsAccessor, FAST_HOLEY_ELEMENTS, FixedArray) \
+ V(FastPackedDoubleElementsAccessor, FAST_DOUBLE_ELEMENTS, \
+ FixedDoubleArray) \
+ V(FastHoleyDoubleElementsAccessor, FAST_HOLEY_DOUBLE_ELEMENTS, \
+ FixedDoubleArray) \
V(DictionaryElementsAccessor, DICTIONARY_ELEMENTS, \
SeededNumberDictionary) \
V(NonStrictArgumentsElementsAccessor, NON_STRICT_ARGUMENTS_ELEMENTS, \
@@ -139,8 +154,6 @@ void CopyObjectToObjectElements(FixedArray* from,
uint32_t to_start,
int raw_copy_size) {
ASSERT(to->map() != HEAP->fixed_cow_array_map());
- ASSERT(from_kind == FAST_ELEMENTS || from_kind == FAST_SMI_ONLY_ELEMENTS);
- ASSERT(to_kind == FAST_ELEMENTS || to_kind == FAST_SMI_ONLY_ELEMENTS);
int copy_size = raw_copy_size;
if (raw_copy_size < 0) {
ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd ||
@@ -148,7 +161,7 @@ void CopyObjectToObjectElements(FixedArray* from,
copy_size = Min(from->length() - from_start,
to->length() - to_start);
#ifdef DEBUG
- // FAST_ELEMENT arrays cannot be uninitialized. Ensure they are already
+ // FAST_*_ELEMENTS arrays cannot be uninitialized. Ensure they are already
// marked with the hole.
if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
for (int i = to_start + copy_size; i < to->length(); ++i) {
@@ -160,12 +173,15 @@ void CopyObjectToObjectElements(FixedArray* from,
ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() &&
(copy_size + static_cast<int>(from_start)) <= from->length());
if (copy_size == 0) return;
+ ASSERT(IsFastSmiOrObjectElementsKind(from_kind));
+ ASSERT(IsFastSmiOrObjectElementsKind(to_kind));
Address to_address = to->address() + FixedArray::kHeaderSize;
Address from_address = from->address() + FixedArray::kHeaderSize;
CopyWords(reinterpret_cast<Object**>(to_address) + to_start,
reinterpret_cast<Object**>(from_address) + from_start,
copy_size);
- if (from_kind == FAST_ELEMENTS && to_kind == FAST_ELEMENTS) {
+ if (IsFastObjectElementsKind(from_kind) &&
+ IsFastObjectElementsKind(to_kind)) {
Heap* heap = from->GetHeap();
if (!heap->InNewSpace(to)) {
heap->RecordWrites(to->address(),
@@ -190,7 +206,7 @@ static void CopyDictionaryToObjectElements(SeededNumberDictionary* from,
raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
copy_size = from->max_number_key() + 1 - from_start;
#ifdef DEBUG
- // FAST_ELEMENT arrays cannot be uninitialized. Ensure they are already
+ // Fast object arrays cannot be uninitialized. Ensure they are already
// marked with the hole.
if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
for (int i = to_start + copy_size; i < to->length(); ++i) {
@@ -200,7 +216,7 @@ static void CopyDictionaryToObjectElements(SeededNumberDictionary* from,
#endif
}
ASSERT(to != from);
- ASSERT(to_kind == FAST_ELEMENTS || to_kind == FAST_SMI_ONLY_ELEMENTS);
+ ASSERT(IsFastSmiOrObjectElementsKind(to_kind));
if (copy_size == 0) return;
uint32_t to_length = to->length();
if (to_start + copy_size > to_length) {
@@ -216,7 +232,7 @@ static void CopyDictionaryToObjectElements(SeededNumberDictionary* from,
to->set_the_hole(i + to_start);
}
}
- if (to_kind == FAST_ELEMENTS) {
+ if (IsFastObjectElementsKind(to_kind)) {
if (!heap->InNewSpace(to)) {
heap->RecordWrites(to->address(),
to->OffsetOfElementAt(to_start),
@@ -234,7 +250,7 @@ MUST_USE_RESULT static MaybeObject* CopyDoubleToObjectElements(
ElementsKind to_kind,
uint32_t to_start,
int raw_copy_size) {
- ASSERT(to_kind == FAST_ELEMENTS || to_kind == FAST_SMI_ONLY_ELEMENTS);
+ ASSERT(IsFastSmiOrObjectElementsKind(to_kind));
int copy_size = raw_copy_size;
if (raw_copy_size < 0) {
ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd ||
@@ -242,7 +258,7 @@ MUST_USE_RESULT static MaybeObject* CopyDoubleToObjectElements(
copy_size = Min(from->length() - from_start,
to->length() - to_start);
#ifdef DEBUG
- // FAST_ELEMENT arrays cannot be uninitialized. Ensure they are already
+ // FAST_*_ELEMENTS arrays cannot be uninitialized. Ensure they are already
// marked with the hole.
if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
for (int i = to_start + copy_size; i < to->length(); ++i) {
@@ -255,14 +271,14 @@ MUST_USE_RESULT static MaybeObject* CopyDoubleToObjectElements(
(copy_size + static_cast<int>(from_start)) <= from->length());
if (copy_size == 0) return from;
for (int i = 0; i < copy_size; ++i) {
- if (to_kind == FAST_SMI_ONLY_ELEMENTS) {
+ if (IsFastSmiElementsKind(to_kind)) {
UNIMPLEMENTED();
return Failure::Exception();
} else {
MaybeObject* maybe_value = from->get(i + from_start);
Object* value;
- ASSERT(to_kind == FAST_ELEMENTS);
- // Because FAST_DOUBLE_ELEMENTS -> FAST_ELEMENT allocate HeapObjects
+ ASSERT(IsFastObjectElementsKind(to_kind));
+ // Because Double -> Object elements transitions allocate HeapObjects
// iteratively, the allocate must succeed within a single GC cycle,
// otherwise the retry after the GC will also fail. In order to ensure
// that no GC is triggered, allocate HeapNumbers from old space if they
@@ -313,6 +329,76 @@ static void CopyDoubleToDoubleElements(FixedDoubleArray* from,
}
+static void CopySmiToDoubleElements(FixedArray* from,
+ uint32_t from_start,
+ FixedDoubleArray* to,
+ uint32_t to_start,
+ int raw_copy_size) {
+ int copy_size = raw_copy_size;
+ if (raw_copy_size < 0) {
+ ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd ||
+ raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
+ copy_size = from->length() - from_start;
+ if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
+ for (int i = to_start + copy_size; i < to->length(); ++i) {
+ to->set_the_hole(i);
+ }
+ }
+ }
+ ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() &&
+ (copy_size + static_cast<int>(from_start)) <= from->length());
+ if (copy_size == 0) return;
+ Object* the_hole = from->GetHeap()->the_hole_value();
+ for (uint32_t from_end = from_start + static_cast<uint32_t>(copy_size);
+ from_start < from_end; from_start++, to_start++) {
+ Object* hole_or_smi = from->get(from_start);
+ if (hole_or_smi == the_hole) {
+ to->set_the_hole(to_start);
+ } else {
+ to->set(to_start, Smi::cast(hole_or_smi)->value());
+ }
+ }
+}
+
+
+static void CopyPackedSmiToDoubleElements(FixedArray* from,
+ uint32_t from_start,
+ FixedDoubleArray* to,
+ uint32_t to_start,
+ int packed_size,
+ int raw_copy_size) {
+ int copy_size = raw_copy_size;
+ uint32_t to_end;
+ if (raw_copy_size < 0) {
+ ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd ||
+ raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
+ copy_size = from->length() - from_start;
+ if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
+ to_end = to->length();
+ } else {
+ to_end = to_start + static_cast<uint32_t>(copy_size);
+ }
+ } else {
+ to_end = to_start + static_cast<uint32_t>(copy_size);
+ }
+ ASSERT(static_cast<int>(to_end) <= to->length());
+ ASSERT(packed_size >= 0 && packed_size <= copy_size);
+ ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() &&
+ (copy_size + static_cast<int>(from_start)) <= from->length());
+ if (copy_size == 0) return;
+ for (uint32_t from_end = from_start + static_cast<uint32_t>(packed_size);
+ from_start < from_end; from_start++, to_start++) {
+ Object* smi = from->get(from_start);
+ ASSERT(!smi->IsTheHole());
+ to->set(to_start, Smi::cast(smi)->value());
+ }
+
+ while (to_start < to_end) {
+ to->set_the_hole(to_start++);
+ }
+}
+
+
static void CopyObjectToDoubleElements(FixedArray* from,
uint32_t from_start,
FixedDoubleArray* to,
@@ -332,12 +418,14 @@ static void CopyObjectToDoubleElements(FixedArray* from,
ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() &&
(copy_size + static_cast<int>(from_start)) <= from->length());
if (copy_size == 0) return;
- for (int i = 0; i < copy_size; i++) {
- Object* hole_or_object = from->get(i + from_start);
- if (hole_or_object->IsTheHole()) {
- to->set_the_hole(i + to_start);
+ Object* the_hole = from->GetHeap()->the_hole_value();
+ for (uint32_t from_end = from_start + copy_size;
+ from_start < from_end; from_start++, to_start++) {
+ Object* hole_or_object = from->get(from_start);
+ if (hole_or_object == the_hole) {
+ to->set_the_hole(to_start);
} else {
- to->set(i + to_start, hole_or_object->Number());
+ to->set(to_start, hole_or_object->Number());
}
}
}
@@ -404,6 +492,38 @@ class ElementsAccessorBase : public ElementsAccessor {
virtual ElementsKind kind() const { return ElementsTraits::Kind; }
+ static void ValidateContents(JSObject* holder, int length) {
+ }
+
+ static void ValidateImpl(JSObject* holder) {
+ FixedArrayBase* fixed_array_base = holder->elements();
+ // When objects are first allocated, their elements are Failures.
+ if (fixed_array_base->IsFailure()) return;
+ if (!fixed_array_base->IsHeapObject()) return;
+ Map* map = fixed_array_base->map();
+ // Arrays that have been shifted in place can't be verified.
+ Heap* heap = holder->GetHeap();
+ if (map == heap->raw_unchecked_one_pointer_filler_map() ||
+ map == heap->raw_unchecked_two_pointer_filler_map() ||
+ map == heap->free_space_map()) {
+ return;
+ }
+ int length = 0;
+ if (holder->IsJSArray()) {
+ Object* length_obj = JSArray::cast(holder)->length();
+ if (length_obj->IsSmi()) {
+ length = Smi::cast(length_obj)->value();
+ }
+ } else {
+ length = fixed_array_base->length();
+ }
+ ElementsAccessorSubclass::ValidateContents(holder, length);
+ }
+
+ virtual void Validate(JSObject* holder) {
+ ElementsAccessorSubclass::ValidateImpl(holder);
+ }
+
static bool HasElementImpl(Object* receiver,
JSObject* holder,
uint32_t key,
@@ -424,10 +544,10 @@ class ElementsAccessorBase : public ElementsAccessor {
receiver, holder, key, BackingStore::cast(backing_store));
}
- virtual MaybeObject* Get(Object* receiver,
- JSObject* holder,
- uint32_t key,
- FixedArrayBase* backing_store) {
+ MUST_USE_RESULT virtual MaybeObject* Get(Object* receiver,
+ JSObject* holder,
+ uint32_t key,
+ FixedArrayBase* backing_store) {
if (backing_store == NULL) {
backing_store = holder->elements();
}
@@ -435,76 +555,94 @@ class ElementsAccessorBase : public ElementsAccessor {
receiver, holder, key, BackingStore::cast(backing_store));
}
- static MaybeObject* GetImpl(Object* receiver,
- JSObject* obj,
- uint32_t key,
- BackingStore* backing_store) {
+ MUST_USE_RESULT static MaybeObject* GetImpl(Object* receiver,
+ JSObject* obj,
+ uint32_t key,
+ BackingStore* backing_store) {
return (key < ElementsAccessorSubclass::GetCapacityImpl(backing_store))
? backing_store->get(key)
: backing_store->GetHeap()->the_hole_value();
}
- virtual MaybeObject* SetLength(JSArray* array,
- Object* length) {
+ MUST_USE_RESULT virtual MaybeObject* SetLength(JSArray* array,
+ Object* length) {
return ElementsAccessorSubclass::SetLengthImpl(
array, length, BackingStore::cast(array->elements()));
}
- static MaybeObject* SetLengthImpl(JSObject* obj,
- Object* length,
- BackingStore* backing_store);
+ MUST_USE_RESULT static MaybeObject* SetLengthImpl(
+ JSObject* obj,
+ Object* length,
+ BackingStore* backing_store);
- virtual MaybeObject* SetCapacityAndLength(JSArray* array,
- int capacity,
- int length) {
+ MUST_USE_RESULT virtual MaybeObject* SetCapacityAndLength(
+ JSArray* array,
+ int capacity,
+ int length) {
return ElementsAccessorSubclass::SetFastElementsCapacityAndLength(
array,
capacity,
length);
}
- static MaybeObject* SetFastElementsCapacityAndLength(JSObject* obj,
- int capacity,
- int length) {
+ MUST_USE_RESULT static MaybeObject* SetFastElementsCapacityAndLength(
+ JSObject* obj,
+ int capacity,
+ int length) {
UNIMPLEMENTED();
return obj;
}
- virtual MaybeObject* Delete(JSObject* obj,
- uint32_t key,
- JSReceiver::DeleteMode mode) = 0;
-
- static MaybeObject* CopyElementsImpl(FixedArrayBase* from,
- uint32_t from_start,
- FixedArrayBase* to,
- ElementsKind to_kind,
- uint32_t to_start,
- int copy_size) {
+ MUST_USE_RESULT virtual MaybeObject* Delete(JSObject* obj,
+ uint32_t key,
+ JSReceiver::DeleteMode mode) = 0;
+
+ MUST_USE_RESULT static MaybeObject* CopyElementsImpl(FixedArrayBase* from,
+ uint32_t from_start,
+ FixedArrayBase* to,
+ ElementsKind to_kind,
+ uint32_t to_start,
+ int packed_size,
+ int copy_size) {
UNREACHABLE();
return NULL;
}
- virtual MaybeObject* CopyElements(JSObject* from_holder,
- uint32_t from_start,
- FixedArrayBase* to,
- ElementsKind to_kind,
- uint32_t to_start,
- int copy_size,
- FixedArrayBase* from) {
+ MUST_USE_RESULT virtual MaybeObject* CopyElements(JSObject* from_holder,
+ uint32_t from_start,
+ FixedArrayBase* to,
+ ElementsKind to_kind,
+ uint32_t to_start,
+ int copy_size,
+ FixedArrayBase* from) {
+ int packed_size = kPackedSizeNotKnown;
if (from == NULL) {
from = from_holder->elements();
}
+
+ if (from_holder) {
+ ElementsKind elements_kind = from_holder->GetElementsKind();
+ bool is_packed = IsFastPackedElementsKind(elements_kind) &&
+ from_holder->IsJSArray();
+ if (is_packed) {
+ packed_size = Smi::cast(JSArray::cast(from_holder)->length())->value();
+ if (copy_size >= 0 && packed_size > copy_size) {
+ packed_size = copy_size;
+ }
+ }
+ }
if (from->length() == 0) {
return from;
}
return ElementsAccessorSubclass::CopyElementsImpl(
- from, from_start, to, to_kind, to_start, copy_size);
+ from, from_start, to, to_kind, to_start, packed_size, copy_size);
}
- virtual MaybeObject* AddElementsToFixedArray(Object* receiver,
- JSObject* holder,
- FixedArray* to,
- FixedArrayBase* from) {
+ MUST_USE_RESULT virtual MaybeObject* AddElementsToFixedArray(
+ Object* receiver,
+ JSObject* holder,
+ FixedArray* to,
+ FixedArrayBase* from) {
int len0 = to->length();
#ifdef DEBUG
if (FLAG_enable_slow_asserts) {
@@ -620,6 +758,7 @@ class FastElementsAccessor
KindTraits>(name) {}
protected:
friend class ElementsAccessorBase<FastElementsAccessorSubclass, KindTraits>;
+ friend class NonStrictArgumentsElementsAccessor;
typedef typename KindTraits::BackingStore BackingStore;
@@ -630,10 +769,21 @@ class FastElementsAccessor
Object* length_object,
uint32_t length) {
uint32_t old_capacity = backing_store->length();
+ Object* old_length = array->length();
+ bool same_size = old_length->IsSmi() &&
+ static_cast<uint32_t>(Smi::cast(old_length)->value()) == length;
+ ElementsKind kind = array->GetElementsKind();
+
+ if (!same_size && IsFastElementsKind(kind) &&
+ !IsFastHoleyElementsKind(kind)) {
+ kind = GetHoleyElementsKind(kind);
+ MaybeObject* maybe_obj = array->TransitionElementsKind(kind);
+ if (maybe_obj->IsFailure()) return maybe_obj;
+ }
// Check whether the backing store should be shrunk.
if (length <= old_capacity) {
- if (array->HasFastTypeElements()) {
+ if (array->HasFastSmiOrObjectElements()) {
MaybeObject* maybe_obj = array->EnsureWritableFastElements();
if (!maybe_obj->To(&backing_store)) return maybe_obj;
}
@@ -665,39 +815,40 @@ class FastElementsAccessor
MaybeObject* result = FastElementsAccessorSubclass::
SetFastElementsCapacityAndLength(array, new_capacity, length);
if (result->IsFailure()) return result;
+ array->ValidateElements();
return length_object;
}
// Request conversion to slow elements.
return array->GetHeap()->undefined_value();
}
-};
-
-
-class FastObjectElementsAccessor
- : public FastElementsAccessor<FastObjectElementsAccessor,
- ElementsKindTraits<FAST_ELEMENTS>,
- kPointerSize> {
- public:
- explicit FastObjectElementsAccessor(const char* name)
- : FastElementsAccessor<FastObjectElementsAccessor,
- ElementsKindTraits<FAST_ELEMENTS>,
- kPointerSize>(name) {}
static MaybeObject* DeleteCommon(JSObject* obj,
- uint32_t key) {
- ASSERT(obj->HasFastElements() ||
- obj->HasFastSmiOnlyElements() ||
+ uint32_t key,
+ JSReceiver::DeleteMode mode) {
+ ASSERT(obj->HasFastSmiOrObjectElements() ||
+ obj->HasFastDoubleElements() ||
obj->HasFastArgumentsElements());
+ typename KindTraits::BackingStore* backing_store =
+ KindTraits::BackingStore::cast(obj->elements());
Heap* heap = obj->GetHeap();
- FixedArray* backing_store = FixedArray::cast(obj->elements());
if (backing_store->map() == heap->non_strict_arguments_elements_map()) {
- backing_store = FixedArray::cast(backing_store->get(1));
+ backing_store =
+ KindTraits::BackingStore::cast(
+ FixedArray::cast(backing_store)->get(1));
} else {
- Object* writable;
- MaybeObject* maybe = obj->EnsureWritableFastElements();
- if (!maybe->ToObject(&writable)) return maybe;
- backing_store = FixedArray::cast(writable);
+ ElementsKind kind = KindTraits::Kind;
+ if (IsFastPackedElementsKind(kind)) {
+ MaybeObject* transitioned =
+ obj->TransitionElementsKind(GetHoleyElementsKind(kind));
+ if (transitioned->IsFailure()) return transitioned;
+ }
+ if (IsFastSmiOrObjectElementsKind(KindTraits::Kind)) {
+ Object* writable;
+ MaybeObject* maybe = obj->EnsureWritableFastElements();
+ if (!maybe->ToObject(&writable)) return maybe;
+ backing_store = KindTraits::BackingStore::cast(writable);
+ }
}
uint32_t length = static_cast<uint32_t>(
obj->IsJSArray()
@@ -709,15 +860,14 @@ class FastObjectElementsAccessor
// has too few used values, normalize it.
// To avoid doing the check on every delete we require at least
// one adjacent hole to the value being deleted.
- Object* hole = heap->the_hole_value();
const int kMinLengthForSparsenessCheck = 64;
if (backing_store->length() >= kMinLengthForSparsenessCheck &&
!heap->InNewSpace(backing_store) &&
- ((key > 0 && backing_store->get(key - 1) == hole) ||
- (key + 1 < length && backing_store->get(key + 1) == hole))) {
+ ((key > 0 && backing_store->is_the_hole(key - 1)) ||
+ (key + 1 < length && backing_store->is_the_hole(key + 1)))) {
int num_used = 0;
for (int i = 0; i < backing_store->length(); ++i) {
- if (backing_store->get(i) != hole) ++num_used;
+ if (!backing_store->is_the_hole(i)) ++num_used;
// Bail out early if more than 1/4 is used.
if (4 * num_used > backing_store->length()) break;
}
@@ -730,27 +880,90 @@ class FastObjectElementsAccessor
return heap->true_value();
}
+ virtual MaybeObject* Delete(JSObject* obj,
+ uint32_t key,
+ JSReceiver::DeleteMode mode) {
+ return DeleteCommon(obj, key, mode);
+ }
+
+ static bool HasElementImpl(
+ Object* receiver,
+ JSObject* holder,
+ uint32_t key,
+ typename KindTraits::BackingStore* backing_store) {
+ if (key >= static_cast<uint32_t>(backing_store->length())) {
+ return false;
+ }
+ return !backing_store->is_the_hole(key);
+ }
+
+ static void ValidateContents(JSObject* holder, int length) {
+#if DEBUG
+ FixedArrayBase* elements = holder->elements();
+ Heap* heap = elements->GetHeap();
+ Map* map = elements->map();
+ ASSERT((IsFastSmiOrObjectElementsKind(KindTraits::Kind) &&
+ (map == heap->fixed_array_map() ||
+ map == heap->fixed_cow_array_map())) ||
+ (IsFastDoubleElementsKind(KindTraits::Kind) ==
+ ((map == heap->fixed_array_map() && length == 0) ||
+ map == heap->fixed_double_array_map())));
+ for (int i = 0; i < length; i++) {
+ typename KindTraits::BackingStore* backing_store =
+ KindTraits::BackingStore::cast(elements);
+ ASSERT((!IsFastSmiElementsKind(KindTraits::Kind) ||
+ static_cast<Object*>(backing_store->get(i))->IsSmi()) ||
+ (IsFastHoleyElementsKind(KindTraits::Kind) ==
+ backing_store->is_the_hole(i)));
+ }
+#endif
+ }
+};
+
+
+template<typename FastElementsAccessorSubclass,
+ typename KindTraits>
+class FastSmiOrObjectElementsAccessor
+ : public FastElementsAccessor<FastElementsAccessorSubclass,
+ KindTraits,
+ kPointerSize> {
+ public:
+ explicit FastSmiOrObjectElementsAccessor(const char* name)
+ : FastElementsAccessor<FastElementsAccessorSubclass,
+ KindTraits,
+ kPointerSize>(name) {}
+
static MaybeObject* CopyElementsImpl(FixedArrayBase* from,
uint32_t from_start,
FixedArrayBase* to,
ElementsKind to_kind,
uint32_t to_start,
+ int packed_size,
int copy_size) {
- switch (to_kind) {
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS: {
- CopyObjectToObjectElements(
- FixedArray::cast(from), ElementsTraits::Kind, from_start,
- FixedArray::cast(to), to_kind, to_start, copy_size);
- return from;
- }
- case FAST_DOUBLE_ELEMENTS:
+ if (IsFastSmiOrObjectElementsKind(to_kind)) {
+ CopyObjectToObjectElements(
+ FixedArray::cast(from), KindTraits::Kind, from_start,
+ FixedArray::cast(to), to_kind, to_start, copy_size);
+ } else if (IsFastDoubleElementsKind(to_kind)) {
+ if (IsFastSmiElementsKind(KindTraits::Kind)) {
+ if (IsFastPackedElementsKind(KindTraits::Kind) &&
+ packed_size != kPackedSizeNotKnown) {
+ CopyPackedSmiToDoubleElements(
+ FixedArray::cast(from), from_start,
+ FixedDoubleArray::cast(to), to_start,
+ packed_size, copy_size);
+ } else {
+ CopySmiToDoubleElements(
+ FixedArray::cast(from), from_start,
+ FixedDoubleArray::cast(to), to_start, copy_size);
+ }
+ } else {
CopyObjectToDoubleElements(
FixedArray::cast(from), from_start,
FixedDoubleArray::cast(to), to_start, copy_size);
- return from;
- default:
- UNREACHABLE();
+ }
+ } else {
+ UNREACHABLE();
}
return to->GetHeap()->undefined_value();
}
@@ -759,64 +972,102 @@ class FastObjectElementsAccessor
static MaybeObject* SetFastElementsCapacityAndLength(JSObject* obj,
uint32_t capacity,
uint32_t length) {
- JSObject::SetFastElementsCapacityMode set_capacity_mode =
- obj->HasFastSmiOnlyElements()
- ? JSObject::kAllowSmiOnlyElements
- : JSObject::kDontAllowSmiOnlyElements;
+ JSObject::SetFastElementsCapacitySmiMode set_capacity_mode =
+ obj->HasFastSmiElements()
+ ? JSObject::kAllowSmiElements
+ : JSObject::kDontAllowSmiElements;
return obj->SetFastElementsCapacityAndLength(capacity,
length,
set_capacity_mode);
}
+};
- protected:
- friend class FastElementsAccessor<FastObjectElementsAccessor,
- ElementsKindTraits<FAST_ELEMENTS>,
- kPointerSize>;
- virtual MaybeObject* Delete(JSObject* obj,
- uint32_t key,
- JSReceiver::DeleteMode mode) {
- return DeleteCommon(obj, key);
- }
+class FastPackedSmiElementsAccessor
+ : public FastSmiOrObjectElementsAccessor<
+ FastPackedSmiElementsAccessor,
+ ElementsKindTraits<FAST_SMI_ELEMENTS> > {
+ public:
+ explicit FastPackedSmiElementsAccessor(const char* name)
+ : FastSmiOrObjectElementsAccessor<
+ FastPackedSmiElementsAccessor,
+ ElementsKindTraits<FAST_SMI_ELEMENTS> >(name) {}
+};
+
+
+class FastHoleySmiElementsAccessor
+ : public FastSmiOrObjectElementsAccessor<
+ FastHoleySmiElementsAccessor,
+ ElementsKindTraits<FAST_HOLEY_SMI_ELEMENTS> > {
+ public:
+ explicit FastHoleySmiElementsAccessor(const char* name)
+ : FastSmiOrObjectElementsAccessor<
+ FastHoleySmiElementsAccessor,
+ ElementsKindTraits<FAST_HOLEY_SMI_ELEMENTS> >(name) {}
+};
+
+
+class FastPackedObjectElementsAccessor
+ : public FastSmiOrObjectElementsAccessor<
+ FastPackedObjectElementsAccessor,
+ ElementsKindTraits<FAST_ELEMENTS> > {
+ public:
+ explicit FastPackedObjectElementsAccessor(const char* name)
+ : FastSmiOrObjectElementsAccessor<
+ FastPackedObjectElementsAccessor,
+ ElementsKindTraits<FAST_ELEMENTS> >(name) {}
+};
+
+
+class FastHoleyObjectElementsAccessor
+ : public FastSmiOrObjectElementsAccessor<
+ FastHoleyObjectElementsAccessor,
+ ElementsKindTraits<FAST_HOLEY_ELEMENTS> > {
+ public:
+ explicit FastHoleyObjectElementsAccessor(const char* name)
+ : FastSmiOrObjectElementsAccessor<
+ FastHoleyObjectElementsAccessor,
+ ElementsKindTraits<FAST_HOLEY_ELEMENTS> >(name) {}
};
+template<typename FastElementsAccessorSubclass,
+ typename KindTraits>
class FastDoubleElementsAccessor
- : public FastElementsAccessor<FastDoubleElementsAccessor,
- ElementsKindTraits<FAST_DOUBLE_ELEMENTS>,
+ : public FastElementsAccessor<FastElementsAccessorSubclass,
+ KindTraits,
kDoubleSize> {
public:
explicit FastDoubleElementsAccessor(const char* name)
- : FastElementsAccessor<FastDoubleElementsAccessor,
- ElementsKindTraits<FAST_DOUBLE_ELEMENTS>,
+ : FastElementsAccessor<FastElementsAccessorSubclass,
+ KindTraits,
kDoubleSize>(name) {}
static MaybeObject* SetFastElementsCapacityAndLength(JSObject* obj,
uint32_t capacity,
uint32_t length) {
- return obj->SetFastDoubleElementsCapacityAndLength(capacity, length);
+ return obj->SetFastDoubleElementsCapacityAndLength(capacity,
+ length);
}
protected:
- friend class ElementsAccessorBase<FastDoubleElementsAccessor,
- ElementsKindTraits<FAST_DOUBLE_ELEMENTS> >;
- friend class FastElementsAccessor<FastDoubleElementsAccessor,
- ElementsKindTraits<FAST_DOUBLE_ELEMENTS>,
- kDoubleSize>;
-
static MaybeObject* CopyElementsImpl(FixedArrayBase* from,
uint32_t from_start,
FixedArrayBase* to,
ElementsKind to_kind,
uint32_t to_start,
+ int packed_size,
int copy_size) {
switch (to_kind) {
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
return CopyDoubleToObjectElements(
FixedDoubleArray::cast(from), from_start, FixedArray::cast(to),
to_kind, to_start, copy_size);
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
CopyDoubleToDoubleElements(FixedDoubleArray::cast(from), from_start,
FixedDoubleArray::cast(to),
to_start, copy_size);
@@ -826,26 +1077,35 @@ class FastDoubleElementsAccessor
}
return to->GetHeap()->undefined_value();
}
+};
- virtual MaybeObject* Delete(JSObject* obj,
- uint32_t key,
- JSReceiver::DeleteMode mode) {
- int length = obj->IsJSArray()
- ? Smi::cast(JSArray::cast(obj)->length())->value()
- : FixedDoubleArray::cast(obj->elements())->length();
- if (key < static_cast<uint32_t>(length)) {
- FixedDoubleArray::cast(obj->elements())->set_the_hole(key);
- }
- return obj->GetHeap()->true_value();
- }
- static bool HasElementImpl(Object* receiver,
- JSObject* holder,
- uint32_t key,
- FixedDoubleArray* backing_store) {
- return key < static_cast<uint32_t>(backing_store->length()) &&
- !backing_store->is_the_hole(key);
- }
+class FastPackedDoubleElementsAccessor
+ : public FastDoubleElementsAccessor<
+ FastPackedDoubleElementsAccessor,
+ ElementsKindTraits<FAST_DOUBLE_ELEMENTS> > {
+ public:
+ friend class ElementsAccessorBase<FastPackedDoubleElementsAccessor,
+ ElementsKindTraits<FAST_DOUBLE_ELEMENTS> >;
+ explicit FastPackedDoubleElementsAccessor(const char* name)
+ : FastDoubleElementsAccessor<
+ FastPackedDoubleElementsAccessor,
+ ElementsKindTraits<FAST_DOUBLE_ELEMENTS> >(name) {}
+};
+
+
+class FastHoleyDoubleElementsAccessor
+ : public FastDoubleElementsAccessor<
+ FastHoleyDoubleElementsAccessor,
+ ElementsKindTraits<FAST_HOLEY_DOUBLE_ELEMENTS> > {
+ public:
+ friend class ElementsAccessorBase<
+ FastHoleyDoubleElementsAccessor,
+ ElementsKindTraits<FAST_HOLEY_DOUBLE_ELEMENTS> >;
+ explicit FastHoleyDoubleElementsAccessor(const char* name)
+ : FastDoubleElementsAccessor<
+ FastHoleyDoubleElementsAccessor,
+ ElementsKindTraits<FAST_HOLEY_DOUBLE_ELEMENTS> >(name) {}
};
@@ -866,27 +1126,28 @@ class ExternalElementsAccessor
friend class ElementsAccessorBase<ExternalElementsAccessorSubclass,
ElementsKindTraits<Kind> >;
- static MaybeObject* GetImpl(Object* receiver,
- JSObject* obj,
- uint32_t key,
- BackingStore* backing_store) {
+ MUST_USE_RESULT static MaybeObject* GetImpl(Object* receiver,
+ JSObject* obj,
+ uint32_t key,
+ BackingStore* backing_store) {
return
key < ExternalElementsAccessorSubclass::GetCapacityImpl(backing_store)
? backing_store->get(key)
: backing_store->GetHeap()->undefined_value();
}
- static MaybeObject* SetLengthImpl(JSObject* obj,
- Object* length,
- BackingStore* backing_store) {
+ MUST_USE_RESULT static MaybeObject* SetLengthImpl(
+ JSObject* obj,
+ Object* length,
+ BackingStore* backing_store) {
// External arrays do not support changing their length.
UNREACHABLE();
return obj;
}
- virtual MaybeObject* Delete(JSObject* obj,
- uint32_t key,
- JSReceiver::DeleteMode mode) {
+ MUST_USE_RESULT virtual MaybeObject* Delete(JSObject* obj,
+ uint32_t key,
+ JSReceiver::DeleteMode mode) {
// External arrays always ignore deletes.
return obj->GetHeap()->true_value();
}
@@ -1002,10 +1263,11 @@ class DictionaryElementsAccessor
// Adjusts the length of the dictionary backing store and returns the new
// length according to ES5 section 15.4.5.2 behavior.
- static MaybeObject* SetLengthWithoutNormalize(SeededNumberDictionary* dict,
- JSArray* array,
- Object* length_object,
- uint32_t length) {
+ MUST_USE_RESULT static MaybeObject* SetLengthWithoutNormalize(
+ SeededNumberDictionary* dict,
+ JSArray* array,
+ Object* length_object,
+ uint32_t length) {
if (length == 0) {
// If the length of a slow array is reset to zero, we clear
// the array and flush backing storage. This has the added
@@ -1057,9 +1319,10 @@ class DictionaryElementsAccessor
return length_object;
}
- static MaybeObject* DeleteCommon(JSObject* obj,
- uint32_t key,
- JSReceiver::DeleteMode mode) {
+ MUST_USE_RESULT static MaybeObject* DeleteCommon(
+ JSObject* obj,
+ uint32_t key,
+ JSReceiver::DeleteMode mode) {
Isolate* isolate = obj->GetIsolate();
Heap* heap = isolate->heap();
FixedArray* backing_store = FixedArray::cast(obj->elements());
@@ -1102,20 +1365,24 @@ class DictionaryElementsAccessor
return heap->true_value();
}
- static MaybeObject* CopyElementsImpl(FixedArrayBase* from,
- uint32_t from_start,
- FixedArrayBase* to,
- ElementsKind to_kind,
- uint32_t to_start,
- int copy_size) {
+ MUST_USE_RESULT static MaybeObject* CopyElementsImpl(FixedArrayBase* from,
+ uint32_t from_start,
+ FixedArrayBase* to,
+ ElementsKind to_kind,
+ uint32_t to_start,
+ int packed_size,
+ int copy_size) {
switch (to_kind) {
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
CopyDictionaryToObjectElements(
SeededNumberDictionary::cast(from), from_start,
FixedArray::cast(to), to_kind, to_start, copy_size);
return from;
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
CopyDictionaryToDoubleElements(
SeededNumberDictionary::cast(from), from_start,
FixedDoubleArray::cast(to), to_start, copy_size);
@@ -1131,16 +1398,17 @@ class DictionaryElementsAccessor
friend class ElementsAccessorBase<DictionaryElementsAccessor,
ElementsKindTraits<DICTIONARY_ELEMENTS> >;
- virtual MaybeObject* Delete(JSObject* obj,
- uint32_t key,
- JSReceiver::DeleteMode mode) {
+ MUST_USE_RESULT virtual MaybeObject* Delete(JSObject* obj,
+ uint32_t key,
+ JSReceiver::DeleteMode mode) {
return DeleteCommon(obj, key, mode);
}
- static MaybeObject* GetImpl(Object* receiver,
- JSObject* obj,
- uint32_t key,
- SeededNumberDictionary* backing_store) {
+ MUST_USE_RESULT static MaybeObject* GetImpl(
+ Object* receiver,
+ JSObject* obj,
+ uint32_t key,
+ SeededNumberDictionary* backing_store) {
int entry = backing_store->FindEntry(key);
if (entry != SeededNumberDictionary::kNotFound) {
Object* element = backing_store->ValueAt(entry);
@@ -1186,10 +1454,10 @@ class NonStrictArgumentsElementsAccessor : public ElementsAccessorBase<
NonStrictArgumentsElementsAccessor,
ElementsKindTraits<NON_STRICT_ARGUMENTS_ELEMENTS> >;
- static MaybeObject* GetImpl(Object* receiver,
- JSObject* obj,
- uint32_t key,
- FixedArray* parameter_map) {
+ MUST_USE_RESULT static MaybeObject* GetImpl(Object* receiver,
+ JSObject* obj,
+ uint32_t key,
+ FixedArray* parameter_map) {
Object* probe = GetParameterMapArg(obj, parameter_map, key);
if (!probe->IsTheHole()) {
Context* context = Context::cast(parameter_map->get(0));
@@ -1216,18 +1484,19 @@ class NonStrictArgumentsElementsAccessor : public ElementsAccessorBase<
}
}
- static MaybeObject* SetLengthImpl(JSObject* obj,
- Object* length,
- FixedArray* parameter_map) {
+ MUST_USE_RESULT static MaybeObject* SetLengthImpl(
+ JSObject* obj,
+ Object* length,
+ FixedArray* parameter_map) {
// TODO(mstarzinger): This was never implemented but will be used once we
// correctly implement [[DefineOwnProperty]] on arrays.
UNIMPLEMENTED();
return obj;
}
- virtual MaybeObject* Delete(JSObject* obj,
- uint32_t key,
- JSReceiver::DeleteMode mode) {
+ MUST_USE_RESULT virtual MaybeObject* Delete(JSObject* obj,
+ uint32_t key,
+ JSReceiver::DeleteMode mode) {
FixedArray* parameter_map = FixedArray::cast(obj->elements());
Object* probe = GetParameterMapArg(obj, parameter_map, key);
if (!probe->IsTheHole()) {
@@ -1240,18 +1509,22 @@ class NonStrictArgumentsElementsAccessor : public ElementsAccessorBase<
if (arguments->IsDictionary()) {
return DictionaryElementsAccessor::DeleteCommon(obj, key, mode);
} else {
- return FastObjectElementsAccessor::DeleteCommon(obj, key);
+ // It's difficult to access the version of DeleteCommon that is declared
+ // in the templatized super class, so call the concrete implementation in
+ // the class for the most generalized ElementsKind subclass.
+ return FastHoleyObjectElementsAccessor::DeleteCommon(obj, key, mode);
}
}
return obj->GetHeap()->true_value();
}
- static MaybeObject* CopyElementsImpl(FixedArrayBase* from,
- uint32_t from_start,
- FixedArrayBase* to,
- ElementsKind to_kind,
- uint32_t to_start,
- int copy_size) {
+ MUST_USE_RESULT static MaybeObject* CopyElementsImpl(FixedArrayBase* from,
+ uint32_t from_start,
+ FixedArrayBase* to,
+ ElementsKind to_kind,
+ uint32_t to_start,
+ int packed_size,
+ int copy_size) {
FixedArray* parameter_map = FixedArray::cast(from);
FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
ElementsAccessor* accessor = ElementsAccessor::ForArray(arguments);
@@ -1304,7 +1577,7 @@ ElementsAccessor* ElementsAccessor::ForArray(FixedArrayBase* array) {
if (array->IsDictionary()) {
return elements_accessors_[DICTIONARY_ELEMENTS];
} else {
- return elements_accessors_[FAST_ELEMENTS];
+ return elements_accessors_[FAST_HOLEY_ELEMENTS];
}
case EXTERNAL_BYTE_ARRAY_TYPE:
return elements_accessors_[EXTERNAL_BYTE_ELEMENTS];
@@ -1332,18 +1605,8 @@ ElementsAccessor* ElementsAccessor::ForArray(FixedArrayBase* array) {
void ElementsAccessor::InitializeOncePerProcess() {
- static struct ConcreteElementsAccessors {
-#define ACCESSOR_STRUCT(Class, Kind, Store) Class* Kind##_handler;
- ELEMENTS_LIST(ACCESSOR_STRUCT)
-#undef ACCESSOR_STRUCT
- } element_accessors = {
-#define ACCESSOR_INIT(Class, Kind, Store) new Class(#Kind),
- ELEMENTS_LIST(ACCESSOR_INIT)
-#undef ACCESSOR_INIT
- };
-
static ElementsAccessor* accessor_array[] = {
-#define ACCESSOR_ARRAY(Class, Kind, Store) element_accessors.Kind##_handler,
+#define ACCESSOR_ARRAY(Class, Kind, Store) new Class(#Kind),
ELEMENTS_LIST(ACCESSOR_ARRAY)
#undef ACCESSOR_ARRAY
};
@@ -1355,9 +1618,17 @@ void ElementsAccessor::InitializeOncePerProcess() {
}
+void ElementsAccessor::TearDown() {
+#define ACCESSOR_DELETE(Class, Kind, Store) delete elements_accessors_[Kind];
+ ELEMENTS_LIST(ACCESSOR_DELETE)
+#undef ACCESSOR_DELETE
+ elements_accessors_ = NULL;
+}
+
+
template <typename ElementsAccessorSubclass, typename ElementsKindTraits>
-MaybeObject* ElementsAccessorBase<ElementsAccessorSubclass,
- ElementsKindTraits>::
+MUST_USE_RESULT MaybeObject* ElementsAccessorBase<ElementsAccessorSubclass,
+ ElementsKindTraits>::
SetLengthImpl(JSObject* obj,
Object* length,
typename ElementsKindTraits::BackingStore* backing_store) {
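The net effect of the elements.cc hunks above is that every MaybeObject*-returning entry point is now annotated MUST_USE_RESULT, so silently dropping a possible allocation failure becomes a compiler diagnostic. As a rough sketch of what such an annotation usually expands to (the macro name comes from the patch; the definition and the sample functions below are illustrative, not copied from V8):

    // On GCC/Clang the annotation maps to warn_unused_result; elsewhere no-op.
    #if defined(__GNUC__)
    #define MUST_USE_RESULT __attribute__((warn_unused_result))
    #else
    #define MUST_USE_RESULT
    #endif

    // Any caller that drops the return value now gets a warning.
    MUST_USE_RESULT int ReserveSlot();

    void Caller() {
      ReserveSlot();                      // warning: ignoring return value
      if (ReserveSlot() < 0) return;      // fine: the result is checked
    }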
diff --git a/deps/v8/src/elements.h b/deps/v8/src/elements.h
index ff97c08324..822fca50ee 100644
--- a/deps/v8/src/elements.h
+++ b/deps/v8/src/elements.h
@@ -28,6 +28,7 @@
#ifndef V8_ELEMENTS_H_
#define V8_ELEMENTS_H_
+#include "elements-kind.h"
#include "objects.h"
#include "heap.h"
#include "isolate.h"
@@ -45,6 +46,10 @@ class ElementsAccessor {
virtual ElementsKind kind() const = 0;
const char* name() const { return name_; }
+ // Checks the elements of an object for consistency, asserting when a problem
+ // is found.
+ virtual void Validate(JSObject* obj) = 0;
+
// Returns true if a holder contains an element with the specified key
// without iterating up the prototype chain. The caller can optionally pass
// in the backing store to use for the check, which must be compatible with
@@ -60,18 +65,19 @@ class ElementsAccessor {
// can optionally pass in the backing store to use for the check, which must
// be compatible with the ElementsKind of the ElementsAccessor. If
// backing_store is NULL, the holder->elements() is used as the backing store.
- virtual MaybeObject* Get(Object* receiver,
- JSObject* holder,
- uint32_t key,
- FixedArrayBase* backing_store = NULL) = 0;
+ MUST_USE_RESULT virtual MaybeObject* Get(
+ Object* receiver,
+ JSObject* holder,
+ uint32_t key,
+ FixedArrayBase* backing_store = NULL) = 0;
// Modifies the length data property as specified for JSArrays and resizes the
// underlying backing store accordingly. The method honors the semantics of
  // changing array sizes as defined in EcmaScript 5.1 15.4.5.2, i.e. arrays
  // that have non-deletable elements can only be shrunk to the size of the
  // highest element that is non-deletable.
- virtual MaybeObject* SetLength(JSArray* holder,
- Object* new_length) = 0;
+ MUST_USE_RESULT virtual MaybeObject* SetLength(JSArray* holder,
+ Object* new_length) = 0;
// Modifies both the length and capacity of a JSArray, resizing the underlying
// backing store as necessary. This method does NOT honor the semantics of
@@ -79,14 +85,14 @@ class ElementsAccessor {
// elements. This method should only be called for array expansion OR by
  // runtime JavaScript code that uses InternalArrays and doesn't care about
// EcmaScript 5.1 semantics.
- virtual MaybeObject* SetCapacityAndLength(JSArray* array,
- int capacity,
- int length) = 0;
+ MUST_USE_RESULT virtual MaybeObject* SetCapacityAndLength(JSArray* array,
+ int capacity,
+ int length) = 0;
// Deletes an element in an object, returning a new elements backing store.
- virtual MaybeObject* Delete(JSObject* holder,
- uint32_t key,
- JSReceiver::DeleteMode mode) = 0;
+ MUST_USE_RESULT virtual MaybeObject* Delete(JSObject* holder,
+ uint32_t key,
+ JSReceiver::DeleteMode mode) = 0;
// If kCopyToEnd is specified as the copy_size to CopyElements, it copies all
  // of the elements from source after source_start to the destination array.
@@ -101,26 +107,28 @@ class ElementsAccessor {
// the source JSObject or JSArray in source_holder. If the holder's backing
// store is available, it can be passed in source and source_holder is
// ignored.
- virtual MaybeObject* CopyElements(JSObject* source_holder,
- uint32_t source_start,
- FixedArrayBase* destination,
- ElementsKind destination_kind,
- uint32_t destination_start,
- int copy_size,
- FixedArrayBase* source = NULL) = 0;
-
- MaybeObject* CopyElements(JSObject* from_holder,
- FixedArrayBase* to,
- ElementsKind to_kind,
- FixedArrayBase* from = NULL) {
+ MUST_USE_RESULT virtual MaybeObject* CopyElements(
+ JSObject* source_holder,
+ uint32_t source_start,
+ FixedArrayBase* destination,
+ ElementsKind destination_kind,
+ uint32_t destination_start,
+ int copy_size,
+ FixedArrayBase* source = NULL) = 0;
+
+ MUST_USE_RESULT MaybeObject* CopyElements(JSObject* from_holder,
+ FixedArrayBase* to,
+ ElementsKind to_kind,
+ FixedArrayBase* from = NULL) {
return CopyElements(from_holder, 0, to, to_kind, 0,
kCopyToEndAndInitializeToHole, from);
}
- virtual MaybeObject* AddElementsToFixedArray(Object* receiver,
- JSObject* holder,
- FixedArray* to,
- FixedArrayBase* from = NULL) = 0;
+ MUST_USE_RESULT virtual MaybeObject* AddElementsToFixedArray(
+ Object* receiver,
+ JSObject* holder,
+ FixedArray* to,
+ FixedArrayBase* from = NULL) = 0;
// Returns a shared ElementsAccessor for the specified ElementsKind.
static ElementsAccessor* ForKind(ElementsKind elements_kind) {
@@ -131,6 +139,7 @@ class ElementsAccessor {
static ElementsAccessor* ForArray(FixedArrayBase* array);
static void InitializeOncePerProcess();
+ static void TearDown();
protected:
friend class NonStrictArgumentsElementsAccessor;
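With the interface above, call sites no longer switch on ElementsKind themselves; they fetch the matching accessor and dispatch through it. A hedged sketch of such a call site (ReadElement is a hypothetical helper written for illustration, not part of this patch):

    // Pick the accessor from the backing store and read one element through it.
    MaybeObject* ReadElement(JSObject* holder, uint32_t key) {
      ElementsAccessor* accessor =
          ElementsAccessor::ForArray(holder->elements());
      // Receiver and holder coincide in this simple case; the default NULL
      // backing_store makes Get() fall back to holder->elements() internally.
      return accessor->Get(holder, holder, key);
    }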
diff --git a/deps/v8/src/extensions/externalize-string-extension.cc b/deps/v8/src/extensions/externalize-string-extension.cc
index 9fbf329818..50d876136f 100644
--- a/deps/v8/src/extensions/externalize-string-extension.cc
+++ b/deps/v8/src/extensions/externalize-string-extension.cc
@@ -133,11 +133,8 @@ v8::Handle<v8::Value> ExternalizeStringExtension::IsAscii(
void ExternalizeStringExtension::Register() {
- static ExternalizeStringExtension* externalize_extension = NULL;
- if (externalize_extension == NULL)
- externalize_extension = new ExternalizeStringExtension;
- static v8::DeclareExtension externalize_extension_declaration(
- externalize_extension);
+ static ExternalizeStringExtension externalize_extension;
+ static v8::DeclareExtension declaration(&externalize_extension);
}
} } // namespace v8::internal
diff --git a/deps/v8/src/extensions/gc-extension.cc b/deps/v8/src/extensions/gc-extension.cc
index 573797e174..f921552aaa 100644
--- a/deps/v8/src/extensions/gc-extension.cc
+++ b/deps/v8/src/extensions/gc-extension.cc
@@ -46,9 +46,8 @@ v8::Handle<v8::Value> GCExtension::GC(const v8::Arguments& args) {
void GCExtension::Register() {
- static GCExtension* gc_extension = NULL;
- if (gc_extension == NULL) gc_extension = new GCExtension();
- static v8::DeclareExtension gc_extension_declaration(gc_extension);
+ static GCExtension gc_extension;
+ static v8::DeclareExtension declaration(&gc_extension);
}
} } // namespace v8::internal
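Both Register() functions above trade a lazily new'ed, never-freed singleton for a function-local static, so construction still happens on first use but there is no heap allocation or NULL check. The shape of that pattern in isolation (all names below are invented for the example):

    struct Extension { /* extension state */ };

    // A declaration object whose constructor registers the extension globally.
    struct DeclareExtension {
      explicit DeclareExtension(Extension* extension) { /* register it */ }
    };

    void Register() {
      static Extension extension;                        // built on first call
      static DeclareExtension declaration(&extension);   // registered exactly once
    }

The earlier version reached the same end state but leaked the heap-allocated object and duplicated the "constructed yet?" bookkeeping by hand.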
diff --git a/deps/v8/src/factory.cc b/deps/v8/src/factory.cc
index e8a9f26a5c..28b318a8f4 100644
--- a/deps/v8/src/factory.cc
+++ b/deps/v8/src/factory.cc
@@ -34,6 +34,7 @@
#include "macro-assembler.h"
#include "objects.h"
#include "objects-visiting.h"
+#include "platform.h"
#include "scopeinfo.h"
namespace v8 {
@@ -114,7 +115,8 @@ Handle<ObjectHashTable> Factory::NewObjectHashTable(int at_least_space_for) {
Handle<DescriptorArray> Factory::NewDescriptorArray(int number_of_descriptors) {
ASSERT(0 <= number_of_descriptors);
CALL_HEAP_FUNCTION(isolate(),
- DescriptorArray::Allocate(number_of_descriptors),
+ DescriptorArray::Allocate(number_of_descriptors,
+ DescriptorArray::MAY_BE_SHARED),
DescriptorArray);
}
@@ -291,6 +293,15 @@ Handle<Context> Factory::NewGlobalContext() {
}
+Handle<Context> Factory::NewModuleContext(Handle<Context> previous,
+ Handle<ScopeInfo> scope_info) {
+ CALL_HEAP_FUNCTION(
+ isolate(),
+ isolate()->heap()->AllocateModuleContext(*previous, *scope_info),
+ Context);
+}
+
+
Handle<Context> Factory::NewFunctionContext(int length,
Handle<JSFunction> function) {
CALL_HEAP_FUNCTION(
@@ -324,10 +335,9 @@ Handle<Context> Factory::NewWithContext(Handle<JSFunction> function,
}
-Handle<Context> Factory::NewBlockContext(
- Handle<JSFunction> function,
- Handle<Context> previous,
- Handle<ScopeInfo> scope_info) {
+Handle<Context> Factory::NewBlockContext(Handle<JSFunction> function,
+ Handle<Context> previous,
+ Handle<ScopeInfo> scope_info) {
CALL_HEAP_FUNCTION(
isolate(),
isolate()->heap()->AllocateBlockContext(*function,
@@ -487,7 +497,9 @@ Handle<Map> Factory::CopyMap(Handle<Map> src,
Handle<Map> Factory::CopyMapDropTransitions(Handle<Map> src) {
- CALL_HEAP_FUNCTION(isolate(), src->CopyDropTransitions(), Map);
+ CALL_HEAP_FUNCTION(isolate(),
+ src->CopyDropTransitions(DescriptorArray::MAY_BE_SHARED),
+ Map);
}
@@ -667,6 +679,43 @@ Handle<Object> Factory::NewError(const char* type,
}
+Handle<String> Factory::EmergencyNewError(const char* type,
+ Handle<JSArray> args) {
+ const int kBufferSize = 1000;
+ char buffer[kBufferSize];
+ size_t space = kBufferSize;
+ char* p = &buffer[0];
+
+ Vector<char> v(buffer, kBufferSize);
+ OS::StrNCpy(v, type, space);
+ space -= Min(space, strlen(type));
+ p = &buffer[kBufferSize] - space;
+
+ for (unsigned i = 0; i < ARRAY_SIZE(args); i++) {
+ if (space > 0) {
+ *p++ = ' ';
+ space--;
+ if (space > 0) {
+ MaybeObject* maybe_arg = args->GetElement(i);
+ Handle<String> arg_str(reinterpret_cast<String*>(maybe_arg));
+ const char* arg = *arg_str->ToCString();
+ Vector<char> v2(p, space);
+ OS::StrNCpy(v2, arg, space);
+ space -= Min(space, strlen(arg));
+ p = &buffer[kBufferSize] - space;
+ }
+ }
+ }
+ if (space > 0) {
+ *p = '\0';
+ } else {
+ buffer[kBufferSize - 1] = '\0';
+ }
+ Handle<String> error_string = NewStringFromUtf8(CStrVector(buffer), TENURED);
+ return error_string;
+}
+
+
Handle<Object> Factory::NewError(const char* maker,
const char* type,
Handle<JSArray> args) {
@@ -675,8 +724,9 @@ Handle<Object> Factory::NewError(const char* maker,
isolate()->js_builtins_object()->GetPropertyNoExceptionThrown(*make_str));
// If the builtins haven't been properly configured yet this error
// constructor may not have been defined. Bail out.
- if (!fun_obj->IsJSFunction())
- return undefined_value();
+ if (!fun_obj->IsJSFunction()) {
+ return EmergencyNewError(type, args);
+ }
Handle<JSFunction> fun = Handle<JSFunction>::cast(fun_obj);
Handle<Object> type_obj = LookupAsciiSymbol(type);
Handle<Object> argv[] = { type_obj, args };
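EmergencyNewError above formats the message into a fixed stack buffer, tracking the remaining space after every bounded copy and re-deriving the write pointer from it. The same bookkeeping reduced to a standalone helper (plain C++; not the V8 OS::StrNCpy/Vector types):

    #include <algorithm>
    #include <cstring>

    // Append 'piece' to a buffer of capacity 'cap' without ever overflowing.
    // 'space' counts the bytes still free; the write position is cap - space.
    void BoundedAppend(char* buffer, size_t cap, size_t* space, const char* piece) {
      char* p = buffer + (cap - *space);
      size_t n = std::min(*space, std::strlen(piece));
      std::memcpy(p, piece, n);
      *space -= n;
    }

    void Example() {
      char buffer[1000];
      size_t space = sizeof(buffer);
      BoundedAppend(buffer, sizeof(buffer), &space, "TypeError");
      BoundedAppend(buffer, sizeof(buffer), &space, " foo is not a function");
      // Terminate, clamping to the last byte if the buffer filled up completely.
      buffer[space > 0 ? sizeof(buffer) - space : sizeof(buffer) - 1] = '\0';
    }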
@@ -767,7 +817,7 @@ Handle<JSFunction> Factory::NewFunctionWithPrototype(Handle<String> name,
instance_size != JSObject::kHeaderSize) {
Handle<Map> initial_map = NewMap(type,
instance_size,
- FAST_SMI_ONLY_ELEMENTS);
+ GetInitialFastElementsKind());
function->set_initial_map(*initial_map);
initial_map->set_constructor(*function);
}
@@ -892,7 +942,7 @@ Handle<DescriptorArray> Factory::CopyAppendCallbackDescriptors(
Handle<String> key =
SymbolFromString(Handle<String>(String::cast(entry->name())));
// Check if a descriptor with this name already exists before writing.
- if (result->LinearSearch(*key, descriptor_count) ==
+ if (result->LinearSearch(EXPECT_UNSORTED, *key, descriptor_count) ==
DescriptorArray::kNotFound) {
CallbacksDescriptor desc(*key, *entry, entry->property_attributes());
result->Set(descriptor_count, &desc, witness);
@@ -928,6 +978,13 @@ Handle<JSObject> Factory::NewJSObject(Handle<JSFunction> constructor,
}
+Handle<JSModule> Factory::NewJSModule() {
+ CALL_HEAP_FUNCTION(
+ isolate(),
+ isolate()->heap()->AllocateJSModule(), JSModule);
+}
+
+
Handle<GlobalObject> Factory::NewGlobalObject(
Handle<JSFunction> constructor) {
CALL_HEAP_FUNCTION(isolate(),
@@ -998,10 +1055,11 @@ void Factory::EnsureCanContainHeapObjectElements(Handle<JSArray> array) {
void Factory::EnsureCanContainElements(Handle<JSArray> array,
Handle<FixedArrayBase> elements,
+ uint32_t length,
EnsureElementsMode mode) {
CALL_HEAP_FUNCTION_VOID(
isolate(),
- array->EnsureCanContainElements(*elements, mode));
+ array->EnsureCanContainElements(*elements, length, mode));
}
diff --git a/deps/v8/src/factory.h b/deps/v8/src/factory.h
index 786d4a983a..bb435456b0 100644
--- a/deps/v8/src/factory.h
+++ b/deps/v8/src/factory.h
@@ -162,9 +162,12 @@ class Factory {
// Create a global (but otherwise uninitialized) context.
Handle<Context> NewGlobalContext();
+ // Create a module context.
+ Handle<Context> NewModuleContext(Handle<Context> previous,
+ Handle<ScopeInfo> scope_info);
+
// Create a function context.
- Handle<Context> NewFunctionContext(int length,
- Handle<JSFunction> function);
+ Handle<Context> NewFunctionContext(int length, Handle<JSFunction> function);
// Create a catch context.
Handle<Context> NewCatchContext(Handle<JSFunction> function,
@@ -177,7 +180,7 @@ class Factory {
Handle<Context> previous,
Handle<JSObject> extension);
- // Create a 'block' context.
+ // Create a block context.
Handle<Context> NewBlockContext(Handle<JSFunction> function,
Handle<Context> previous,
Handle<ScopeInfo> scope_info);
@@ -213,9 +216,10 @@ class Factory {
Handle<JSGlobalPropertyCell> NewJSGlobalPropertyCell(
Handle<Object> value);
- Handle<Map> NewMap(InstanceType type,
- int instance_size,
- ElementsKind elements_kind = FAST_ELEMENTS);
+ Handle<Map> NewMap(
+ InstanceType type,
+ int instance_size,
+ ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);
Handle<JSObject> NewFunctionPrototype(Handle<JSFunction> function);
@@ -262,14 +266,18 @@ class Factory {
// runtime.
Handle<JSObject> NewJSObjectFromMap(Handle<Map> map);
+ // JS modules are pretenured.
+ Handle<JSModule> NewJSModule();
+
// JS arrays are pretenured when allocated by the parser.
- Handle<JSArray> NewJSArray(int capacity,
- ElementsKind elements_kind = FAST_ELEMENTS,
- PretenureFlag pretenure = NOT_TENURED);
+ Handle<JSArray> NewJSArray(
+ int capacity,
+ ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND,
+ PretenureFlag pretenure = NOT_TENURED);
Handle<JSArray> NewJSArrayWithElements(
Handle<FixedArrayBase> elements,
- ElementsKind elements_kind = FAST_ELEMENTS,
+ ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND,
PretenureFlag pretenure = NOT_TENURED);
void SetElementsCapacityAndLength(Handle<JSArray> array,
@@ -281,6 +289,7 @@ class Factory {
void EnsureCanContainHeapObjectElements(Handle<JSArray> array);
void EnsureCanContainElements(Handle<JSArray> array,
Handle<FixedArrayBase> elements,
+ uint32_t length,
EnsureElementsMode mode);
Handle<JSProxy> NewJSProxy(Handle<Object> handler, Handle<Object> prototype);
@@ -329,6 +338,7 @@ class Factory {
Handle<Object> NewError(const char* maker, const char* type,
Handle<JSArray> args);
+ Handle<String> EmergencyNewError(const char* type, Handle<JSArray> args);
Handle<Object> NewError(const char* maker, const char* type,
Vector< Handle<Object> > args);
Handle<Object> NewError(const char* type,
diff --git a/deps/v8/src/flag-definitions.h b/deps/v8/src/flag-definitions.h
index 75697a8906..2b4c53cd2d 100644
--- a/deps/v8/src/flag-definitions.h
+++ b/deps/v8/src/flag-definitions.h
@@ -132,6 +132,10 @@ public:
// Flags for language modes and experimental language features.
DEFINE_bool(use_strict, false, "enforce strict mode")
+DEFINE_bool(es5_readonly, false,
+ "activate correct semantics for inheriting readonliness")
+DEFINE_bool(es52_globals, false,
+ "activate new semantics for global var declarations")
DEFINE_bool(harmony_typeof, false, "enable harmony semantics for typeof")
DEFINE_bool(harmony_scoping, false, "enable harmony block scoping")
@@ -148,6 +152,7 @@ DEFINE_implication(harmony, harmony_collections)
DEFINE_implication(harmony_modules, harmony_scoping)
// Flags for experimental implementation features.
+DEFINE_bool(packed_arrays, false, "optimizes arrays that have no holes")
DEFINE_bool(smi_only_arrays, true, "tracks arrays with only smi values")
DEFINE_bool(clever_optimizations,
true,
@@ -165,7 +170,12 @@ DEFINE_bool(eliminate_dead_phis, true, "eliminate dead phis")
DEFINE_bool(use_gvn, true, "use hydrogen global value numbering")
DEFINE_bool(use_canonicalizing, true, "use hydrogen instruction canonicalizing")
DEFINE_bool(use_inlining, true, "use function inlining")
-DEFINE_bool(limit_inlining, true, "limit code size growth from inlining")
+DEFINE_int(max_inlined_source_size, 600,
+ "maximum source size in bytes considered for a single inlining")
+DEFINE_int(max_inlined_nodes, 196,
+ "maximum number of AST nodes considered for a single inlining")
+DEFINE_int(max_inlined_nodes_cumulative, 196,
+ "maximum cumulative number of AST nodes considered for inlining")
DEFINE_bool(loop_invariant_code_motion, true, "loop invariant code motion")
DEFINE_bool(collect_megamorphic_maps_from_stub_cache,
true,
@@ -188,6 +198,10 @@ DEFINE_bool(trap_on_deopt, false, "put a break point before deoptimizing")
DEFINE_bool(deoptimize_uncommon_cases, true, "deoptimize uncommon cases")
DEFINE_bool(polymorphic_inlining, true, "polymorphic inlining")
DEFINE_bool(use_osr, true, "use on-stack replacement")
+DEFINE_bool(array_bounds_checks_elimination, false,
+ "perform array bounds checks elimination")
+DEFINE_bool(array_index_dehoisting, false,
+ "perform array index dehoisting")
DEFINE_bool(trace_osr, false, "trace on-stack replacement")
DEFINE_int(stress_runs, 0, "number of stress runs")
diff --git a/deps/v8/src/frames.cc b/deps/v8/src/frames.cc
index 0571a813f5..b7e028634f 100644
--- a/deps/v8/src/frames.cc
+++ b/deps/v8/src/frames.cc
@@ -469,6 +469,20 @@ StackFrame::Type StackFrame::GetCallerState(State* state) const {
}
+Address StackFrame::UnpaddedFP() const {
+#if defined(V8_TARGET_ARCH_IA32)
+ if (!is_optimized()) return fp();
+ int32_t alignment_state = Memory::int32_at(
+ fp() + JavaScriptFrameConstants::kDynamicAlignmentStateOffset);
+
+ return (alignment_state == kAlignmentPaddingPushed) ?
+ (fp() + kPointerSize) : fp();
+#else
+ return fp();
+#endif
+}
+
+
Code* EntryFrame::unchecked_code() const {
return HEAP->raw_unchecked_js_entry_code();
}
@@ -1359,34 +1373,28 @@ InnerPointerToCodeCache::InnerPointerToCodeCacheEntry*
// -------------------------------------------------------------------------
int NumRegs(RegList reglist) {
- int n = 0;
- while (reglist != 0) {
- n++;
- reglist &= reglist - 1; // clear one bit
- }
- return n;
+ return CompilerIntrinsics::CountSetBits(reglist);
}
struct JSCallerSavedCodeData {
- JSCallerSavedCodeData() {
- int i = 0;
- for (int r = 0; r < kNumRegs; r++)
- if ((kJSCallerSaved & (1 << r)) != 0)
- reg_code[i++] = r;
-
- ASSERT(i == kNumJSCallerSaved);
- }
int reg_code[kNumJSCallerSaved];
};
+JSCallerSavedCodeData caller_saved_code_data;
-static LazyInstance<JSCallerSavedCodeData>::type caller_saved_code_data =
- LAZY_INSTANCE_INITIALIZER;
+void SetUpJSCallerSavedCodeData() {
+ int i = 0;
+ for (int r = 0; r < kNumRegs; r++)
+ if ((kJSCallerSaved & (1 << r)) != 0)
+ caller_saved_code_data.reg_code[i++] = r;
+
+ ASSERT(i == kNumJSCallerSaved);
+}
int JSCallerSavedCode(int n) {
ASSERT(0 <= n && n < kNumJSCallerSaved);
- return caller_saved_code_data.Get().reg_code[n];
+ return caller_saved_code_data.reg_code[n];
}
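The loop removed from NumRegs (n++; reglist &= reglist - 1;) is Kernighan's population count, which the new CompilerIntrinsics::CountSetBits call presumably maps to a compiler builtin where one is available. Both forms compute the same value, as in this standalone sketch:

    #include <stdint.h>

    // Portable version: each iteration clears the lowest set bit.
    int CountSetBitsPortable(uint32_t bits) {
      int n = 0;
      while (bits != 0) {
        bits &= bits - 1;
        ++n;
      }
      return n;
    }

    int CountSetBits(uint32_t bits) {
    #if defined(__GNUC__)
      return __builtin_popcount(bits);   // single instruction where supported
    #else
      return CountSetBitsPortable(bits);
    #endif
    }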
@@ -1400,11 +1408,11 @@ class field##_Wrapper : public ZoneObject { \
STACK_FRAME_TYPE_LIST(DEFINE_WRAPPER)
#undef DEFINE_WRAPPER
-static StackFrame* AllocateFrameCopy(StackFrame* frame) {
+static StackFrame* AllocateFrameCopy(StackFrame* frame, Zone* zone) {
#define FRAME_TYPE_CASE(type, field) \
case StackFrame::type: { \
field##_Wrapper* wrapper = \
- new field##_Wrapper(*(reinterpret_cast<field*>(frame))); \
+ new(zone) field##_Wrapper(*(reinterpret_cast<field*>(frame))); \
return &wrapper->frame_; \
}
@@ -1416,11 +1424,11 @@ static StackFrame* AllocateFrameCopy(StackFrame* frame) {
return NULL;
}
-Vector<StackFrame*> CreateStackMap() {
- ZoneList<StackFrame*> list(10);
+Vector<StackFrame*> CreateStackMap(Zone* zone) {
+ ZoneList<StackFrame*> list(10, zone);
for (StackFrameIterator it; !it.done(); it.Advance()) {
- StackFrame* frame = AllocateFrameCopy(it.frame());
- list.Add(frame);
+ StackFrame* frame = AllocateFrameCopy(it.frame(), zone);
+ list.Add(frame, zone);
}
return list.ToVector();
}
diff --git a/deps/v8/src/frames.h b/deps/v8/src/frames.h
index 9071555197..2d45932d09 100644
--- a/deps/v8/src/frames.h
+++ b/deps/v8/src/frames.h
@@ -40,6 +40,8 @@ typedef uint32_t RegList;
// Get the number of registers in a given register list.
int NumRegs(RegList list);
+void SetUpJSCallerSavedCodeData();
+
// Return the code of the n-th saved register available to JavaScript.
int JSCallerSavedCode(int n);
@@ -204,11 +206,19 @@ class StackFrame BASE_EMBEDDED {
Address fp() const { return state_.fp; }
Address caller_sp() const { return GetCallerStackPointer(); }
+  // If this frame is optimized and was dynamically aligned, return its old
+  // unaligned frame pointer. When the frame is deoptimized, its FP will shift
+  // up one word and become unaligned.
+ Address UnpaddedFP() const;
+
Address pc() const { return *pc_address(); }
void set_pc(Address pc) { *pc_address() = pc; }
virtual void SetCallerFp(Address caller_fp) = 0;
+  // Manually changes the value of fp in this object.
+ void UpdateFp(Address fp) { state_.fp = fp; }
+
Address* pc_address() const { return state_.pc_address; }
// Get the id of this stack frame.
@@ -883,7 +893,7 @@ class StackFrameLocator BASE_EMBEDDED {
// Reads all frames on the current stack and copies them into the current
// zone memory.
-Vector<StackFrame*> CreateStackMap();
+Vector<StackFrame*> CreateStackMap(Zone* zone);
} } // namespace v8::internal
diff --git a/deps/v8/src/full-codegen.cc b/deps/v8/src/full-codegen.cc
index 44fe011a4e..4da4e531ee 100644
--- a/deps/v8/src/full-codegen.cc
+++ b/deps/v8/src/full-codegen.cc
@@ -303,7 +303,7 @@ bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
masm.positions_recorder()->StartGDBJITLineInfoRecording();
#endif
- FullCodeGenerator cgen(&masm, info);
+ FullCodeGenerator cgen(&masm, info, isolate->zone());
cgen.Generate();
if (cgen.HasStackOverflow()) {
ASSERT(!isolate->has_pending_exception());
@@ -316,7 +316,6 @@ bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
code->set_optimizable(info->IsOptimizable() &&
!info->function()->flags()->Contains(kDontOptimize) &&
info->function()->scope()->AllowsLazyRecompilation());
- code->set_self_optimization_header(cgen.has_self_optimization_header_);
cgen.PopulateDeoptimizationData(code);
cgen.PopulateTypeFeedbackInfo(code);
cgen.PopulateTypeFeedbackCells(code);
@@ -332,9 +331,6 @@ bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
code->set_stack_check_table_offset(table_offset);
CodeGenerator::PrintCode(code, info);
info->SetCode(code); // May be an empty handle.
- if (!code.is_null()) {
- isolate->runtime_profiler()->NotifyCodeGenerated(code->instruction_size());
- }
#ifdef ENABLE_GDB_JIT_INTERFACE
if (FLAG_gdbjit && !code.is_null()) {
GDBJITLineInfo* lineinfo =
@@ -444,14 +440,14 @@ void FullCodeGenerator::PrepareForBailoutForId(unsigned id, State state) {
}
}
#endif // DEBUG
- bailout_entries_.Add(entry);
+ bailout_entries_.Add(entry, zone());
}
void FullCodeGenerator::RecordTypeFeedbackCell(
unsigned id, Handle<JSGlobalPropertyCell> cell) {
TypeFeedbackCellEntry entry = { id, cell };
- type_feedback_cells_.Add(entry);
+ type_feedback_cells_.Add(entry, zone());
}
@@ -460,7 +456,7 @@ void FullCodeGenerator::RecordStackCheck(unsigned ast_id) {
// state.
ASSERT(masm_->pc_offset() > 0);
BailoutEntry entry = { ast_id, static_cast<unsigned>(masm_->pc_offset()) };
- stack_checks_.Add(entry);
+ stack_checks_.Add(entry, zone());
}
@@ -573,88 +569,91 @@ void FullCodeGenerator::DoTest(const TestContext* context) {
void FullCodeGenerator::VisitDeclarations(
ZoneList<Declaration*>* declarations) {
- int save_global_count = global_count_;
- global_count_ = 0;
+ ZoneList<Handle<Object> >* saved_globals = globals_;
+ ZoneList<Handle<Object> > inner_globals(10, zone());
+ globals_ = &inner_globals;
AstVisitor::VisitDeclarations(declarations);
-
- // Batch declare global functions and variables.
- if (global_count_ > 0) {
- Handle<FixedArray> array =
- isolate()->factory()->NewFixedArray(2 * global_count_, TENURED);
- int length = declarations->length();
- for (int j = 0, i = 0; i < length; i++) {
- Declaration* decl = declarations->at(i);
- Variable* var = decl->proxy()->var();
-
- if (var->IsUnallocated()) {
- array->set(j++, *(var->name()));
- FunctionDeclaration* fun_decl = decl->AsFunctionDeclaration();
- if (fun_decl == NULL) {
- if (var->binding_needs_init()) {
- // In case this binding needs initialization use the hole.
- array->set_the_hole(j++);
- } else {
- array->set_undefined(j++);
- }
- } else {
- Handle<SharedFunctionInfo> function =
- Compiler::BuildFunctionInfo(fun_decl->fun(), script());
- // Check for stack-overflow exception.
- if (function.is_null()) {
- SetStackOverflow();
- return;
- }
- array->set(j++, *function);
- }
- }
- }
+ if (!globals_->is_empty()) {
// Invoke the platform-dependent code generator to do the actual
    // declaration of the global functions and variables.
+ Handle<FixedArray> array =
+ isolate()->factory()->NewFixedArray(globals_->length(), TENURED);
+ for (int i = 0; i < globals_->length(); ++i)
+ array->set(i, *globals_->at(i));
DeclareGlobals(array);
}
- global_count_ = save_global_count;
-}
-
-
-void FullCodeGenerator::VisitVariableDeclaration(VariableDeclaration* decl) {
- EmitDeclaration(decl->proxy(), decl->mode(), NULL);
+ globals_ = saved_globals;
}
-void FullCodeGenerator::VisitFunctionDeclaration(FunctionDeclaration* decl) {
- EmitDeclaration(decl->proxy(), decl->mode(), decl->fun());
-}
-
-
-void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* decl) {
- EmitDeclaration(decl->proxy(), decl->mode(), NULL);
-}
-
-
-void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* decl) {
- EmitDeclaration(decl->proxy(), decl->mode(), NULL);
-}
+void FullCodeGenerator::VisitModuleLiteral(ModuleLiteral* module) {
+ Handle<JSModule> instance = module->interface()->Instance();
+ ASSERT(!instance.is_null());
+ // Allocate a module context statically.
+ Block* block = module->body();
+ Scope* saved_scope = scope();
+ scope_ = block->scope();
+ Handle<ScopeInfo> scope_info = scope_->GetScopeInfo();
+
+ // Generate code for module creation and linking.
+ Comment cmnt(masm_, "[ ModuleLiteral");
+ SetStatementPosition(block);
+
+ if (scope_info->HasContext()) {
+ // Set up module context.
+ __ Push(scope_info);
+ __ Push(instance);
+ __ CallRuntime(Runtime::kPushModuleContext, 2);
+ StoreToFrameField(
+ StandardFrameConstants::kContextOffset, context_register());
+ }
-void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* decl) {
- // TODO(rossberg)
-}
+ {
+ Comment cmnt(masm_, "[ Declarations");
+ VisitDeclarations(scope_->declarations());
+ }
+ scope_ = saved_scope;
+ if (scope_info->HasContext()) {
+ // Pop module context.
+ LoadContextField(context_register(), Context::PREVIOUS_INDEX);
+ // Update local stack frame context field.
+ StoreToFrameField(
+ StandardFrameConstants::kContextOffset, context_register());
+ }
-void FullCodeGenerator::VisitModuleLiteral(ModuleLiteral* module) {
- // TODO(rossberg)
+ // Populate module instance object.
+ const PropertyAttributes attr =
+ static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE | DONT_ENUM);
+ for (Interface::Iterator it = module->interface()->iterator();
+ !it.done(); it.Advance()) {
+ if (it.interface()->IsModule()) {
+ Handle<Object> value = it.interface()->Instance();
+ ASSERT(!value.is_null());
+ JSReceiver::SetProperty(instance, it.name(), value, attr, kStrictMode);
+ } else {
+ // TODO(rossberg): set proper getters instead of undefined...
+ // instance->DefineAccessor(*it.name(), ACCESSOR_GETTER, *getter, attr);
+ Handle<Object> value(isolate()->heap()->undefined_value());
+ JSReceiver::SetProperty(instance, it.name(), value, attr, kStrictMode);
+ }
+ }
+ USE(instance->PreventExtensions());
}
void FullCodeGenerator::VisitModuleVariable(ModuleVariable* module) {
- // TODO(rossberg)
+  // Nothing to do.
+ // The instance object is resolved statically through the module's interface.
}
void FullCodeGenerator::VisitModulePath(ModulePath* module) {
- // TODO(rossberg)
+  // Nothing to do.
+ // The instance object is resolved statically through the module's interface.
}
@@ -916,9 +915,9 @@ void FullCodeGenerator::VisitBlock(Block* stmt) {
Scope* saved_scope = scope();
// Push a block context when entering a block with block scoped variables.
- if (stmt->block_scope() != NULL) {
+ if (stmt->scope() != NULL) {
{ Comment cmnt(masm_, "[ Extend block context");
- scope_ = stmt->block_scope();
+ scope_ = stmt->scope();
Handle<ScopeInfo> scope_info = scope_->GetScopeInfo();
int heap_slots = scope_info->ContextLength() - Context::MIN_CONTEXT_SLOTS;
__ Push(scope_info);
@@ -945,7 +944,7 @@ void FullCodeGenerator::VisitBlock(Block* stmt) {
PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
// Pop block context if necessary.
- if (stmt->block_scope() != NULL) {
+ if (stmt->scope() != NULL) {
LoadContextField(context_register(), Context::PREVIOUS_INDEX);
// Update local stack frame context field.
StoreToFrameField(StandardFrameConstants::kContextOffset,
diff --git a/deps/v8/src/full-codegen.h b/deps/v8/src/full-codegen.h
index 58d59862a5..928de47b31 100644
--- a/deps/v8/src/full-codegen.h
+++ b/deps/v8/src/full-codegen.h
@@ -77,28 +77,25 @@ class FullCodeGenerator: public AstVisitor {
TOS_REG
};
- FullCodeGenerator(MacroAssembler* masm, CompilationInfo* info)
+ FullCodeGenerator(MacroAssembler* masm, CompilationInfo* info,
+ Zone* zone)
: masm_(masm),
info_(info),
scope_(info->scope()),
nesting_stack_(NULL),
loop_depth_(0),
- global_count_(0),
+ globals_(NULL),
context_(NULL),
bailout_entries_(info->HasDeoptimizationSupport()
- ? info->function()->ast_node_count() : 0),
- stack_checks_(2), // There's always at least one.
+ ? info->function()->ast_node_count() : 0, zone),
+ stack_checks_(2, zone), // There's always at least one.
type_feedback_cells_(info->HasDeoptimizationSupport()
- ? info->function()->ast_node_count() : 0),
+ ? info->function()->ast_node_count() : 0, zone),
ic_total_count_(0),
- has_self_optimization_header_(false) { }
+ zone_(zone) { }
static bool MakeCode(CompilationInfo* info);
- // Returns the platform-specific size in bytes of the self-optimization
- // header.
- static int self_optimization_header_size();
-
// Encode state and pc-offset as a BitField<type, start, size>.
// Only use 30 bits because we encode the result as a smi.
class StateField : public BitField<State, 0, 1> { };
@@ -113,6 +110,8 @@ class FullCodeGenerator: public AstVisitor {
return NULL;
}
+ Zone* zone() const { return zone_; }
+
private:
class Breakable;
class Iteration;
@@ -207,7 +206,7 @@ class FullCodeGenerator: public AstVisitor {
virtual ~NestedBlock() {}
virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
- if (statement()->AsBlock()->block_scope() != NULL) {
+ if (statement()->AsBlock()->scope() != NULL) {
++(*context_length);
}
return previous_;
@@ -241,7 +240,7 @@ class FullCodeGenerator: public AstVisitor {
// The finally block of a try/finally statement.
class Finally : public NestedStatement {
public:
- static const int kElementCount = 2;
+ static const int kElementCount = 5;
explicit Finally(FullCodeGenerator* codegen) : NestedStatement(codegen) { }
virtual ~Finally() {}
@@ -418,12 +417,9 @@ class FullCodeGenerator: public AstVisitor {
Label* if_true,
Label* if_false);
- // Platform-specific code for a variable, constant, or function
- // declaration. Functions have an initial value.
- // Increments global_count_ for unallocated variables.
- void EmitDeclaration(VariableProxy* proxy,
- VariableMode mode,
- FunctionLiteral* function);
+ // If enabled, emit debug code for checking that the current context is
+ // neither a with nor a catch context.
+ void EmitDebugCheckDeclarationContext(Variable* variable);
// Platform-specific code for checking the stack limit at the back edge of
// a loop.
@@ -553,12 +549,8 @@ class FullCodeGenerator: public AstVisitor {
Handle<Script> script() { return info_->script(); }
bool is_eval() { return info_->is_eval(); }
bool is_native() { return info_->is_native(); }
- bool is_classic_mode() {
- return language_mode() == CLASSIC_MODE;
- }
- LanguageMode language_mode() {
- return function()->language_mode();
- }
+ bool is_classic_mode() { return language_mode() == CLASSIC_MODE; }
+ LanguageMode language_mode() { return function()->language_mode(); }
FunctionLiteral* function() { return info_->function(); }
Scope* scope() { return scope_; }
@@ -790,15 +782,15 @@ class FullCodeGenerator: public AstVisitor {
Label return_label_;
NestedStatement* nesting_stack_;
int loop_depth_;
- int global_count_;
+ ZoneList<Handle<Object> >* globals_;
const ExpressionContext* context_;
ZoneList<BailoutEntry> bailout_entries_;
ZoneList<BailoutEntry> stack_checks_;
ZoneList<TypeFeedbackCellEntry> type_feedback_cells_;
int ic_total_count_;
- bool has_self_optimization_header_;
Handle<FixedArray> handler_table_;
Handle<JSGlobalPropertyCell> profiling_counter_;
+ Zone* zone_;
friend class NestedStatement;
@@ -809,16 +801,16 @@ class FullCodeGenerator: public AstVisitor {
// A map from property names to getter/setter pairs allocated in the zone.
class AccessorTable: public TemplateHashMap<Literal,
ObjectLiteral::Accessors,
- ZoneListAllocationPolicy> {
+ ZoneAllocationPolicy> {
public:
explicit AccessorTable(Zone* zone) :
- TemplateHashMap<Literal,
- ObjectLiteral::Accessors,
- ZoneListAllocationPolicy>(Literal::Match),
+ TemplateHashMap<Literal, ObjectLiteral::Accessors,
+ ZoneAllocationPolicy>(Literal::Match,
+ ZoneAllocationPolicy(zone)),
zone_(zone) { }
Iterator lookup(Literal* literal) {
- Iterator it = find(literal, true);
+ Iterator it = find(literal, true, ZoneAllocationPolicy(zone_));
if (it->second == NULL) it->second = new(zone_) ObjectLiteral::Accessors();
return it;
}
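The AccessorTable rewrite above threads a ZoneAllocationPolicy that carries its zone explicitly, and new entries are placement-new'ed into that zone (new(zone_) ObjectLiteral::Accessors()). A minimal sketch of that arena placement-new pattern with invented names (not V8's actual Zone interface):

    #include <cstddef>

    // Stand-in for a bump-pointer zone; this toy version just uses the heap.
    class Arena {
     public:
      void* Allocate(size_t size) { return ::operator new(size); }
    };

    // Base class whose operator new routes allocation into an arena.
    struct ArenaObject {
      void* operator new(size_t size, Arena* arena) { return arena->Allocate(size); }
      void operator delete(void*, Arena*) {}  // matching placement delete
    };

    struct Accessors : ArenaObject { /* getter/setter pair */ };

    void Example(Arena* arena) {
      Accessors* pair = new (arena) Accessors();  // lives until the arena dies
      (void)pair;
    }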
diff --git a/deps/v8/src/func-name-inferrer.cc b/deps/v8/src/func-name-inferrer.cc
index 239358dfa6..2dd0bbc15d 100644
--- a/deps/v8/src/func-name-inferrer.cc
+++ b/deps/v8/src/func-name-inferrer.cc
@@ -34,11 +34,12 @@
namespace v8 {
namespace internal {
-FuncNameInferrer::FuncNameInferrer(Isolate* isolate)
+FuncNameInferrer::FuncNameInferrer(Isolate* isolate, Zone* zone)
: isolate_(isolate),
- entries_stack_(10),
- names_stack_(5),
- funcs_to_infer_(4) {
+ entries_stack_(10, zone),
+ names_stack_(5, zone),
+ funcs_to_infer_(4, zone),
+ zone_(zone) {
}
@@ -48,21 +49,21 @@ void FuncNameInferrer::PushEnclosingName(Handle<String> name) {
// and starts with a capital letter.
if (name->length() > 0 && Runtime::IsUpperCaseChar(
isolate()->runtime_state(), name->Get(0))) {
- names_stack_.Add(Name(name, kEnclosingConstructorName));
+ names_stack_.Add(Name(name, kEnclosingConstructorName), zone());
}
}
void FuncNameInferrer::PushLiteralName(Handle<String> name) {
if (IsOpen() && !isolate()->heap()->prototype_symbol()->Equals(*name)) {
- names_stack_.Add(Name(name, kLiteralName));
+ names_stack_.Add(Name(name, kLiteralName), zone());
}
}
void FuncNameInferrer::PushVariableName(Handle<String> name) {
if (IsOpen() && !isolate()->heap()->result_symbol()->Equals(*name)) {
- names_stack_.Add(Name(name, kVariableName));
+ names_stack_.Add(Name(name, kVariableName), zone());
}
}
diff --git a/deps/v8/src/func-name-inferrer.h b/deps/v8/src/func-name-inferrer.h
index 1a57268326..f57e778604 100644
--- a/deps/v8/src/func-name-inferrer.h
+++ b/deps/v8/src/func-name-inferrer.h
@@ -45,7 +45,7 @@ class Isolate;
// a name.
class FuncNameInferrer : public ZoneObject {
public:
- explicit FuncNameInferrer(Isolate* isolate);
+ FuncNameInferrer(Isolate* isolate, Zone* zone);
// Returns whether we have entered name collection state.
bool IsOpen() const { return !entries_stack_.is_empty(); }
@@ -55,7 +55,7 @@ class FuncNameInferrer : public ZoneObject {
// Enters name collection state.
void Enter() {
- entries_stack_.Add(names_stack_.length());
+ entries_stack_.Add(names_stack_.length(), zone());
}
// Pushes an encountered name onto names stack when in collection state.
@@ -66,7 +66,7 @@ class FuncNameInferrer : public ZoneObject {
// Adds a function to infer name for.
void AddFunction(FunctionLiteral* func_to_infer) {
if (IsOpen()) {
- funcs_to_infer_.Add(func_to_infer);
+ funcs_to_infer_.Add(func_to_infer, zone());
}
}
@@ -88,6 +88,8 @@ class FuncNameInferrer : public ZoneObject {
void Leave() {
ASSERT(IsOpen());
names_stack_.Rewind(entries_stack_.RemoveLast());
+ if (entries_stack_.is_empty())
+ funcs_to_infer_.Clear();
}
private:
@@ -103,6 +105,7 @@ class FuncNameInferrer : public ZoneObject {
};
Isolate* isolate() { return isolate_; }
+ Zone* zone() const { return zone_; }
// Constructs a full name in dotted notation from gathered names.
Handle<String> MakeNameFromStack();
@@ -117,6 +120,7 @@ class FuncNameInferrer : public ZoneObject {
ZoneList<int> entries_stack_;
ZoneList<Name> names_stack_;
ZoneList<FunctionLiteral*> funcs_to_infer_;
+ Zone* zone_;
DISALLOW_COPY_AND_ASSIGN(FuncNameInferrer);
};
diff --git a/deps/v8/src/handles.cc b/deps/v8/src/handles.cc
index 416ecbd211..def1604ac7 100644
--- a/deps/v8/src/handles.cc
+++ b/deps/v8/src/handles.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -729,9 +729,9 @@ Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object,
Handle<DescriptorArray>(object->map()->instance_descriptors(), isolate);
for (int i = 0; i < descs->number_of_descriptors(); i++) {
- if (descs->IsProperty(i) && !descs->IsDontEnum(i)) {
+ if (descs->IsProperty(i) && !descs->GetDetails(i).IsDontEnum()) {
storage->set(index, descs->GetKey(i));
- PropertyDetails details(descs->GetDetails(i));
+ PropertyDetails details = descs->GetDetails(i);
sort_array->set(index, Smi::FromInt(details.index()));
if (!indices.is_null()) {
if (details.type() != FIELD) {
diff --git a/deps/v8/src/hashmap.h b/deps/v8/src/hashmap.h
index 5aeb8951ed..6f76e9f7f4 100644
--- a/deps/v8/src/hashmap.h
+++ b/deps/v8/src/hashmap.h
@@ -40,9 +40,16 @@ class TemplateHashMapImpl {
public:
typedef bool (*MatchFun) (void* key1, void* key2);
+ // The default capacity. This is used by the call sites which want
+ // to pass in a non-default AllocationPolicy but want to use the
+ // default value of capacity specified by the implementation.
+ static const uint32_t kDefaultHashMapCapacity = 8;
+
// initial_capacity is the size of the initial hash map;
// it must be a power of 2 (and thus must not be 0).
- TemplateHashMapImpl(MatchFun match, uint32_t initial_capacity = 8);
+ TemplateHashMapImpl(MatchFun match,
+ uint32_t capacity = kDefaultHashMapCapacity,
+ AllocationPolicy allocator = AllocationPolicy());
~TemplateHashMapImpl();
@@ -60,10 +67,13 @@ class TemplateHashMapImpl {
// but insert is set, a new entry is inserted with
// corresponding key, key hash, and NULL value.
// Otherwise, NULL is returned.
- Entry* Lookup(void* key, uint32_t hash, bool insert);
+ Entry* Lookup(void* key, uint32_t hash, bool insert,
+ AllocationPolicy allocator = AllocationPolicy());
// Removes the entry with matching key.
- void Remove(void* key, uint32_t hash);
+  // Returns the value of the deleted entry, or NULL if there is no entry
+  // with the given key.
+ void* Remove(void* key, uint32_t hash);
// Empties the hash map (occupancy() == 0).
void Clear();
@@ -95,29 +105,30 @@ class TemplateHashMapImpl {
Entry* map_end() const { return map_ + capacity_; }
Entry* Probe(void* key, uint32_t hash);
- void Initialize(uint32_t capacity);
- void Resize();
+ void Initialize(uint32_t capacity, AllocationPolicy allocator);
+ void Resize(AllocationPolicy allocator);
};
typedef TemplateHashMapImpl<FreeStoreAllocationPolicy> HashMap;
-template<class P>
-TemplateHashMapImpl<P>::TemplateHashMapImpl(MatchFun match,
- uint32_t initial_capacity) {
+template<class AllocationPolicy>
+TemplateHashMapImpl<AllocationPolicy>::TemplateHashMapImpl(
+ MatchFun match, uint32_t initial_capacity, AllocationPolicy allocator) {
match_ = match;
- Initialize(initial_capacity);
+ Initialize(initial_capacity, allocator);
}
-template<class P>
-TemplateHashMapImpl<P>::~TemplateHashMapImpl() {
- P::Delete(map_);
+template<class AllocationPolicy>
+TemplateHashMapImpl<AllocationPolicy>::~TemplateHashMapImpl() {
+ AllocationPolicy::Delete(map_);
}
-template<class P>
-typename TemplateHashMapImpl<P>::Entry* TemplateHashMapImpl<P>::Lookup(
- void* key, uint32_t hash, bool insert) {
+template<class AllocationPolicy>
+typename TemplateHashMapImpl<AllocationPolicy>::Entry*
+TemplateHashMapImpl<AllocationPolicy>::Lookup(
+ void* key, uint32_t hash, bool insert, AllocationPolicy allocator) {
// Find a matching entry.
Entry* p = Probe(key, hash);
if (p->key != NULL) {
@@ -133,7 +144,7 @@ typename TemplateHashMapImpl<P>::Entry* TemplateHashMapImpl<P>::Lookup(
// Grow the map if we reached >= 80% occupancy.
if (occupancy_ + occupancy_/4 >= capacity_) {
- Resize();
+ Resize(allocator);
p = Probe(key, hash);
}
@@ -145,15 +156,16 @@ typename TemplateHashMapImpl<P>::Entry* TemplateHashMapImpl<P>::Lookup(
}
-template<class P>
-void TemplateHashMapImpl<P>::Remove(void* key, uint32_t hash) {
+template<class AllocationPolicy>
+void* TemplateHashMapImpl<AllocationPolicy>::Remove(void* key, uint32_t hash) {
// Lookup the entry for the key to remove.
Entry* p = Probe(key, hash);
if (p->key == NULL) {
    // Key not found, nothing to remove.
- return;
+ return NULL;
}
+ void* value = p->value;
// To remove an entry we need to ensure that it does not create an empty
// entry that will cause the search for another entry to stop too soon. If all
// the entries between the entry to remove and the next empty slot have their
@@ -202,11 +214,12 @@ void TemplateHashMapImpl<P>::Remove(void* key, uint32_t hash) {
  // Clear the entry which is allowed to be emptied.
p->key = NULL;
occupancy_--;
+ return value;
}
-template<class P>
-void TemplateHashMapImpl<P>::Clear() {
+template<class AllocationPolicy>
+void TemplateHashMapImpl<AllocationPolicy>::Clear() {
// Mark all entries as empty.
const Entry* end = map_end();
for (Entry* p = map_; p < end; p++) {
@@ -216,15 +229,16 @@ void TemplateHashMapImpl<P>::Clear() {
}
-template<class P>
-typename TemplateHashMapImpl<P>::Entry* TemplateHashMapImpl<P>::Start() const {
+template<class AllocationPolicy>
+typename TemplateHashMapImpl<AllocationPolicy>::Entry*
+ TemplateHashMapImpl<AllocationPolicy>::Start() const {
return Next(map_ - 1);
}
-template<class P>
-typename TemplateHashMapImpl<P>::Entry* TemplateHashMapImpl<P>::Next(Entry* p)
- const {
+template<class AllocationPolicy>
+typename TemplateHashMapImpl<AllocationPolicy>::Entry*
+ TemplateHashMapImpl<AllocationPolicy>::Next(Entry* p) const {
const Entry* end = map_end();
ASSERT(map_ - 1 <= p && p < end);
for (p++; p < end; p++) {
@@ -236,9 +250,9 @@ typename TemplateHashMapImpl<P>::Entry* TemplateHashMapImpl<P>::Next(Entry* p)
}
-template<class P>
-typename TemplateHashMapImpl<P>::Entry* TemplateHashMapImpl<P>::Probe(void* key,
- uint32_t hash) {
+template<class AllocationPolicy>
+typename TemplateHashMapImpl<AllocationPolicy>::Entry*
+ TemplateHashMapImpl<AllocationPolicy>::Probe(void* key, uint32_t hash) {
ASSERT(key != NULL);
ASSERT(IsPowerOf2(capacity_));
@@ -258,10 +272,11 @@ typename TemplateHashMapImpl<P>::Entry* TemplateHashMapImpl<P>::Probe(void* key,
}
-template<class P>
-void TemplateHashMapImpl<P>::Initialize(uint32_t capacity) {
+template<class AllocationPolicy>
+void TemplateHashMapImpl<AllocationPolicy>::Initialize(
+ uint32_t capacity, AllocationPolicy allocator) {
ASSERT(IsPowerOf2(capacity));
- map_ = reinterpret_cast<Entry*>(P::New(capacity * sizeof(Entry)));
+ map_ = reinterpret_cast<Entry*>(allocator.New(capacity * sizeof(Entry)));
if (map_ == NULL) {
v8::internal::FatalProcessOutOfMemory("HashMap::Initialize");
return;
@@ -271,24 +286,24 @@ void TemplateHashMapImpl<P>::Initialize(uint32_t capacity) {
}
-template<class P>
-void TemplateHashMapImpl<P>::Resize() {
+template<class AllocationPolicy>
+void TemplateHashMapImpl<AllocationPolicy>::Resize(AllocationPolicy allocator) {
Entry* map = map_;
uint32_t n = occupancy_;
// Allocate larger map.
- Initialize(capacity_ * 2);
+ Initialize(capacity_ * 2, allocator);
// Rehash all current entries.
for (Entry* p = map; n > 0; p++) {
if (p->key != NULL) {
- Lookup(p->key, p->hash, true)->value = p->value;
+ Lookup(p->key, p->hash, true, allocator)->value = p->value;
n--;
}
}
// Delete old map.
- P::Delete(map);
+ AllocationPolicy::Delete(map);
}
@@ -325,13 +340,18 @@ class TemplateHashMap: private TemplateHashMapImpl<AllocationPolicy> {
};
TemplateHashMap(
- typename TemplateHashMapImpl<AllocationPolicy>::MatchFun match)
- : TemplateHashMapImpl<AllocationPolicy>(match) { }
+ typename TemplateHashMapImpl<AllocationPolicy>::MatchFun match,
+ AllocationPolicy allocator = AllocationPolicy())
+ : TemplateHashMapImpl<AllocationPolicy>(
+ match,
+ TemplateHashMapImpl<AllocationPolicy>::kDefaultHashMapCapacity,
+ allocator) { }
Iterator begin() const { return Iterator(this, this->Start()); }
Iterator end() const { return Iterator(this, NULL); }
- Iterator find(Key* key, bool insert = false) {
- return Iterator(this, this->Lookup(key, key->Hash(), insert));
+ Iterator find(Key* key, bool insert = false,
+ AllocationPolicy allocator = AllocationPolicy()) {
+ return Iterator(this, this->Lookup(key, key->Hash(), insert, allocator));
}
};
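TemplateHashMapImpl is now parameterized over an AllocationPolicy value instead of a purely static policy class, which is what lets a zone-backed policy object be handed through Lookup and Resize. A sketch of the policy concept the template expects (MallocPolicy and Buffer are illustrative names, not V8 types):

    #include <cstdlib>

    // A policy provides New for allocation and a static Delete for release;
    // stateful policies (e.g. zone-backed ones) carry their backing store.
    class MallocPolicy {
     public:
      void* New(size_t size) { return std::malloc(size); }
      static void Delete(void* p) { std::free(p); }
    };

    template <class AllocationPolicy>
    class Buffer {
     public:
      explicit Buffer(size_t n, AllocationPolicy allocator = AllocationPolicy())
          : data_(static_cast<char*>(allocator.New(n))) {}
      ~Buffer() { AllocationPolicy::Delete(data_); }
     private:
      char* data_;
    };

    void Example() {
      Buffer<MallocPolicy> scratch(256);  // storage obtained via the policy
    }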
diff --git a/deps/v8/src/heap-inl.h b/deps/v8/src/heap-inl.h
index 706d2886b9..9d79db2466 100644
--- a/deps/v8/src/heap-inl.h
+++ b/deps/v8/src/heap-inl.h
@@ -460,15 +460,16 @@ MaybeObject* Heap::PrepareForCompare(String* str) {
}
-int Heap::AdjustAmountOfExternalAllocatedMemory(int change_in_bytes) {
+intptr_t Heap::AdjustAmountOfExternalAllocatedMemory(
+ intptr_t change_in_bytes) {
ASSERT(HasBeenSetUp());
- int amount = amount_of_external_allocated_memory_ + change_in_bytes;
+ intptr_t amount = amount_of_external_allocated_memory_ + change_in_bytes;
if (change_in_bytes >= 0) {
// Avoid overflow.
if (amount > amount_of_external_allocated_memory_) {
amount_of_external_allocated_memory_ = amount;
}
- int amount_since_last_global_gc =
+ intptr_t amount_since_last_global_gc =
amount_of_external_allocated_memory_ -
amount_of_external_allocated_memory_at_last_global_gc_;
if (amount_since_last_global_gc > external_allocation_limit_) {
@@ -594,12 +595,24 @@ void ExternalStringTable::Iterate(ObjectVisitor* v) {
void ExternalStringTable::Verify() {
#ifdef DEBUG
for (int i = 0; i < new_space_strings_.length(); ++i) {
- ASSERT(heap_->InNewSpace(new_space_strings_[i]));
- ASSERT(new_space_strings_[i] != HEAP->raw_unchecked_the_hole_value());
+ Object* obj = Object::cast(new_space_strings_[i]);
+ // TODO(yangguo): check that the object is indeed an external string.
+ ASSERT(heap_->InNewSpace(obj));
+ ASSERT(obj != HEAP->raw_unchecked_the_hole_value());
+ if (obj->IsExternalAsciiString()) {
+ ExternalAsciiString* string = ExternalAsciiString::cast(obj);
+ ASSERT(String::IsAscii(string->GetChars(), string->length()));
+ }
}
for (int i = 0; i < old_space_strings_.length(); ++i) {
- ASSERT(!heap_->InNewSpace(old_space_strings_[i]));
- ASSERT(old_space_strings_[i] != HEAP->raw_unchecked_the_hole_value());
+ Object* obj = Object::cast(old_space_strings_[i]);
+ // TODO(yangguo): check that the object is indeed an external string.
+ ASSERT(!heap_->InNewSpace(obj));
+ ASSERT(obj != HEAP->raw_unchecked_the_hole_value());
+ if (obj->IsExternalAsciiString()) {
+ ExternalAsciiString* string = ExternalAsciiString::cast(obj);
+ ASSERT(String::IsAscii(string->GetChars(), string->length()));
+ }
}
#endif
}
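Widening AdjustAmountOfExternalAllocatedMemory from int to intptr_t matters once embedders register more external memory than a 32-bit counter can hold: on a 64-bit build the wider type keeps the accounting monotone instead of truncating. A small, self-contained illustration of the difference (not V8 code):

    #include <stdint.h>
    #include <stdio.h>

    int main() {
      int64_t tracked = 2000LL * 1000 * 1000;   // ~2 GB already accounted for
      int64_t delta = 500LL * 1000 * 1000;      // another 500 MB registered
      int64_t total = tracked + delta;          // 2,500,000,000

      int32_t as_int32 = (int32_t)total;        // truncates and goes negative
      intptr_t as_intptr = (intptr_t)total;     // intact on 64-bit targets

      printf("total=%lld int32=%d intptr=%lld\n",
             (long long)total, as_int32, (long long)as_intptr);
      return 0;
    }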
diff --git a/deps/v8/src/heap-profiler.cc b/deps/v8/src/heap-profiler.cc
index 8be6f27685..301b09993e 100644
--- a/deps/v8/src/heap-profiler.cc
+++ b/deps/v8/src/heap-profiler.cc
@@ -33,7 +33,6 @@
namespace v8 {
namespace internal {
-
HeapProfiler::HeapProfiler()
: snapshots_(new HeapSnapshotsCollection()),
next_snapshot_uid_(1) {
@@ -86,6 +85,24 @@ HeapSnapshot* HeapProfiler::TakeSnapshot(String* name,
}
+void HeapProfiler::StartHeapObjectsTracking() {
+ ASSERT(Isolate::Current()->heap_profiler() != NULL);
+ Isolate::Current()->heap_profiler()->StartHeapObjectsTrackingImpl();
+}
+
+
+void HeapProfiler::StopHeapObjectsTracking() {
+ ASSERT(Isolate::Current()->heap_profiler() != NULL);
+ Isolate::Current()->heap_profiler()->StopHeapObjectsTrackingImpl();
+}
+
+
+SnapshotObjectId HeapProfiler::PushHeapObjectsStats(v8::OutputStream* stream) {
+ ASSERT(Isolate::Current()->heap_profiler() != NULL);
+ return Isolate::Current()->heap_profiler()->PushHeapObjectsStatsImpl(stream);
+}
+
+
void HeapProfiler::DefineWrapperClass(
uint16_t class_id, v8::HeapProfiler::WrapperInfoCallback callback) {
ASSERT(class_id != v8::HeapProfiler::kPersistentHandleNoClassId);
@@ -136,6 +153,28 @@ HeapSnapshot* HeapProfiler::TakeSnapshotImpl(String* name,
return TakeSnapshotImpl(snapshots_->names()->GetName(name), type, control);
}
+void HeapProfiler::StartHeapObjectsTrackingImpl() {
+ snapshots_->StartHeapObjectsTracking();
+}
+
+
+SnapshotObjectId HeapProfiler::PushHeapObjectsStatsImpl(OutputStream* stream) {
+ return snapshots_->PushHeapObjectsStats(stream);
+}
+
+
+void HeapProfiler::StopHeapObjectsTrackingImpl() {
+ snapshots_->StopHeapObjectsTracking();
+}
+
+
+size_t HeapProfiler::GetMemorySizeUsedByProfiler() {
+ HeapProfiler* profiler = Isolate::Current()->heap_profiler();
+ ASSERT(profiler != NULL);
+ size_t size = profiler->snapshots_->GetUsedMemorySize();
+ return size;
+}
+
int HeapProfiler::GetSnapshotsCount() {
HeapProfiler* profiler = Isolate::Current()->heap_profiler();
@@ -158,6 +197,15 @@ HeapSnapshot* HeapProfiler::FindSnapshot(unsigned uid) {
}
+SnapshotObjectId HeapProfiler::GetSnapshotObjectId(Handle<Object> obj) {
+ if (!obj->IsHeapObject())
+ return v8::HeapProfiler::kUnknownObjectId;
+ HeapProfiler* profiler = Isolate::Current()->heap_profiler();
+ ASSERT(profiler != NULL);
+ return profiler->snapshots_->FindObjectId(HeapObject::cast(*obj)->address());
+}
+
+
void HeapProfiler::DeleteAllSnapshots() {
HeapProfiler* profiler = Isolate::Current()->heap_profiler();
ASSERT(profiler != NULL);
diff --git a/deps/v8/src/heap-profiler.h b/deps/v8/src/heap-profiler.h
index ef5c4f4b4a..346177b8ba 100644
--- a/deps/v8/src/heap-profiler.h
+++ b/deps/v8/src/heap-profiler.h
@@ -44,22 +44,27 @@ class HeapSnapshotsCollection;
} \
} while (false)
-// The HeapProfiler writes data to the log files, which can be postprocessed
-// to generate .hp files for use by the GHC/Valgrind tool hp2ps.
class HeapProfiler {
public:
static void SetUp();
static void TearDown();
+ static size_t GetMemorySizeUsedByProfiler();
+
static HeapSnapshot* TakeSnapshot(const char* name,
int type,
v8::ActivityControl* control);
static HeapSnapshot* TakeSnapshot(String* name,
int type,
v8::ActivityControl* control);
+
+ static void StartHeapObjectsTracking();
+ static void StopHeapObjectsTracking();
+ static SnapshotObjectId PushHeapObjectsStats(OutputStream* stream);
static int GetSnapshotsCount();
static HeapSnapshot* GetSnapshot(int index);
static HeapSnapshot* FindSnapshot(unsigned uid);
+ static SnapshotObjectId GetSnapshotObjectId(Handle<Object> obj);
static void DeleteAllSnapshots();
void ObjectMoveEvent(Address from, Address to);
@@ -84,6 +89,10 @@ class HeapProfiler {
v8::ActivityControl* control);
void ResetSnapshots();
+ void StartHeapObjectsTrackingImpl();
+ void StopHeapObjectsTrackingImpl();
+ SnapshotObjectId PushHeapObjectsStatsImpl(OutputStream* stream);
+
HeapSnapshotsCollection* snapshots_;
unsigned next_snapshot_uid_;
List<v8::HeapProfiler::WrapperInfoCallback> wrapper_callbacks_;
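The new tracking entry points form a start / push / stop lifecycle around continuous heap-object accounting. A loosely hedged sketch of how the internal interface declared above might be driven; the OutputStream implementation (stats_stream) and the surrounding isolate setup are assumptions, not part of this patch:

    // Illustrative call sequence only; error handling and isolate setup elided.
    void SampleHeapGrowth(v8::OutputStream* stats_stream,
                          v8::internal::Handle<v8::internal::Object> obj) {
      using v8::internal::HeapProfiler;
      HeapProfiler::StartHeapObjectsTracking();
      // ... run script, allocate objects ...
      v8::internal::SnapshotObjectId last =
          HeapProfiler::PushHeapObjectsStats(stats_stream);  // flush deltas
      v8::internal::SnapshotObjectId id = HeapProfiler::GetSnapshotObjectId(obj);
      (void)last; (void)id;  // ids can be correlated with the pushed statistics
      HeapProfiler::StopHeapObjectsTracking();
    }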
diff --git a/deps/v8/src/heap.cc b/deps/v8/src/heap.cc
index e0116192ce..172405b72c 100644
--- a/deps/v8/src/heap.cc
+++ b/deps/v8/src/heap.cc
@@ -42,6 +42,7 @@
#include "natives.h"
#include "objects-visiting.h"
#include "objects-visiting-inl.h"
+#include "once.h"
#include "runtime-profiler.h"
#include "scopeinfo.h"
#include "snapshot.h"
@@ -60,8 +61,6 @@
namespace v8 {
namespace internal {
-static LazyMutex gc_initializer_mutex = LAZY_MUTEX_INITIALIZER;
-
Heap::Heap()
: isolate_(NULL),
@@ -177,6 +176,9 @@ Heap::Heap()
global_contexts_list_ = NULL;
mark_compact_collector_.heap_ = this;
external_string_table_.heap_ = this;
+  // Put a dummy entry in the remembered pages so we can find the list in
+  // the minidump even if there are no real unmapped pages.
+ RememberUnmappedPage(NULL, false);
}
@@ -244,12 +246,17 @@ int Heap::GcSafeSizeOfOldObject(HeapObject* object) {
GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space,
const char** reason) {
// Is global GC requested?
- if (space != NEW_SPACE || FLAG_gc_global) {
+ if (space != NEW_SPACE) {
isolate_->counters()->gc_compactor_caused_by_request()->Increment();
*reason = "GC in old space requested";
return MARK_COMPACTOR;
}
+ if (FLAG_gc_global || (FLAG_stress_compaction && (gc_count_ & 1) != 0)) {
+ *reason = "GC in old space forced by flags";
+ return MARK_COMPACTOR;
+ }
+
// Is enough data promoted to justify a global GC?
if (OldGenerationPromotionLimitReached()) {
isolate_->counters()->gc_compactor_caused_by_promoted_data()->Increment();
@@ -806,7 +813,7 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
UpdateSurvivalRateTrend(start_new_space_size);
- size_of_old_gen_at_last_old_space_gc_ = PromotedSpaceSize();
+ size_of_old_gen_at_last_old_space_gc_ = PromotedSpaceSizeOfObjects();
if (high_survival_rate_during_scavenges &&
IsStableOrIncreasingSurvivalTrend()) {
@@ -1130,6 +1137,27 @@ void PromotionQueue::RelocateQueueHead() {
}
+class ScavengeWeakObjectRetainer : public WeakObjectRetainer {
+ public:
+ explicit ScavengeWeakObjectRetainer(Heap* heap) : heap_(heap) { }
+
+ virtual Object* RetainAs(Object* object) {
+ if (!heap_->InFromSpace(object)) {
+ return object;
+ }
+
+ MapWord map_word = HeapObject::cast(object)->map_word();
+ if (map_word.IsForwardingAddress()) {
+ return map_word.ToForwardingAddress();
+ }
+ return NULL;
+ }
+
+ private:
+ Heap* heap_;
+};
+
+
void Heap::Scavenge() {
#ifdef DEBUG
if (FLAG_verify_heap) VerifyNonPointerSpacePointers();
@@ -1228,6 +1256,9 @@ void Heap::Scavenge() {
}
incremental_marking()->UpdateMarkingDequeAfterScavenge();
+ ScavengeWeakObjectRetainer weak_object_retainer(this);
+ ProcessWeakReferences(&weak_object_retainer);
+
ASSERT(new_space_front == new_space_.top());
// Set age mark.
@@ -1314,7 +1345,8 @@ void Heap::UpdateReferencesInExternalStringTable(
static Object* ProcessFunctionWeakReferences(Heap* heap,
Object* function,
- WeakObjectRetainer* retainer) {
+ WeakObjectRetainer* retainer,
+ bool record_slots) {
Object* undefined = heap->undefined_value();
Object* head = undefined;
JSFunction* tail = NULL;
@@ -1331,6 +1363,12 @@ static Object* ProcessFunctionWeakReferences(Heap* heap,
// Subsequent elements in the list.
ASSERT(tail != NULL);
tail->set_next_function_link(retain);
+ if (record_slots) {
+ Object** next_function =
+ HeapObject::RawField(tail, JSFunction::kNextFunctionLinkOffset);
+ heap->mark_compact_collector()->RecordSlot(
+ next_function, next_function, retain);
+ }
}
// Retained function is new tail.
candidate_function = reinterpret_cast<JSFunction*>(retain);
@@ -1359,6 +1397,15 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
Object* head = undefined;
Context* tail = NULL;
Object* candidate = global_contexts_list_;
+
+  // We don't record weak slots during marking or scavenges. Instead we do it
+  // once when we complete the mark-compact cycle. Note that the write barrier
+  // has no effect if we are already in the middle of a compacting mark-sweep
+  // cycle, so we have to record slots manually.
+ bool record_slots =
+ gc_state() == MARK_COMPACT &&
+ mark_compact_collector()->is_compacting();
+
while (candidate != undefined) {
// Check whether to keep the candidate in the list.
Context* candidate_context = reinterpret_cast<Context*>(candidate);
@@ -1374,6 +1421,14 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
Context::NEXT_CONTEXT_LINK,
retain,
UPDATE_WRITE_BARRIER);
+
+ if (record_slots) {
+ Object** next_context =
+ HeapObject::RawField(
+ tail, FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK));
+ mark_compact_collector()->RecordSlot(
+ next_context, next_context, retain);
+ }
}
// Retained context is new tail.
candidate_context = reinterpret_cast<Context*>(retain);
@@ -1386,11 +1441,19 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
ProcessFunctionWeakReferences(
this,
candidate_context->get(Context::OPTIMIZED_FUNCTIONS_LIST),
- retainer);
+ retainer,
+ record_slots);
candidate_context->set_unchecked(this,
Context::OPTIMIZED_FUNCTIONS_LIST,
function_list_head,
UPDATE_WRITE_BARRIER);
+ if (record_slots) {
+ Object** optimized_functions =
+ HeapObject::RawField(
+ tail, FixedArray::SizeFor(Context::OPTIMIZED_FUNCTIONS_LIST));
+ mark_compact_collector()->RecordSlot(
+ optimized_functions, optimized_functions, function_list_head);
+ }
}
// Move to next element in the list.
@@ -1490,6 +1553,27 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
}
+STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == 0);
+
+
+INLINE(static HeapObject* EnsureDoubleAligned(Heap* heap,
+ HeapObject* object,
+ int size));
+
+static HeapObject* EnsureDoubleAligned(Heap* heap,
+ HeapObject* object,
+ int size) {
+ if ((OffsetFrom(object->address()) & kDoubleAlignmentMask) != 0) {
+ heap->CreateFillerObjectAt(object->address(), kPointerSize);
+ return HeapObject::FromAddress(object->address() + kPointerSize);
+ } else {
+ heap->CreateFillerObjectAt(object->address() + size - kPointerSize,
+ kPointerSize);
+ return object;
+ }
+}
+
+
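EnsureDoubleAligned assumes its callers over-allocate by one word, which both the EvacuateObject template and AllocateRawFixedDoubleArray do in this diff (allocation_size/size gain an extra kPointerSize on 32-bit targets). If the raw address is not 8-byte aligned, a one-word filler is written at the front and the object starts one word later; otherwise the spare word at the end becomes the filler. A minimal sketch of that placement decision, with illustrative constants for a 32-bit target:

// Minimal sketch of the placement decision above (standalone; illustrative
// constants, not V8's). The caller is assumed to have allocated
// size + kPointerSize bytes at a 4-byte aligned address 'raw'.
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstdio>

static const uintptr_t kPointerSize = 4;          // 32-bit target assumed
static const uintptr_t kDoubleAlignmentMask = 7;  // 8-byte alignment

static uintptr_t EnsureDoubleAligned(uintptr_t raw, size_t size) {
  if ((raw & kDoubleAlignmentMask) != 0) {
    // Misaligned: a one-word filler sits at 'raw', the object starts after it.
    return raw + kPointerSize;
  }
  // Already aligned: the spare word at raw + size - kPointerSize is the filler.
  (void)size;
  return raw;
}

int main() {
  assert(EnsureDoubleAligned(0x1000, 24) % 8 == 0);  // already aligned
  assert(EnsureDoubleAligned(0x1004, 24) % 8 == 0);  // shifted by one word
  std::printf("both placements are 8-byte aligned\n");
  return 0;
}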
enum LoggingAndProfiling {
LOGGING_AND_PROFILING_ENABLED,
LOGGING_AND_PROFILING_DISABLED
@@ -1613,7 +1697,10 @@ class ScavengingVisitor : public StaticVisitorBase {
}
}
- template<ObjectContents object_contents, SizeRestriction size_restriction>
+
+ template<ObjectContents object_contents,
+ SizeRestriction size_restriction,
+ int alignment>
static inline void EvacuateObject(Map* map,
HeapObject** slot,
HeapObject* object,
@@ -1622,19 +1709,26 @@ class ScavengingVisitor : public StaticVisitorBase {
(object_size <= Page::kMaxNonCodeHeapObjectSize));
SLOW_ASSERT(object->Size() == object_size);
+ int allocation_size = object_size;
+ if (alignment != kObjectAlignment) {
+ ASSERT(alignment == kDoubleAlignment);
+ allocation_size += kPointerSize;
+ }
+
Heap* heap = map->GetHeap();
if (heap->ShouldBePromoted(object->address(), object_size)) {
MaybeObject* maybe_result;
if ((size_restriction != SMALL) &&
- (object_size > Page::kMaxNonCodeHeapObjectSize)) {
- maybe_result = heap->lo_space()->AllocateRaw(object_size,
+ (allocation_size > Page::kMaxNonCodeHeapObjectSize)) {
+ maybe_result = heap->lo_space()->AllocateRaw(allocation_size,
NOT_EXECUTABLE);
} else {
if (object_contents == DATA_OBJECT) {
- maybe_result = heap->old_data_space()->AllocateRaw(object_size);
+ maybe_result = heap->old_data_space()->AllocateRaw(allocation_size);
} else {
- maybe_result = heap->old_pointer_space()->AllocateRaw(object_size);
+ maybe_result =
+ heap->old_pointer_space()->AllocateRaw(allocation_size);
}
}
@@ -1642,6 +1736,10 @@ class ScavengingVisitor : public StaticVisitorBase {
if (maybe_result->ToObject(&result)) {
HeapObject* target = HeapObject::cast(result);
+ if (alignment != kObjectAlignment) {
+ target = EnsureDoubleAligned(heap, target, allocation_size);
+ }
+
// Order is important: slot might be inside of the target if target
// was allocated over a dead object and slot comes from the store
// buffer.
@@ -1649,18 +1747,27 @@ class ScavengingVisitor : public StaticVisitorBase {
MigrateObject(heap, object, target, object_size);
if (object_contents == POINTER_OBJECT) {
- heap->promotion_queue()->insert(target, object_size);
+ if (map->instance_type() == JS_FUNCTION_TYPE) {
+ heap->promotion_queue()->insert(
+ target, JSFunction::kNonWeakFieldsEndOffset);
+ } else {
+ heap->promotion_queue()->insert(target, object_size);
+ }
}
heap->tracer()->increment_promoted_objects_size(object_size);
return;
}
}
- MaybeObject* allocation = heap->new_space()->AllocateRaw(object_size);
+ MaybeObject* allocation = heap->new_space()->AllocateRaw(allocation_size);
heap->promotion_queue()->SetNewLimit(heap->new_space()->top());
Object* result = allocation->ToObjectUnchecked();
HeapObject* target = HeapObject::cast(result);
+ if (alignment != kObjectAlignment) {
+ target = EnsureDoubleAligned(heap, target, allocation_size);
+ }
+
// Order is important: slot might be inside of the target if target
// was allocated over a dead object and slot comes from the store
// buffer.
@@ -1696,7 +1803,7 @@ class ScavengingVisitor : public StaticVisitorBase {
HeapObject** slot,
HeapObject* object) {
int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
- EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE>(map,
+ EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(map,
slot,
object,
object_size);
@@ -1708,10 +1815,11 @@ class ScavengingVisitor : public StaticVisitorBase {
HeapObject* object) {
int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
int object_size = FixedDoubleArray::SizeFor(length);
- EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map,
- slot,
- object,
- object_size);
+ EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kDoubleAlignment>(
+ map,
+ slot,
+ object,
+ object_size);
}
@@ -1719,7 +1827,8 @@ class ScavengingVisitor : public StaticVisitorBase {
HeapObject** slot,
HeapObject* object) {
int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
- EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+ EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
+ map, slot, object, object_size);
}
@@ -1728,7 +1837,8 @@ class ScavengingVisitor : public StaticVisitorBase {
HeapObject* object) {
int object_size = SeqAsciiString::cast(object)->
SeqAsciiStringSize(map->instance_type());
- EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+ EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
+ map, slot, object, object_size);
}
@@ -1737,7 +1847,8 @@ class ScavengingVisitor : public StaticVisitorBase {
HeapObject* object) {
int object_size = SeqTwoByteString::cast(object)->
SeqTwoByteStringSize(map->instance_type());
- EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+ EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE, kObjectAlignment>(
+ map, slot, object, object_size);
}
@@ -1780,7 +1891,8 @@ class ScavengingVisitor : public StaticVisitorBase {
}
int object_size = ConsString::kSize;
- EvacuateObject<POINTER_OBJECT, SMALL>(map, slot, object, object_size);
+ EvacuateObject<POINTER_OBJECT, SMALL, kObjectAlignment>(
+ map, slot, object, object_size);
}
template<ObjectContents object_contents>
@@ -1790,14 +1902,16 @@ class ScavengingVisitor : public StaticVisitorBase {
static inline void VisitSpecialized(Map* map,
HeapObject** slot,
HeapObject* object) {
- EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
+ EvacuateObject<object_contents, SMALL, kObjectAlignment>(
+ map, slot, object, object_size);
}
static inline void Visit(Map* map,
HeapObject** slot,
HeapObject* object) {
int object_size = map->instance_size();
- EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
+ EvacuateObject<object_contents, SMALL, kObjectAlignment>(
+ map, slot, object, object_size);
}
};
@@ -1914,7 +2028,7 @@ MaybeObject* Heap::AllocateMap(InstanceType instance_type,
map->set_pre_allocated_property_fields(0);
map->init_instance_descriptors();
map->set_code_cache(empty_fixed_array(), SKIP_WRITE_BARRIER);
- map->set_prototype_transitions(empty_fixed_array(), SKIP_WRITE_BARRIER);
+ map->init_prototype_transitions(undefined_value());
map->set_unused_property_fields(0);
map->set_bit_field(0);
map->set_bit_field2(1 << Map::kIsExtensible);
@@ -2053,15 +2167,15 @@ bool Heap::CreateInitialMaps() {
// Fix the instance_descriptors for the existing maps.
meta_map()->init_instance_descriptors();
meta_map()->set_code_cache(empty_fixed_array());
- meta_map()->set_prototype_transitions(empty_fixed_array());
+ meta_map()->init_prototype_transitions(undefined_value());
fixed_array_map()->init_instance_descriptors();
fixed_array_map()->set_code_cache(empty_fixed_array());
- fixed_array_map()->set_prototype_transitions(empty_fixed_array());
+ fixed_array_map()->init_prototype_transitions(undefined_value());
oddball_map()->init_instance_descriptors();
oddball_map()->set_code_cache(empty_fixed_array());
- oddball_map()->set_prototype_transitions(empty_fixed_array());
+ oddball_map()->init_prototype_transitions(undefined_value());
// Fix prototype object for existing maps.
meta_map()->set_prototype(null_value());
@@ -2360,7 +2474,7 @@ bool Heap::CreateApiObjects() {
  // bottleneck to trap the Smi-only -> fast elements transition, and there
  // appears to be no benefit in optimizing this case.
Map* new_neander_map = Map::cast(obj);
- new_neander_map->set_elements_kind(FAST_ELEMENTS);
+ new_neander_map->set_elements_kind(TERMINAL_FAST_ELEMENTS_KIND);
set_neander_map(new_neander_map);
{ MaybeObject* maybe_obj = AllocateJSObjectFromMap(neander_map());
@@ -2908,8 +3022,8 @@ MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) {
share->set_initial_map(undefined_value(), SKIP_WRITE_BARRIER);
share->set_this_property_assignments(undefined_value(), SKIP_WRITE_BARRIER);
share->set_ast_node_count(0);
- share->set_deopt_counter(FLAG_deopt_every_n_times);
- share->set_ic_age(0);
+ share->set_stress_deopt_counter(FLAG_deopt_every_n_times);
+ share->set_counters(0);
// Set integer fields (smi or int, depending on the architecture).
share->set_length(0);
@@ -2941,6 +3055,7 @@ MaybeObject* Heap::AllocateJSMessageObject(String* type,
}
JSMessageObject* message = JSMessageObject::cast(result);
message->set_properties(Heap::empty_fixed_array(), SKIP_WRITE_BARRIER);
+ message->initialize_elements();
message->set_elements(Heap::empty_fixed_array(), SKIP_WRITE_BARRIER);
message->set_type(type);
message->set_arguments(arguments);
@@ -3217,6 +3332,8 @@ MaybeObject* Heap::AllocateExternalStringFromAscii(
return Failure::OutOfMemoryException();
}
+ ASSERT(String::IsAscii(resource->data(), static_cast<int>(length)));
+
Map* map = external_ascii_string_map();
Object* result;
{ MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
@@ -3554,7 +3671,8 @@ MaybeObject* Heap::AllocateFunctionPrototype(JSFunction* function) {
Map* new_map;
ASSERT(object_function->has_initial_map());
{ MaybeObject* maybe_map =
- object_function->initial_map()->CopyDropTransitions();
+ object_function->initial_map()->CopyDropTransitions(
+ DescriptorArray::MAY_BE_SHARED);
if (!maybe_map->To<Map>(&new_map)) return maybe_map;
}
Object* prototype;
@@ -3642,7 +3760,7 @@ MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) {
// Check the state of the object
ASSERT(JSObject::cast(result)->HasFastProperties());
- ASSERT(JSObject::cast(result)->HasFastElements());
+ ASSERT(JSObject::cast(result)->HasFastObjectElements());
return result;
}
@@ -3687,7 +3805,7 @@ MaybeObject* Heap::AllocateInitialMap(JSFunction* fun) {
map->set_inobject_properties(in_object_properties);
map->set_unused_property_fields(in_object_properties);
map->set_prototype(prototype);
- ASSERT(map->has_fast_elements());
+ ASSERT(map->has_fast_object_elements());
// If the function has only simple this property assignments add
// field descriptors for these to the initial map as the object
@@ -3702,7 +3820,8 @@ MaybeObject* Heap::AllocateInitialMap(JSFunction* fun) {
fun->shared()->ForbidInlineConstructor();
} else {
DescriptorArray* descriptors;
- { MaybeObject* maybe_descriptors_obj = DescriptorArray::Allocate(count);
+ { MaybeObject* maybe_descriptors_obj =
+ DescriptorArray::Allocate(count, DescriptorArray::MAY_BE_SHARED);
if (!maybe_descriptors_obj->To<DescriptorArray>(&descriptors)) {
return maybe_descriptors_obj;
}
@@ -3804,8 +3923,7 @@ MaybeObject* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure) {
InitializeJSObjectFromMap(JSObject::cast(obj),
FixedArray::cast(properties),
map);
- ASSERT(JSObject::cast(obj)->HasFastSmiOnlyElements() ||
- JSObject::cast(obj)->HasFastElements());
+ ASSERT(JSObject::cast(obj)->HasFastSmiOrObjectElements());
return obj;
}
@@ -3833,6 +3951,16 @@ MaybeObject* Heap::AllocateJSObject(JSFunction* constructor,
}
+MaybeObject* Heap::AllocateJSModule() {
+ // Allocate a fresh map. Modules do not have a prototype.
+ Map* map;
+ MaybeObject* maybe_map = AllocateMap(JS_MODULE_TYPE, JSModule::kSize);
+ if (!maybe_map->To(&map)) return maybe_map;
+ // Allocate the object based on the map.
+ return AllocateJSObjectFromMap(map, TENURED);
+}
+
+
MaybeObject* Heap::AllocateJSArrayAndStorage(
ElementsKind elements_kind,
int length,
@@ -3840,6 +3968,9 @@ MaybeObject* Heap::AllocateJSArrayAndStorage(
ArrayStorageAllocationMode mode,
PretenureFlag pretenure) {
ASSERT(capacity >= length);
+ if (length != 0 && mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE) {
+ elements_kind = GetHoleyElementsKind(elements_kind);
+ }
MaybeObject* maybe_array = AllocateJSArray(elements_kind, pretenure);
JSArray* array;
if (!maybe_array->To(&array)) return maybe_array;
@@ -3860,8 +3991,7 @@ MaybeObject* Heap::AllocateJSArrayAndStorage(
maybe_elms = AllocateFixedDoubleArrayWithHoles(capacity);
}
} else {
- ASSERT(elements_kind == FAST_ELEMENTS ||
- elements_kind == FAST_SMI_ONLY_ELEMENTS);
+ ASSERT(IsFastSmiOrObjectElementsKind(elements_kind));
if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) {
maybe_elms = AllocateUninitializedFixedArray(capacity);
} else {
@@ -3887,6 +4017,7 @@ MaybeObject* Heap::AllocateJSArrayWithElements(
array->set_elements(elements);
array->set_length(Smi::FromInt(elements->length()));
+ array->ValidateElements();
return array;
}
@@ -3969,7 +4100,7 @@ MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) {
// Fill these accessors into the dictionary.
DescriptorArray* descs = map->instance_descriptors();
for (int i = 0; i < descs->number_of_descriptors(); i++) {
- PropertyDetails details(descs->GetDetails(i));
+ PropertyDetails details = descs->GetDetails(i);
ASSERT(details.type() == CALLBACKS); // Only accessors are expected.
PropertyDetails d =
PropertyDetails(details.attributes(), CALLBACKS, details.index());
@@ -4371,6 +4502,16 @@ MaybeObject* Heap::AllocateRawAsciiString(int length, PretenureFlag pretenure) {
String::cast(result)->set_length(length);
String::cast(result)->set_hash_field(String::kEmptyHashField);
ASSERT_EQ(size, HeapObject::cast(result)->Size());
+
+#ifdef DEBUG
+ if (FLAG_verify_heap) {
+ // Initialize string's content to ensure ASCII-ness (character range 0-127)
+ // as required when verifying the heap.
+ char* dest = SeqAsciiString::cast(result)->GetChars();
+ memset(dest, 0x0F, length * kCharSize);
+ }
+#endif // DEBUG
+
return result;
}
@@ -4417,13 +4558,13 @@ MaybeObject* Heap::AllocateJSArray(
Context* global_context = isolate()->context()->global_context();
JSFunction* array_function = global_context->array_function();
Map* map = array_function->initial_map();
- if (elements_kind == FAST_DOUBLE_ELEMENTS) {
- map = Map::cast(global_context->double_js_array_map());
- } else if (elements_kind == FAST_ELEMENTS || !FLAG_smi_only_arrays) {
- map = Map::cast(global_context->object_js_array_map());
- } else {
- ASSERT(elements_kind == FAST_SMI_ONLY_ELEMENTS);
- ASSERT(map == global_context->smi_js_array_map());
+ Object* maybe_map_array = global_context->js_array_maps();
+ if (!maybe_map_array->IsUndefined()) {
+ Object* maybe_transitioned_map =
+ FixedArray::cast(maybe_map_array)->get(elements_kind);
+ if (!maybe_transitioned_map->IsUndefined()) {
+ map = Map::cast(maybe_transitioned_map);
+ }
}
return AllocateJSObjectFromMap(map, pretenure);
@@ -4662,6 +4803,11 @@ MaybeObject* Heap::AllocateRawFixedDoubleArray(int length,
AllocationSpace space =
(pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
int size = FixedDoubleArray::SizeFor(length);
+
+#ifndef V8_HOST_ARCH_64_BIT
+ size += kPointerSize;
+#endif
+
if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) {
// Too big for new space.
space = LO_SPACE;
@@ -4674,7 +4820,12 @@ MaybeObject* Heap::AllocateRawFixedDoubleArray(int length,
AllocationSpace retry_space =
(size <= Page::kMaxNonCodeHeapObjectSize) ? OLD_DATA_SPACE : LO_SPACE;
- return AllocateRaw(size, space, retry_space);
+ HeapObject* object;
+ { MaybeObject* maybe_object = AllocateRaw(size, space, retry_space);
+ if (!maybe_object->To<HeapObject>(&object)) return maybe_object;
+ }
+
+ return EnsureDoubleAligned(this, object, size);
}
@@ -4698,15 +4849,29 @@ MaybeObject* Heap::AllocateGlobalContext() {
}
Context* context = reinterpret_cast<Context*>(result);
context->set_map_no_write_barrier(global_context_map());
- context->set_smi_js_array_map(undefined_value());
- context->set_double_js_array_map(undefined_value());
- context->set_object_js_array_map(undefined_value());
+ context->set_js_array_maps(undefined_value());
ASSERT(context->IsGlobalContext());
ASSERT(result->IsContext());
return result;
}
+MaybeObject* Heap::AllocateModuleContext(Context* previous,
+ ScopeInfo* scope_info) {
+ Object* result;
+ { MaybeObject* maybe_result =
+ AllocateFixedArrayWithHoles(scope_info->ContextLength(), TENURED);
+ if (!maybe_result->ToObject(&result)) return maybe_result;
+ }
+ Context* context = reinterpret_cast<Context*>(result);
+ context->set_map_no_write_barrier(module_context_map());
+ context->set_previous(previous);
+ context->set_extension(scope_info);
+ context->set_global(previous->global());
+ return context;
+}
+
+
MaybeObject* Heap::AllocateFunctionContext(int length, JSFunction* function) {
ASSERT(length >= Context::MIN_CONTEXT_SLOTS);
Object* result;
@@ -4849,8 +5014,10 @@ void Heap::AdvanceIdleIncrementalMarking(intptr_t step_size) {
bool Heap::IdleNotification(int hint) {
const int kMaxHint = 1000;
- intptr_t size_factor = Min(Max(hint, 30), kMaxHint) / 10;
- // The size factor is in range [3..100].
+ intptr_t size_factor = Min(Max(hint, 20), kMaxHint) / 4;
+ // The size factor is in range [5..250]. The numbers here are chosen from
+  // experiments. If you change them, make sure to test with
+  // chrome/performance_ui_tests --gtest_filter="GeneralMixMemoryTest.*"
intptr_t step_size = size_factor * IncrementalMarking::kAllocatedThreshold;
if (contexts_disposed_ > 0) {
@@ -4874,11 +5041,14 @@ bool Heap::IdleNotification(int hint) {
// Take into account that we might have decided to delay full collection
// because incremental marking is in progress.
ASSERT((contexts_disposed_ == 0) || !incremental_marking()->IsStopped());
+    // After context disposal there is likely a lot of garbage remaining, so
+    // reset the idle notification counters in order to trigger more
+    // incremental GCs on subsequent idle notifications.
+ StartIdleRound();
return false;
}
- if (hint >= kMaxHint || !FLAG_incremental_marking ||
- FLAG_expose_gc || Serializer::enabled()) {
+ if (!FLAG_incremental_marking || FLAG_expose_gc || Serializer::enabled()) {
return IdleGlobalGC();
}
@@ -4917,10 +5087,6 @@ bool Heap::IdleNotification(int hint) {
}
if (incremental_marking()->IsStopped()) {
- if (!WorthStartingGCWhenIdle()) {
- FinishIdleRound();
- return true;
- }
incremental_marking()->Start();
}
@@ -5558,6 +5724,11 @@ bool Heap::ConfigureHeap(int max_semispace_size,
intptr_t max_executable_size) {
if (HasBeenSetUp()) return false;
+ if (FLAG_stress_compaction) {
+ // This will cause more frequent GCs when stressing.
+ max_semispace_size_ = Page::kPageSize;
+ }
+
if (max_semispace_size > 0) {
if (max_semispace_size < Page::kPageSize) {
max_semispace_size = Page::kPageSize;
@@ -5662,16 +5833,6 @@ void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
}
-intptr_t Heap::PromotedSpaceSize() {
- return old_pointer_space_->Size()
- + old_data_space_->Size()
- + code_space_->Size()
- + map_space_->Size()
- + cell_space_->Size()
- + lo_space_->Size();
-}
-
-
intptr_t Heap::PromotedSpaceSizeOfObjects() {
return old_pointer_space_->SizeOfObjects()
+ old_data_space_->SizeOfObjects()
@@ -5682,7 +5843,7 @@ intptr_t Heap::PromotedSpaceSizeOfObjects() {
}
-int Heap::PromotedExternalMemorySize() {
+intptr_t Heap::PromotedExternalMemorySize() {
if (amount_of_external_allocated_memory_
<= amount_of_external_allocated_memory_at_last_global_gc_) return 0;
return amount_of_external_allocated_memory_
@@ -5855,6 +6016,15 @@ class HeapDebugUtils {
#endif
+
+V8_DECLARE_ONCE(initialize_gc_once);
+
+static void InitializeGCOnce() {
+ InitializeScavengingVisitorsTables();
+ NewSpaceScavenger::Initialize();
+ MarkCompactCollector::Initialize();
+}
+
bool Heap::SetUp(bool create_heap_objects) {
#ifdef DEBUG
allocation_timeout_ = FLAG_gc_interval;
@@ -5873,15 +6043,7 @@ bool Heap::SetUp(bool create_heap_objects) {
if (!ConfigureHeapDefault()) return false;
}
- gc_initializer_mutex.Pointer()->Lock();
- static bool initialized_gc = false;
- if (!initialized_gc) {
- initialized_gc = true;
- InitializeScavengingVisitorsTables();
- NewSpaceScavenger::Initialize();
- MarkCompactCollector::Initialize();
- }
- gc_initializer_mutex.Pointer()->Unlock();
+ CallOnce(&initialize_gc_once, &InitializeGCOnce);
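This hunk replaces a hand-rolled mutex-plus-static-bool with V8's CallOnce/V8_DECLARE_ONCE helpers, so the one-time GC table setup no longer needs a dedicated mutex. Outside this tree the same guarantee is usually spelled with std::call_once; a hedged stand-in sketch (std::once_flag here is an assumption for illustration, not the API used in the diff):

// Once-only initialization sketch using std::call_once as a stand-in for
// V8's CallOnce/V8_DECLARE_ONCE (an assumption for illustration only).
#include <cstdio>
#include <mutex>
#include <thread>

static std::once_flag initialize_gc_once;

static void InitializeGCOnce() {
  // Process-wide setup that must run exactly once, even under races.
  std::printf("GC tables initialized\n");
}

static void SetUpHeap() {
  std::call_once(initialize_gc_once, InitializeGCOnce);
}

int main() {
  std::thread a(SetUpHeap);
  std::thread b(SetUpHeap);
  a.join();
  b.join();  // "GC tables initialized" is printed exactly once
  return 0;
}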
MarkMapPointersAsEncoded(false);
@@ -5993,6 +6155,11 @@ void Heap::SetStackLimits() {
void Heap::TearDown() {
+#ifdef DEBUG
+ if (FLAG_verify_heap) {
+ Verify();
+ }
+#endif
if (FLAG_print_cumulative_gc_stat) {
PrintF("\n\n");
PrintF("gc_count=%d ", gc_count_);
diff --git a/deps/v8/src/heap.h b/deps/v8/src/heap.h
index 0391e0e526..dd1f710b25 100644
--- a/deps/v8/src/heap.h
+++ b/deps/v8/src/heap.h
@@ -243,7 +243,8 @@ namespace internal {
V(compare_ic_symbol, ".compare_ic") \
V(infinity_symbol, "Infinity") \
V(minus_infinity_symbol, "-Infinity") \
- V(hidden_stack_trace_symbol, "v8::hidden_stack_trace")
+ V(hidden_stack_trace_symbol, "v8::hidden_stack_trace") \
+ V(query_colon_symbol, "(?:)")
// Forward declarations.
class GCTracer;
@@ -529,6 +530,8 @@ class Heap {
MUST_USE_RESULT MaybeObject* AllocateJSObject(
JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED);
+ MUST_USE_RESULT MaybeObject* AllocateJSModule();
+
// Allocate a JSArray with no elements
MUST_USE_RESULT MaybeObject* AllocateEmptyJSArray(
ElementsKind elements_kind,
@@ -618,7 +621,7 @@ class Heap {
MUST_USE_RESULT MaybeObject* AllocateMap(
InstanceType instance_type,
int instance_size,
- ElementsKind elements_kind = FAST_ELEMENTS);
+ ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);
// Allocates a partial map for bootstrapping.
MUST_USE_RESULT MaybeObject* AllocatePartialMap(InstanceType instance_type,
@@ -820,6 +823,10 @@ class Heap {
// Allocate a global (but otherwise uninitialized) context.
MUST_USE_RESULT MaybeObject* AllocateGlobalContext();
+ // Allocate a module context.
+ MUST_USE_RESULT MaybeObject* AllocateModuleContext(Context* previous,
+ ScopeInfo* scope_info);
+
// Allocate a function context.
MUST_USE_RESULT MaybeObject* AllocateFunctionContext(int length,
JSFunction* function);
@@ -1326,7 +1333,8 @@ class Heap {
// Adjusts the amount of registered external memory.
// Returns the adjusted value.
- inline int AdjustAmountOfExternalAllocatedMemory(int change_in_bytes);
+ inline intptr_t AdjustAmountOfExternalAllocatedMemory(
+ intptr_t change_in_bytes);
// Allocate uninitialized fixed array.
MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int length);
@@ -1334,7 +1342,7 @@ class Heap {
PretenureFlag pretenure);
inline intptr_t PromotedTotalSize() {
- return PromotedSpaceSize() + PromotedExternalMemorySize();
+ return PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
}
// True if we have reached the allocation limit in the old generation that
@@ -1355,19 +1363,6 @@ class Heap {
static const intptr_t kMinimumAllocationLimit =
8 * (Page::kPageSize > MB ? Page::kPageSize : MB);
- // When we sweep lazily we initially guess that there is no garbage on the
- // heap and set the limits for the next GC accordingly. As we sweep we find
- // out that some of the pages contained garbage and we have to adjust
- // downwards the size of the heap. This means the limits that control the
- // timing of the next GC also need to be adjusted downwards.
- void LowerOldGenLimits(intptr_t adjustment) {
- size_of_old_gen_at_last_old_space_gc_ -= adjustment;
- old_gen_promotion_limit_ =
- OldGenPromotionLimit(size_of_old_gen_at_last_old_space_gc_);
- old_gen_allocation_limit_ =
- OldGenAllocationLimit(size_of_old_gen_at_last_old_space_gc_);
- }
-
intptr_t OldGenPromotionLimit(intptr_t old_gen_size) {
const int divisor = FLAG_stress_compaction ? 10 : 3;
intptr_t limit =
@@ -1411,6 +1406,12 @@ class Heap {
kRootListLength
};
+ STATIC_CHECK(kUndefinedValueRootIndex == Internals::kUndefinedValueRootIndex);
+ STATIC_CHECK(kNullValueRootIndex == Internals::kNullValueRootIndex);
+ STATIC_CHECK(kTrueValueRootIndex == Internals::kTrueValueRootIndex);
+ STATIC_CHECK(kFalseValueRootIndex == Internals::kFalseValueRootIndex);
+ STATIC_CHECK(kempty_symbolRootIndex == Internals::kEmptySymbolRootIndex);
+
MUST_USE_RESULT MaybeObject* NumberToString(
Object* number, bool check_number_string_cache = true);
MUST_USE_RESULT MaybeObject* Uint32ToString(
@@ -1442,6 +1443,8 @@ class Heap {
inline bool NextGCIsLikelyToBeFull() {
if (FLAG_gc_global) return true;
+ if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;
+
intptr_t total_promoted = PromotedTotalSize();
intptr_t adjusted_promotion_limit =
@@ -1452,7 +1455,7 @@ class Heap {
intptr_t adjusted_allocation_limit =
old_gen_allocation_limit_ - new_space_.Capacity() / 5;
- if (PromotedSpaceSize() >= adjusted_allocation_limit) return true;
+ if (PromotedSpaceSizeOfObjects() >= adjusted_allocation_limit) return true;
return false;
}
@@ -1490,7 +1493,6 @@ class Heap {
GCTracer* tracer() { return tracer_; }
// Returns the size of objects residing in non new spaces.
- intptr_t PromotedSpaceSize();
intptr_t PromotedSpaceSizeOfObjects();
double total_regexp_code_generated() { return total_regexp_code_generated_; }
@@ -1595,7 +1597,7 @@ class Heap {
}
void AgeInlineCaches() {
- ++global_ic_age_;
+ global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax;
}
private:
@@ -1605,6 +1607,8 @@ class Heap {
// more expedient to get at the isolate directly from within Heap methods.
Isolate* isolate_;
+ Object* roots_[kRootListLength];
+
intptr_t code_range_size_;
int reserved_semispace_size_;
int max_semispace_size_;
@@ -1646,7 +1650,7 @@ class Heap {
int gc_post_processing_depth_;
// Returns the amount of external memory registered since last global gc.
- int PromotedExternalMemorySize();
+ intptr_t PromotedExternalMemorySize();
int ms_count_; // how many mark-sweep collections happened
unsigned int gc_count_; // how many gc happened
@@ -1711,17 +1715,15 @@ class Heap {
// The amount of external memory registered through the API kept alive
// by global handles
- int amount_of_external_allocated_memory_;
+ intptr_t amount_of_external_allocated_memory_;
// Caches the amount of external memory registered at the last global gc.
- int amount_of_external_allocated_memory_at_last_global_gc_;
+ intptr_t amount_of_external_allocated_memory_at_last_global_gc_;
// Indicates that an allocation has failed in the old generation since the
// last GC.
int old_gen_exhausted_;
- Object* roots_[kRootListLength];
-
Object* global_contexts_list_;
StoreBufferRebuilder store_buffer_rebuilder_;
@@ -1974,13 +1976,6 @@ class Heap {
return (scavenges_since_last_idle_round_ >= kIdleScavengeThreshold);
}
- bool WorthStartingGCWhenIdle() {
- if (contexts_disposed_ > 0) {
- return true;
- }
- return incremental_marking()->WorthActivating();
- }
-
// Estimates how many milliseconds a Mark-Sweep would take to complete.
// In idle notification handler we assume that this function will return:
// - a number less than 10 for small heaps, which are less than 8Mb.
diff --git a/deps/v8/src/hydrogen-instructions.cc b/deps/v8/src/hydrogen-instructions.cc
index f698da46d4..4bb25096f7 100644
--- a/deps/v8/src/hydrogen-instructions.cc
+++ b/deps/v8/src/hydrogen-instructions.cc
@@ -336,7 +336,8 @@ HUseListNode* HValue::RemoveUse(HValue* value, int index) {
// Do not reuse use list nodes in debug mode, zap them.
if (current != NULL) {
HUseListNode* temp =
- new HUseListNode(current->value(), current->index(), NULL);
+ new(block()->zone())
+ HUseListNode(current->value(), current->index(), NULL);
current->Zap();
current = temp;
}
@@ -416,6 +417,7 @@ void HValue::Kill() {
SetFlag(kIsDead);
for (int i = 0; i < OperandCount(); ++i) {
HValue* operand = OperandAt(i);
+ if (operand == NULL) continue;
HUseListNode* first = operand->use_list_;
if (first != NULL && first->value() == this && first->index() == i) {
operand->use_list_ = first->tail();
@@ -462,7 +464,8 @@ void HValue::PrintChangesTo(StringStream* stream) {
add_comma = true; \
stream->Add(#type); \
}
- GVN_FLAG_LIST(PRINT_DO);
+ GVN_TRACKED_FLAG_LIST(PRINT_DO);
+ GVN_UNTRACKED_FLAG_LIST(PRINT_DO);
#undef PRINT_DO
}
stream->Add("]");
@@ -493,8 +496,8 @@ void HValue::RegisterUse(int index, HValue* new_value) {
if (new_value != NULL) {
if (removed == NULL) {
- new_value->use_list_ =
- new HUseListNode(this, index, new_value->use_list_);
+ new_value->use_list_ = new(new_value->block()->zone()) HUseListNode(
+ this, index, new_value->use_list_);
} else {
removed->set_tail(new_value->use_list_);
new_value->use_list_ = removed;
@@ -599,6 +602,9 @@ void HInstruction::InsertAfter(HInstruction* previous) {
SetBlock(block);
previous->next_ = this;
if (next != NULL) next->previous_ = this;
+ if (block->last() == previous) {
+ block->set_last(this);
+ }
}
@@ -608,6 +614,7 @@ void HInstruction::Verify() {
HBasicBlock* cur_block = block();
for (int i = 0; i < OperandCount(); ++i) {
HValue* other_operand = OperandAt(i);
+ if (other_operand == NULL) continue;
HBasicBlock* other_block = other_operand->block();
if (cur_block == other_block) {
if (!other_operand->IsPhi()) {
@@ -866,6 +873,17 @@ HValue* HBitwise::Canonicalize() {
}
+HValue* HBitNot::Canonicalize() {
+ // Optimize ~~x, a common pattern used for ToInt32(x).
+ if (value()->IsBitNot()) {
+ HValue* result = HBitNot::cast(value())->value();
+ ASSERT(result->representation().IsInteger32());
+ return result;
+ }
+ return this;
+}
+
+
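HBitNot::Canonicalize above folds the JavaScript '~~x' idiom: once the inner bit-not has forced an int32 value, the outer bit-not merely undoes it, so the pair collapses to the inner operand. A toy peephole showing the same fold on an illustrative expression node:

// Toy peephole for the ~~x fold above, on an illustrative (non-V8) node type.
#include <cassert>
#include <cstdint>

struct Node {
  enum Kind { kConst, kBitNot } kind;
  int32_t value;   // meaningful for kConst
  Node* operand;   // meaningful for kBitNot
};

// Collapse BitNot(BitNot(x)) to x; leave anything else untouched.
static Node* Canonicalize(Node* n) {
  if (n->kind == Node::kBitNot && n->operand->kind == Node::kBitNot) {
    return n->operand->operand;
  }
  return n;
}

int main() {
  Node x{Node::kConst, 42, nullptr};
  Node inner{Node::kBitNot, 0, &x};
  Node outer{Node::kBitNot, 0, &inner};
  assert(Canonicalize(&outer) == &x);  // ~~x folds back to x
  assert(~~INT32_C(42) == 42);         // and plain int32 arithmetic agrees
  return 0;
}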
HValue* HAdd::Canonicalize() {
if (!representation().IsInteger32()) return this;
if (CheckUsesForFlag(kTruncatingToInt32)) ClearFlag(kCanOverflow);
@@ -916,6 +934,62 @@ void HJSArrayLength::PrintDataTo(StringStream* stream) {
}
+HValue* HUnaryMathOperation::Canonicalize() {
+ if (op() == kMathFloor) {
+ // If the input is integer32 then we replace the floor instruction
+ // with its input. This happens before the representation changes are
+ // introduced.
+ if (value()->representation().IsInteger32()) return value();
+
+#ifdef V8_TARGET_ARCH_ARM
+ if (value()->IsDiv() && (value()->UseCount() == 1)) {
+ // TODO(2038): Implement this optimization for non ARM architectures.
+ HDiv* hdiv = HDiv::cast(value());
+ HValue* left = hdiv->left();
+ HValue* right = hdiv->right();
+ // Try to simplify left and right values of the division.
+ HValue* new_left =
+ LChunkBuilder::SimplifiedDividendForMathFloorOfDiv(left);
+ HValue* new_right =
+ LChunkBuilder::SimplifiedDivisorForMathFloorOfDiv(right);
+
+ // Return if left or right are not optimizable.
+ if ((new_left == NULL) || (new_right == NULL)) return this;
+
+ // Insert the new values in the graph.
+ if (new_left->IsInstruction() &&
+ !HInstruction::cast(new_left)->IsLinked()) {
+ HInstruction::cast(new_left)->InsertBefore(this);
+ }
+ if (new_right->IsInstruction() &&
+ !HInstruction::cast(new_right)->IsLinked()) {
+ HInstruction::cast(new_right)->InsertBefore(this);
+ }
+ HMathFloorOfDiv* instr = new(block()->zone()) HMathFloorOfDiv(context(),
+ new_left,
+ new_right);
+ // Replace this HMathFloor instruction by the new HMathFloorOfDiv.
+ instr->InsertBefore(this);
+ ReplaceAllUsesWith(instr);
+ Kill();
+ // We know the division had no other uses than this HMathFloor. Delete it.
+ // Also delete the arguments of the division if they are not used any
+ // more.
+ hdiv->DeleteAndReplaceWith(NULL);
+ ASSERT(left->IsChange() || left->IsConstant());
+ ASSERT(right->IsChange() || right->IsConstant());
+ if (left->HasNoUses()) left->DeleteAndReplaceWith(NULL);
+ if (right->HasNoUses()) right->DeleteAndReplaceWith(NULL);
+
+ // Return NULL to remove this instruction from the graph.
+ return NULL;
+ }
+#endif // V8_TARGET_ARCH_ARM
+ }
+ return this;
+}
+
+
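The ARM-only branch of HUnaryMathOperation::Canonicalize fuses Math.floor(a / b) into a single HMathFloorOfDiv instruction. For int32 inputs, Math.floor(a / b) denotes flooring division (rounding toward negative infinity), which differs from C++'s truncating '/' whenever the result is negative; a small standalone sketch of that arithmetic:

// Flooring int32 division, i.e. the value Math.floor(a / b) denotes for int32
// inputs. Assumes b != 0 and excludes the INT32_MIN / -1 overflow case.
#include <cassert>
#include <cstdint>

static int32_t FloorDiv(int32_t a, int32_t b) {
  int32_t q = a / b;                              // C++ '/' truncates toward 0
  if ((a % b != 0) && ((a < 0) != (b < 0))) --q;  // fix up negative quotients
  return q;
}

int main() {
  assert(FloorDiv(7, 2) == 3);
  assert(FloorDiv(-7, 2) == -4);  // truncating division would give -3
  assert(FloorDiv(7, -2) == -4);
  return 0;
}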
HValue* HCheckInstanceType::Canonicalize() {
if (check_ == IS_STRING &&
!value()->type().IsUninitialized() &&
@@ -965,16 +1039,13 @@ void HCheckInstanceType::GetCheckMaskAndTag(uint8_t* mask, uint8_t* tag) {
}
-void HCheckMap::PrintDataTo(StringStream* stream) {
+void HCheckMaps::PrintDataTo(StringStream* stream) {
value()->PrintNameTo(stream);
- stream->Add(" %p", *map());
- if (mode() == REQUIRE_EXACT_MAP) {
- stream->Add(" [EXACT]");
- } else if (!has_element_transitions_) {
- stream->Add(" [EXACT*]");
- } else {
- stream->Add(" [MATCH ELEMENTS]");
+ stream->Add(" [%p", *map_set()->first());
+ for (int i = 1; i < map_set()->length(); ++i) {
+ stream->Add(",%p", *map_set()->at(i));
}
+ stream->Add("]");
}
@@ -1181,7 +1252,7 @@ void HPhi::PrintTo(StringStream* stream) {
void HPhi::AddInput(HValue* value) {
- inputs_.Add(NULL);
+ inputs_.Add(NULL, value->block()->zone());
SetOperandAt(OperandCount() - 1, value);
// Mark phis that may have 'arguments' directly or indirectly as an operand.
if (!CheckFlag(kIsArguments) && value->CheckFlag(kIsArguments)) {
@@ -1228,14 +1299,33 @@ void HPhi::InitRealUses(int phi_id) {
for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
HValue* value = it.value();
if (!value->IsPhi()) {
- Representation rep = value->RequiredInputRepresentation(it.index());
+ Representation rep = value->ObservedInputRepresentation(it.index());
non_phi_uses_[rep.kind()] += value->LoopWeight();
+ if (FLAG_trace_representation) {
+ PrintF("%d %s is used by %d %s as %s\n",
+ this->id(),
+ this->Mnemonic(),
+ value->id(),
+ value->Mnemonic(),
+ rep.Mnemonic());
+ }
}
}
}
void HPhi::AddNonPhiUsesFrom(HPhi* other) {
+ if (FLAG_trace_representation) {
+ PrintF("adding to %d %s uses of %d %s: i%d d%d t%d\n",
+ this->id(),
+ this->Mnemonic(),
+ other->id(),
+ other->Mnemonic(),
+ other->non_phi_uses_[Representation::kInteger32],
+ other->non_phi_uses_[Representation::kDouble],
+ other->non_phi_uses_[Representation::kTagged]);
+ }
+
for (int i = 0; i < Representation::kNumRepresentations; i++) {
indirect_uses_[i] += other->non_phi_uses_[i];
}
@@ -1249,6 +1339,12 @@ void HPhi::AddIndirectUsesTo(int* dest) {
}
+void HPhi::ResetInteger32Uses() {
+ non_phi_uses_[Representation::kInteger32] = 0;
+ indirect_uses_[Representation::kInteger32] = 0;
+}
+
+
void HSimulate::PrintDataTo(StringStream* stream) {
stream->Add("id=%d", ast_id());
if (pop_count_ > 0) stream->Add(" pop %d", pop_count_);
@@ -1302,18 +1398,18 @@ HConstant::HConstant(Handle<Object> handle, Representation r)
}
-HConstant* HConstant::CopyToRepresentation(Representation r) const {
+HConstant* HConstant::CopyToRepresentation(Representation r, Zone* zone) const {
if (r.IsInteger32() && !has_int32_value_) return NULL;
if (r.IsDouble() && !has_double_value_) return NULL;
- return new HConstant(handle_, r);
+ return new(zone) HConstant(handle_, r);
}
-HConstant* HConstant::CopyToTruncatedInt32() const {
+HConstant* HConstant::CopyToTruncatedInt32(Zone* zone) const {
if (!has_double_value_) return NULL;
int32_t truncated = NumberToInt32(*handle_);
- return new HConstant(FACTORY->NewNumberFromInt(truncated),
- Representation::Integer32());
+ return new(zone) HConstant(FACTORY->NewNumberFromInt(truncated),
+ Representation::Integer32());
}
@@ -1522,17 +1618,51 @@ void HLoadNamedField::PrintDataTo(StringStream* stream) {
}
+// Returns true if an instance of this map can never find a property with this
+// name in its prototype chain. This means all prototypes up to the top are
+// fast and don't have the name in them. It would be good if we could optimize
+// polymorphic loads where the property is sometimes found in the prototype
+// chain.
+static bool PrototypeChainCanNeverResolve(
+ Handle<Map> map, Handle<String> name) {
+ Isolate* isolate = map->GetIsolate();
+ Object* current = map->prototype();
+ while (current != isolate->heap()->null_value()) {
+ if (current->IsJSGlobalProxy() ||
+ current->IsGlobalObject() ||
+ !current->IsJSObject() ||
+ JSObject::cast(current)->IsAccessCheckNeeded() ||
+ !JSObject::cast(current)->HasFastProperties()) {
+ return false;
+ }
+
+ LookupResult lookup(isolate);
+ JSObject::cast(current)->map()->LookupInDescriptors(NULL, *name, &lookup);
+ if (lookup.IsFound()) {
+ if (lookup.type() != MAP_TRANSITION) return false;
+ } else if (!lookup.IsCacheable()) {
+ return false;
+ }
+
+ current = JSObject::cast(current)->GetPrototype();
+ }
+ return true;
+}
+
+
HLoadNamedFieldPolymorphic::HLoadNamedFieldPolymorphic(HValue* context,
HValue* object,
SmallMapList* types,
- Handle<String> name)
- : types_(Min(types->length(), kMaxLoadPolymorphism)),
+ Handle<String> name,
+ Zone* zone)
+ : types_(Min(types->length(), kMaxLoadPolymorphism), zone),
name_(name),
need_generic_(false) {
SetOperandAt(0, context);
SetOperandAt(1, object);
set_representation(Representation::Tagged());
SetGVNFlag(kDependsOnMaps);
+ SmallMapList negative_lookups;
for (int i = 0;
i < types->length() && types_.length() < kMaxLoadPolymorphism;
++i) {
@@ -1548,21 +1678,39 @@ HLoadNamedFieldPolymorphic::HLoadNamedFieldPolymorphic(HValue* context,
} else {
SetGVNFlag(kDependsOnBackingStoreFields);
}
- types_.Add(types->at(i));
+ types_.Add(types->at(i), zone);
break;
}
case CONSTANT_FUNCTION:
- types_.Add(types->at(i));
+ types_.Add(types->at(i), zone);
+ break;
+ case MAP_TRANSITION:
+ if (PrototypeChainCanNeverResolve(map, name)) {
+ negative_lookups.Add(types->at(i), zone);
+ }
break;
default:
break;
}
+ } else if (lookup.IsCacheable()) {
+ if (PrototypeChainCanNeverResolve(map, name)) {
+ negative_lookups.Add(types->at(i), zone);
+ }
}
}
- if (types_.length() == types->length() && FLAG_deoptimize_uncommon_cases) {
+ bool need_generic =
+ (types->length() != negative_lookups.length() + types_.length());
+ if (!need_generic && FLAG_deoptimize_uncommon_cases) {
SetFlag(kUseGVN);
+ for (int i = 0; i < negative_lookups.length(); i++) {
+ types_.Add(negative_lookups.at(i), zone);
+ }
} else {
+ // We don't have an easy way to handle both a call (to the generic stub) and
+ // a deopt in the same hydrogen instruction, so in this case we don't add
+ // the negative lookups which can deopt - just let the generic stub handle
+ // them.
SetAllSideEffects();
need_generic_ = true;
}
@@ -1607,11 +1755,14 @@ void HLoadKeyedFastElement::PrintDataTo(StringStream* stream) {
stream->Add("[");
key()->PrintNameTo(stream);
stream->Add("]");
+ if (RequiresHoleCheck()) {
+ stream->Add(" check_hole");
+ }
}
bool HLoadKeyedFastElement::RequiresHoleCheck() {
- if (hole_check_mode_ == OMIT_HOLE_CHECK) {
+ if (IsFastPackedElementsKind(elements_kind())) {
return false;
}
@@ -1657,12 +1808,11 @@ HValue* HLoadKeyedGeneric::Canonicalize() {
new(block()->zone()) HCheckMapValue(object(), names_cache->map());
HInstruction* index = new(block()->zone()) HLoadKeyedFastElement(
index_cache,
- key_load->key(),
- HLoadKeyedFastElement::OMIT_HOLE_CHECK);
- HLoadFieldByIndex* load = new(block()->zone()) HLoadFieldByIndex(
- object(), index);
+ key_load->key());
map_check->InsertBefore(this);
index->InsertBefore(this);
+ HLoadFieldByIndex* load = new(block()->zone()) HLoadFieldByIndex(
+ object(), index);
load->InsertBefore(this);
return load;
}
@@ -1706,8 +1856,11 @@ void HLoadKeyedSpecializedArrayElement::PrintDataTo(
stream->Add("pixel");
break;
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -1736,6 +1889,9 @@ void HStoreNamedField::PrintDataTo(StringStream* stream) {
stream->Add(" = ");
value()->PrintNameTo(stream);
stream->Add(" @%d%s", offset(), is_in_object() ? "[in-object]" : "");
+ if (NeedsWriteBarrier()) {
+ stream->Add(" (write-barrier)");
+ }
if (!transition().is_null()) {
stream->Add(" (transition map %p)", *transition());
}
@@ -1801,9 +1957,12 @@ void HStoreKeyedSpecializedArrayElement::PrintDataTo(
case EXTERNAL_PIXEL_ELEMENTS:
stream->Add("pixel");
break;
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -1818,7 +1977,13 @@ void HStoreKeyedSpecializedArrayElement::PrintDataTo(
void HTransitionElementsKind::PrintDataTo(StringStream* stream) {
object()->PrintNameTo(stream);
- stream->Add(" %p -> %p", *original_map(), *transitioned_map());
+ ElementsKind from_kind = original_map()->elements_kind();
+ ElementsKind to_kind = transitioned_map()->elements_kind();
+ stream->Add(" %p [%s] -> %p [%s]",
+ *original_map(),
+ ElementsAccessor::ForKind(from_kind)->name(),
+ *transitioned_map(),
+ ElementsAccessor::ForKind(to_kind)->name());
}
@@ -1879,7 +2044,7 @@ HType HValue::CalculateInferredType() {
}
-HType HCheckMap::CalculateInferredType() {
+HType HCheckMaps::CalculateInferredType() {
return value()->type();
}
@@ -2089,6 +2254,17 @@ HValue* HAdd::EnsureAndPropagateNotMinusZero(BitVector* visited) {
}
+bool HStoreKeyedFastDoubleElement::NeedsCanonicalization() {
+ // If value was loaded from unboxed double backing store or
+ // converted from an integer then we don't have to canonicalize it.
+ if (value()->IsLoadKeyedFastDoubleElement() ||
+ (value()->IsChange() && HChange::cast(value())->from().IsInteger32())) {
+ return false;
+ }
+ return true;
+}
+
+
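HStoreKeyedFastDoubleElement::NeedsCanonicalization skips the canonicalization step when the value comes straight from an unboxed double load or from an int32 conversion, since those sources can only produce canonical bit patterns. The reason for canonicalizing at all is presumably that the unboxed backing store reserves a special NaN bit pattern (the hole), so arbitrary NaN payloads must not be written verbatim; a hedged sketch of the store-side canonicalization:

// Hedged sketch of store-side double canonicalization. The canonical pattern
// chosen here (std::numeric_limits<double>::quiet_NaN()) is an assumption for
// illustration; V8's actual encoding is not shown in this diff.
#include <cmath>
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <limits>

// Collapse every NaN to one fixed bit pattern before storing it unboxed, so a
// stray NaN payload can never collide with a reserved sentinel pattern.
static void StoreCanonicalized(double* slot, double v) {
  *slot = std::isnan(v) ? std::numeric_limits<double>::quiet_NaN() : v;
}

int main() {
  double backing[1];
  uint64_t bits = UINT64_C(0x7ff8000000000042);  // a NaN with an odd payload
  double weird_nan;
  std::memcpy(&weird_nan, &bits, sizeof bits);
  StoreCanonicalized(&backing[0], weird_nan);
  std::printf("stored a NaN? %d\n", std::isnan(backing[0]) ? 1 : 0);  // 1
  return 0;
}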
#define H_CONSTANT_INT32(val) \
new(zone) HConstant(FACTORY->NewNumberFromInt(val, TENURED), \
Representation::Integer32())
@@ -2257,6 +2433,13 @@ void HIn::PrintDataTo(StringStream* stream) {
}
+void HBitwise::PrintDataTo(StringStream* stream) {
+ stream->Add(Token::Name(op_));
+ stream->Add(" ");
+ HBitwiseBinaryOperation::PrintDataTo(stream);
+}
+
+
Representation HPhi::InferredRepresentation() {
bool double_occurred = false;
bool int32_occurred = false;
diff --git a/deps/v8/src/hydrogen-instructions.h b/deps/v8/src/hydrogen-instructions.h
index b63e647e1c..780d57d61a 100644
--- a/deps/v8/src/hydrogen-instructions.h
+++ b/deps/v8/src/hydrogen-instructions.h
@@ -85,7 +85,7 @@ class LChunkBuilder;
V(Change) \
V(CheckFunction) \
V(CheckInstanceType) \
- V(CheckMap) \
+ V(CheckMaps) \
V(CheckNonSmi) \
V(CheckPrototypeMaps) \
V(CheckSmi) \
@@ -140,6 +140,7 @@ class LChunkBuilder;
V(LoadNamedField) \
V(LoadNamedFieldPolymorphic) \
V(LoadNamedGeneric) \
+ V(MathFloorOfDiv) \
V(Mod) \
V(Mul) \
V(ObjectLiteral) \
@@ -188,7 +189,10 @@ class LChunkBuilder;
V(DateField) \
V(WrapReceiver)
-#define GVN_FLAG_LIST(V) \
+#define GVN_TRACKED_FLAG_LIST(V) \
+ V(NewSpacePromotion)
+
+#define GVN_UNTRACKED_FLAG_LIST(V) \
V(Calls) \
V(InobjectFields) \
V(BackingStoreFields) \
@@ -506,14 +510,18 @@ class HUseIterator BASE_EMBEDDED {
// There must be one corresponding kDepends flag for every kChanges flag and
// the order of the kChanges flags must be exactly the same as of the kDepends
-// flags.
+// flags. All tracked flags should appear before untracked ones.
enum GVNFlag {
// Declare global value numbering flags.
#define DECLARE_FLAG(type) kChanges##type, kDependsOn##type,
- GVN_FLAG_LIST(DECLARE_FLAG)
+ GVN_TRACKED_FLAG_LIST(DECLARE_FLAG)
+ GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG)
#undef DECLARE_FLAG
kAfterLastFlag,
- kLastFlag = kAfterLastFlag - 1
+ kLastFlag = kAfterLastFlag - 1,
+#define COUNT_FLAG(type) + 1
+ kNumberOfTrackedSideEffects = 0 GVN_TRACKED_FLAG_LIST(COUNT_FLAG)
+#undef COUNT_FLAG
};
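kNumberOfTrackedSideEffects above is computed by the preprocessor: the tracked-flag X-macro expands to one '+ 1' per entry, so the constant stays in sync with GVN_TRACKED_FLAG_LIST automatically. The counting idiom in isolation, on an illustrative list that is not part of V8:

// The X-macro counting idiom in isolation; COLOR_LIST is illustrative only.
#include <cstdio>

#define COLOR_LIST(V) \
  V(Red)              \
  V(Green)            \
  V(Blue)

enum Color {
#define DECLARE_ENUM(name) k##name,
  COLOR_LIST(DECLARE_ENUM)
#undef DECLARE_ENUM
  kAfterLastColor,
#define COUNT_ENTRY(name) + 1
  kNumberOfColors = 0 COLOR_LIST(COUNT_ENTRY)
#undef COUNT_ENTRY
};

int main() {
  std::printf("%d colors\n", static_cast<int>(kNumberOfColors));  // 3 colors
  return 0;
}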
typedef EnumSet<GVNFlag> GVNFlagSet;
@@ -530,6 +538,10 @@ class HValue: public ZoneObject {
// implement DataEquals(), which will be used to determine if other
// occurrences of the instruction are indeed the same.
kUseGVN,
+ // Track instructions that are dominating side effects. If an instruction
+ // sets this flag, it must implement SetSideEffectDominator() and should
+ // indicate which side effects to track by setting GVN flags.
+ kTrackSideEffectDominators,
kCanOverflow,
kBailoutOnMinusZero,
kCanBeDivByZero,
@@ -544,6 +556,12 @@ class HValue: public ZoneObject {
static const int kChangesToDependsFlagsLeftShift = 1;
+ static GVNFlag ChangesFlagFromInt(int x) {
+ return static_cast<GVNFlag>(x * 2);
+ }
+ static GVNFlag DependsOnFlagFromInt(int x) {
+ return static_cast<GVNFlag>(x * 2 + 1);
+ }
static GVNFlagSet ConvertChangesToDependsFlags(GVNFlagSet flags) {
return GVNFlagSet(flags.ToIntegral() << kChangesToDependsFlagsLeftShift);
}
@@ -702,6 +720,11 @@ class HValue: public ZoneObject {
return representation();
}
+ // Type feedback access.
+ virtual Representation ObservedInputRepresentation(int index) {
+ return RequiredInputRepresentation(index);
+ }
+
// This gives the instruction an opportunity to replace itself with an
// instruction that does the same in some better way. To replace an
// instruction with a new one, first add the new instruction to the graph,
@@ -726,6 +749,13 @@ class HValue: public ZoneObject {
virtual HType CalculateInferredType();
+ // This function must be overridden for instructions which have the
+ // kTrackSideEffectDominators flag set, to track instructions that are
+ // dominating side effects.
+ virtual void SetSideEffectDominator(GVNFlag side_effect, HValue* dominator) {
+ UNREACHABLE();
+ }
+
#ifdef DEBUG
virtual void Verify() = 0;
#endif
@@ -756,7 +786,8 @@ class HValue: public ZoneObject {
GVNFlagSet result;
// Create changes mask.
#define ADD_FLAG(type) result.Add(kDependsOn##type);
- GVN_FLAG_LIST(ADD_FLAG)
+ GVN_TRACKED_FLAG_LIST(ADD_FLAG)
+ GVN_UNTRACKED_FLAG_LIST(ADD_FLAG)
#undef ADD_FLAG
return result;
}
@@ -765,7 +796,8 @@ class HValue: public ZoneObject {
GVNFlagSet result;
// Create changes mask.
#define ADD_FLAG(type) result.Add(kChanges##type);
- GVN_FLAG_LIST(ADD_FLAG)
+ GVN_TRACKED_FLAG_LIST(ADD_FLAG)
+ GVN_UNTRACKED_FLAG_LIST(ADD_FLAG)
#undef ADD_FLAG
return result;
}
@@ -781,6 +813,7 @@ class HValue: public ZoneObject {
// an executing program (i.e. are not safe to repeat, move or remove);
static GVNFlagSet AllObservableSideEffectsFlagSet() {
GVNFlagSet result = AllChangesFlagSet();
+ result.Remove(kChangesNewSpacePromotion);
result.Remove(kChangesElementsKind);
result.Remove(kChangesElementsPointer);
result.Remove(kChangesMaps);
@@ -959,7 +992,8 @@ class HSoftDeoptimize: public HTemplateInstruction<0> {
class HDeoptimize: public HControlInstruction {
public:
- explicit HDeoptimize(int environment_length) : values_(environment_length) { }
+ HDeoptimize(int environment_length, Zone* zone)
+ : values_(environment_length, zone) { }
virtual Representation RequiredInputRepresentation(int index) {
return Representation::None();
@@ -978,8 +1012,8 @@ class HDeoptimize: public HControlInstruction {
UNREACHABLE();
}
- void AddEnvironmentValue(HValue* value) {
- values_.Add(NULL);
+ void AddEnvironmentValue(HValue* value, Zone* zone) {
+ values_.Add(NULL, zone);
SetOperandAt(values_.length() - 1, value);
}
@@ -1196,6 +1230,7 @@ class HChange: public HUnaryOperation {
SetFlag(kUseGVN);
if (deoptimize_on_undefined) SetFlag(kDeoptimizeOnUndefined);
if (is_truncating) SetFlag(kTruncatingToInt32);
+ if (to.IsTagged()) SetGVNFlag(kChangesNewSpacePromotion);
}
virtual HValue* EnsureAndPropagateNotMinusZero(BitVector* visited);
@@ -1246,11 +1281,12 @@ class HClampToUint8: public HUnaryOperation {
class HSimulate: public HInstruction {
public:
- HSimulate(int ast_id, int pop_count)
+ HSimulate(int ast_id, int pop_count, Zone* zone)
: ast_id_(ast_id),
pop_count_(pop_count),
- values_(2),
- assigned_indexes_(2) {}
+ values_(2, zone),
+ assigned_indexes_(2, zone),
+ zone_(zone) {}
virtual ~HSimulate() {}
virtual void PrintDataTo(StringStream* stream);
@@ -1298,9 +1334,9 @@ class HSimulate: public HInstruction {
private:
static const int kNoIndex = -1;
void AddValue(int index, HValue* value) {
- assigned_indexes_.Add(index);
+ assigned_indexes_.Add(index, zone_);
// Resize the list of pushed values.
- values_.Add(NULL);
+ values_.Add(NULL, zone_);
// Set the operand through the base method in HValue to make sure that the
// use lists are correctly updated.
SetOperandAt(values_.length() - 1, value);
@@ -1309,6 +1345,7 @@ class HSimulate: public HInstruction {
int pop_count_;
ZoneList<HValue*> values_;
ZoneList<int> assigned_indexes_;
+ Zone* zone_;
};
@@ -1321,6 +1358,7 @@ class HStackCheck: public HTemplateInstruction<1> {
HStackCheck(HValue* context, Type type) : type_(type) {
SetOperandAt(0, context);
+ SetGVNFlag(kChangesNewSpacePromotion);
}
HValue* context() { return OperandAt(0); }
@@ -1354,13 +1392,15 @@ class HEnterInlined: public HTemplateInstruction<0> {
FunctionLiteral* function,
CallKind call_kind,
bool is_construct,
- Variable* arguments)
+ Variable* arguments_var,
+ ZoneList<HValue*>* arguments_values)
: closure_(closure),
arguments_count_(arguments_count),
function_(function),
call_kind_(call_kind),
is_construct_(is_construct),
- arguments_(arguments) {
+ arguments_var_(arguments_var),
+ arguments_values_(arguments_values) {
}
virtual void PrintDataTo(StringStream* stream);
@@ -1375,7 +1415,8 @@ class HEnterInlined: public HTemplateInstruction<0> {
return Representation::None();
}
- Variable* arguments() { return arguments_; }
+ Variable* arguments_var() { return arguments_var_; }
+ ZoneList<HValue*>* arguments_values() { return arguments_values_; }
DECLARE_CONCRETE_INSTRUCTION(EnterInlined)
@@ -1385,19 +1426,28 @@ class HEnterInlined: public HTemplateInstruction<0> {
FunctionLiteral* function_;
CallKind call_kind_;
bool is_construct_;
- Variable* arguments_;
+ Variable* arguments_var_;
+ ZoneList<HValue*>* arguments_values_;
};
class HLeaveInlined: public HTemplateInstruction<0> {
public:
- HLeaveInlined() {}
+ explicit HLeaveInlined(bool arguments_pushed)
+ : arguments_pushed_(arguments_pushed) { }
virtual Representation RequiredInputRepresentation(int index) {
return Representation::None();
}
+ bool arguments_pushed() {
+ return arguments_pushed_;
+ }
+
DECLARE_CONCRETE_INSTRUCTION(LeaveInlined)
+
+ private:
+ bool arguments_pushed_;
};
@@ -1605,14 +1655,26 @@ class HInvokeFunction: public HBinaryCall {
: HBinaryCall(context, function, argument_count) {
}
+ HInvokeFunction(HValue* context,
+ HValue* function,
+ Handle<JSFunction> known_function,
+ int argument_count)
+ : HBinaryCall(context, function, argument_count),
+ known_function_(known_function) {
+ }
+
virtual Representation RequiredInputRepresentation(int index) {
return Representation::Tagged();
}
HValue* context() { return first(); }
HValue* function() { return second(); }
+ Handle<JSFunction> known_function() { return known_function_; }
DECLARE_CONCRETE_INSTRUCTION(InvokeFunction)
+
+ private:
+ Handle<JSFunction> known_function_;
};
@@ -1786,7 +1848,9 @@ class HCallRuntime: public HCall<1> {
class HJSArrayLength: public HTemplateInstruction<2> {
public:
- HJSArrayLength(HValue* value, HValue* typecheck) {
+ HJSArrayLength(HValue* value, HValue* typecheck,
+ HType type = HType::Tagged()) {
+ set_type(type);
// The length of an array is stored as a tagged value in the array
// object. It is guaranteed to be 32 bit integer, but it can be
// represented as either a smi or heap number.
@@ -1810,7 +1874,7 @@ class HJSArrayLength: public HTemplateInstruction<2> {
DECLARE_CONCRETE_INSTRUCTION(JSArrayLength)
protected:
- virtual bool DataEquals(HValue* other) { return true; }
+ virtual bool DataEquals(HValue* other_raw) { return true; }
};
@@ -1865,6 +1929,8 @@ class HBitNot: public HUnaryOperation {
}
virtual HType CalculateInferredType();
+ virtual HValue* Canonicalize();
+
DECLARE_CONCRETE_INSTRUCTION(BitNot)
protected:
@@ -1887,6 +1953,7 @@ class HUnaryMathOperation: public HTemplateInstruction<2> {
case kMathAbs:
set_representation(Representation::Tagged());
SetFlag(kFlexibleRepresentation);
+ SetGVNFlag(kChangesNewSpacePromotion);
break;
case kMathSqrt:
case kMathPowHalf:
@@ -1895,6 +1962,7 @@ class HUnaryMathOperation: public HTemplateInstruction<2> {
case kMathCos:
case kMathTan:
set_representation(Representation::Double());
+ SetGVNFlag(kChangesNewSpacePromotion);
break;
default:
UNREACHABLE();
@@ -1935,15 +2003,7 @@ class HUnaryMathOperation: public HTemplateInstruction<2> {
}
}
- virtual HValue* Canonicalize() {
- // If the input is integer32 then we replace the floor instruction
- // with its inputs. This happens before the representation changes are
- // introduced.
- if (op() == kMathFloor) {
- if (value()->representation().IsInteger32()) return value();
- }
- return this;
- }
+ virtual HValue* Canonicalize();
BuiltinFunctionId op() const { return op_; }
const char* OpName() const;
@@ -2003,14 +2063,10 @@ class HLoadExternalArrayPointer: public HUnaryOperation {
};
-class HCheckMap: public HTemplateInstruction<2> {
+class HCheckMaps: public HTemplateInstruction<2> {
public:
- HCheckMap(HValue* value,
- Handle<Map> map,
- HValue* typecheck = NULL,
- CompareMapMode mode = REQUIRE_EXACT_MAP)
- : map_(map),
- mode_(mode) {
+ HCheckMaps(HValue* value, Handle<Map> map, Zone* zone,
+ HValue* typecheck = NULL) {
SetOperandAt(0, value);
// If callers don't depend on a typecheck, they can pass in NULL. In that
// case we use a copy of the |value| argument as a dummy value.
@@ -2018,14 +2074,43 @@ class HCheckMap: public HTemplateInstruction<2> {
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
SetGVNFlag(kDependsOnMaps);
- // If the map to check doesn't have the untransitioned elements, it must not
- // be hoisted above TransitionElements instructions.
- if (mode == REQUIRE_EXACT_MAP || !map->has_fast_smi_only_elements()) {
- SetGVNFlag(kDependsOnElementsKind);
+ SetGVNFlag(kDependsOnElementsKind);
+ map_set()->Add(map, zone);
+ }
+ HCheckMaps(HValue* value, SmallMapList* maps, Zone* zone) {
+ SetOperandAt(0, value);
+ SetOperandAt(1, value);
+ set_representation(Representation::Tagged());
+ SetFlag(kUseGVN);
+ SetGVNFlag(kDependsOnMaps);
+ SetGVNFlag(kDependsOnElementsKind);
+ for (int i = 0; i < maps->length(); i++) {
+ map_set()->Add(maps->at(i), zone);
}
- has_element_transitions_ =
- map->LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, NULL) != NULL ||
- map->LookupElementsTransitionMap(FAST_ELEMENTS, NULL) != NULL;
+ map_set()->Sort();
+ }
+
+ static HCheckMaps* NewWithTransitions(HValue* object, Handle<Map> map,
+ Zone* zone) {
+ HCheckMaps* check_map = new(zone) HCheckMaps(object, map, zone);
+ SmallMapList* map_set = check_map->map_set();
+
+ // Since transitioned elements maps of the initial map don't fail the map
+ // check, the CheckMaps instruction doesn't need to depend on ElementsKinds.
+ check_map->ClearGVNFlag(kDependsOnElementsKind);
+
+ ElementsKind kind = map->elements_kind();
+ bool packed = IsFastPackedElementsKind(kind);
+ while (CanTransitionToMoreGeneralFastElementsKind(kind, packed)) {
+ kind = GetNextMoreGeneralFastElementsKind(kind, packed);
+ Map* transitioned_map =
+ map->LookupElementsTransitionMap(kind);
+ if (transitioned_map) {
+ map_set->Add(Handle<Map>(transitioned_map), zone);
+ }
+ };
+ map_set->Sort();
+ return check_map;
}
virtual Representation RequiredInputRepresentation(int index) {
@@ -2035,25 +2120,23 @@ class HCheckMap: public HTemplateInstruction<2> {
virtual HType CalculateInferredType();
HValue* value() { return OperandAt(0); }
- Handle<Map> map() const { return map_; }
- CompareMapMode mode() const { return mode_; }
+ SmallMapList* map_set() { return &map_set_; }
- DECLARE_CONCRETE_INSTRUCTION(CheckMap)
+ DECLARE_CONCRETE_INSTRUCTION(CheckMaps)
protected:
virtual bool DataEquals(HValue* other) {
- HCheckMap* b = HCheckMap::cast(other);
- // Two CheckMaps instructions are DataEqual if their maps are identical and
- // they have the same mode. The mode comparison can be ignored if the map
- // has no elements transitions.
- return map_.is_identical_to(b->map()) &&
- (b->mode() == mode() || !has_element_transitions_);
+ HCheckMaps* b = HCheckMaps::cast(other);
+ // Relies on the fact that map_set has been sorted before.
+ if (map_set()->length() != b->map_set()->length()) return false;
+ for (int i = 0; i < map_set()->length(); i++) {
+ if (!map_set()->at(i).is_identical_to(b->map_set()->at(i))) return false;
+ }
+ return true;
}
private:
- bool has_element_transitions_;
- Handle<Map> map_;
- CompareMapMode mode_;
+ SmallMapList map_set_;
};
@@ -2092,17 +2175,17 @@ class HCheckFunction: public HUnaryOperation {
class HCheckInstanceType: public HUnaryOperation {
public:
- static HCheckInstanceType* NewIsSpecObject(HValue* value) {
- return new HCheckInstanceType(value, IS_SPEC_OBJECT);
+ static HCheckInstanceType* NewIsSpecObject(HValue* value, Zone* zone) {
+ return new(zone) HCheckInstanceType(value, IS_SPEC_OBJECT);
}
- static HCheckInstanceType* NewIsJSArray(HValue* value) {
- return new HCheckInstanceType(value, IS_JS_ARRAY);
+ static HCheckInstanceType* NewIsJSArray(HValue* value, Zone* zone) {
+ return new(zone) HCheckInstanceType(value, IS_JS_ARRAY);
}
- static HCheckInstanceType* NewIsString(HValue* value) {
- return new HCheckInstanceType(value, IS_STRING);
+ static HCheckInstanceType* NewIsString(HValue* value, Zone* zone) {
+ return new(zone) HCheckInstanceType(value, IS_STRING);
}
- static HCheckInstanceType* NewIsSymbol(HValue* value) {
- return new HCheckInstanceType(value, IS_SYMBOL);
+ static HCheckInstanceType* NewIsSymbol(HValue* value, Zone* zone) {
+ return new(zone) HCheckInstanceType(value, IS_SYMBOL);
}
virtual void PrintDataTo(StringStream* stream);
@@ -2251,8 +2334,8 @@ class HCheckSmi: public HUnaryOperation {
class HPhi: public HValue {
public:
- explicit HPhi(int merged_index)
- : inputs_(2),
+ HPhi(int merged_index, Zone* zone)
+ : inputs_(2, zone),
merged_index_(merged_index),
phi_id_(-1),
is_live_(false),
@@ -2331,11 +2414,15 @@ class HPhi: public HValue {
bool AllOperandsConvertibleToInteger() {
for (int i = 0; i < OperandCount(); ++i) {
- if (!OperandAt(i)->IsConvertibleToInteger()) return false;
+ if (!OperandAt(i)->IsConvertibleToInteger()) {
+ return false;
+ }
}
return true;
}
+ void ResetInteger32Uses();
+
protected:
virtual void DeleteFromGraph();
virtual void InternalSetOperandAt(int index, HValue* value) {
@@ -2407,8 +2494,8 @@ class HConstant: public HTemplateInstruction<0> {
virtual void PrintDataTo(StringStream* stream);
virtual HType CalculateInferredType();
bool IsInteger() const { return handle_->IsSmi(); }
- HConstant* CopyToRepresentation(Representation r) const;
- HConstant* CopyToTruncatedInt32() const;
+ HConstant* CopyToRepresentation(Representation r, Zone* zone) const;
+ HConstant* CopyToTruncatedInt32(Zone* zone) const;
bool HasInteger32Value() const { return has_int32_value_; }
int32_t Integer32Value() const {
ASSERT(HasInteger32Value());
@@ -2485,6 +2572,7 @@ class HBinaryOperation: public HTemplateInstruction<3> {
if (IsCommutative() && left()->IsConstant()) return right();
return left();
}
+
HValue* MostConstantOperand() {
if (IsCommutative() && left()->IsConstant()) return left();
return right();
@@ -2549,7 +2637,7 @@ class HApplyArguments: public HTemplateInstruction<4> {
class HArgumentsElements: public HTemplateInstruction<0> {
public:
- HArgumentsElements() {
+ explicit HArgumentsElements(bool from_inlined) : from_inlined_(from_inlined) {
// The value produced by this instruction is a pointer into the stack
// that looks as if it was a smi because of alignment.
set_representation(Representation::Tagged());
@@ -2562,8 +2650,12 @@ class HArgumentsElements: public HTemplateInstruction<0> {
return Representation::None();
}
+ bool from_inlined() const { return from_inlined_; }
+
protected:
virtual bool DataEquals(HValue* other) { return true; }
+
+ bool from_inlined_;
};
@@ -2646,6 +2738,9 @@ class HBitwiseBinaryOperation: public HBinaryOperation {
set_representation(Representation::Tagged());
SetFlag(kFlexibleRepresentation);
SetAllSideEffects();
+ observed_input_representation_[0] = Representation::Tagged();
+ observed_input_representation_[1] = Representation::None();
+ observed_input_representation_[2] = Representation::None();
}
virtual Representation RequiredInputRepresentation(int index) {
@@ -2665,7 +2760,38 @@ class HBitwiseBinaryOperation: public HBinaryOperation {
virtual HType CalculateInferredType();
+ virtual Representation ObservedInputRepresentation(int index) {
+ return observed_input_representation_[index];
+ }
+
+ void InitializeObservedInputRepresentation(Representation r) {
+ observed_input_representation_[1] = r;
+ observed_input_representation_[2] = r;
+ }
+
DECLARE_ABSTRACT_INSTRUCTION(BitwiseBinaryOperation)
+
+ private:
+ Representation observed_input_representation_[3];
+};
+
+
+class HMathFloorOfDiv: public HBinaryOperation {
+ public:
+ HMathFloorOfDiv(HValue* context, HValue* left, HValue* right)
+ : HBinaryOperation(context, left, right) {
+ set_representation(Representation::Integer32());
+ SetFlag(kUseGVN);
+ }
+
+ virtual Representation RequiredInputRepresentation(int index) {
+ return Representation::Integer32();
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(MathFloorOfDiv)
+
+ protected:
+ virtual bool DataEquals(HValue* other) { return true; }
};
@@ -3083,6 +3209,7 @@ class HPower: public HTemplateInstruction<2> {
SetOperandAt(1, right);
set_representation(Representation::Double());
SetFlag(kUseGVN);
+ SetGVNFlag(kChangesNewSpacePromotion);
}
HValue* left() { return OperandAt(0); }
@@ -3282,6 +3409,8 @@ class HBitwise: public HBitwiseBinaryOperation {
HValue* left,
HValue* right);
+ virtual void PrintDataTo(StringStream* stream);
+
DECLARE_CONCRETE_INSTRUCTION(Bitwise)
protected:
@@ -3529,6 +3658,12 @@ inline bool StoringValueNeedsWriteBarrier(HValue* value) {
}
+inline bool ReceiverObjectNeedsWriteBarrier(HValue* object,
+ HValue* new_space_dominator) {
+ return !object->IsAllocateObject() || (object != new_space_dominator);
+}
+
+
class HStoreGlobalCell: public HUnaryOperation {
public:
HStoreGlobalCell(HValue* value,
@@ -3759,7 +3894,8 @@ class HLoadNamedFieldPolymorphic: public HTemplateInstruction<2> {
HLoadNamedFieldPolymorphic(HValue* context,
HValue* object,
SmallMapList* types,
- Handle<String> name);
+ Handle<String> name,
+ Zone* zone);
HValue* context() { return OperandAt(0); }
HValue* object() { return OperandAt(1); }
@@ -3836,15 +3972,29 @@ class HLoadFunctionPrototype: public HUnaryOperation {
virtual bool DataEquals(HValue* other) { return true; }
};
-
-class HLoadKeyedFastElement: public HTemplateInstruction<2> {
+class ArrayInstructionInterface {
public:
- enum HoleCheckMode { PERFORM_HOLE_CHECK, OMIT_HOLE_CHECK };
+ virtual HValue* GetKey() = 0;
+ virtual void SetKey(HValue* key) = 0;
+ virtual void SetIndexOffset(uint32_t index_offset) = 0;
+ virtual bool IsDehoisted() = 0;
+ virtual void SetDehoisted(bool is_dehoisted) = 0;
+ virtual ~ArrayInstructionInterface() { };
+};
+class HLoadKeyedFastElement
+ : public HTemplateInstruction<2>, public ArrayInstructionInterface {
+ public:
HLoadKeyedFastElement(HValue* obj,
HValue* key,
- HoleCheckMode hole_check_mode = PERFORM_HOLE_CHECK)
- : hole_check_mode_(hole_check_mode) {
+ ElementsKind elements_kind = FAST_ELEMENTS)
+ : bit_field_(0) {
+ ASSERT(IsFastSmiOrObjectElementsKind(elements_kind));
+ bit_field_ = ElementsKindField::encode(elements_kind);
+ if (IsFastSmiElementsKind(elements_kind) &&
+ IsFastPackedElementsKind(elements_kind)) {
+ set_type(HType::Smi());
+ }
SetOperandAt(0, obj);
SetOperandAt(1, key);
set_representation(Representation::Tagged());
@@ -3854,6 +4004,19 @@ class HLoadKeyedFastElement: public HTemplateInstruction<2> {
HValue* object() { return OperandAt(0); }
HValue* key() { return OperandAt(1); }
+ uint32_t index_offset() { return IndexOffsetField::decode(bit_field_); }
+ void SetIndexOffset(uint32_t index_offset) {
+ bit_field_ = IndexOffsetField::update(bit_field_, index_offset);
+ }
+ HValue* GetKey() { return key(); }
+ void SetKey(HValue* key) { SetOperandAt(1, key); }
+ bool IsDehoisted() { return IsDehoistedField::decode(bit_field_); }
+ void SetDehoisted(bool is_dehoisted) {
+ bit_field_ = IsDehoistedField::update(bit_field_, is_dehoisted);
+ }
+ ElementsKind elements_kind() const {
+ return ElementsKindField::decode(bit_field_);
+ }
virtual Representation RequiredInputRepresentation(int index) {
// The key is supposed to be Integer32.
@@ -3872,17 +4035,32 @@ class HLoadKeyedFastElement: public HTemplateInstruction<2> {
virtual bool DataEquals(HValue* other) {
if (!other->IsLoadKeyedFastElement()) return false;
HLoadKeyedFastElement* other_load = HLoadKeyedFastElement::cast(other);
- return hole_check_mode_ == other_load->hole_check_mode_;
+ if (IsDehoisted() && index_offset() != other_load->index_offset())
+ return false;
+ return elements_kind() == other_load->elements_kind();
}
private:
- HoleCheckMode hole_check_mode_;
+ class ElementsKindField: public BitField<ElementsKind, 0, 4> {};
+ class IndexOffsetField: public BitField<uint32_t, 4, 27> {};
+ class IsDehoistedField: public BitField<bool, 31, 1> {};
+ uint32_t bit_field_;
};
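
The private section above packs the elements kind (bits 0-3), the dehoisted index offset (bits 4-30) and a dehoisted flag (bit 31) into one word through BitField. A standalone sketch of that encode/update/decode pattern, using a simplified stand-in for V8's BitField template:

#include <cassert>
#include <stdint.h>

// Minimal stand-in for V8's BitField<Type, shift, size> helper.
template <class T, int shift, int size>
struct BitFieldSketch {
  static uint32_t mask() { return ((1u << size) - 1u) << shift; }
  static uint32_t encode(T value) {
    return static_cast<uint32_t>(value) << shift;
  }
  static uint32_t update(uint32_t previous, T value) {
    return (previous & ~mask()) | encode(value);
  }
  static T decode(uint32_t value) {
    return static_cast<T>((value & mask()) >> shift);
  }
};

// Same layout as HLoadKeyedFastElement::bit_field_ above.
typedef BitFieldSketch<int, 0, 4>       ElementsKindField;
typedef BitFieldSketch<uint32_t, 4, 27> IndexOffsetField;
typedef BitFieldSketch<bool, 31, 1>     IsDehoistedField;

int main() {
  uint32_t bits = ElementsKindField::encode(5);
  bits = IndexOffsetField::update(bits, 1234u);
  bits = IsDehoistedField::update(bits, true);
  assert(ElementsKindField::decode(bits) == 5);
  assert(IndexOffsetField::decode(bits) == 1234u);
  assert(IsDehoistedField::decode(bits));
  return 0;
}
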
-class HLoadKeyedFastDoubleElement: public HTemplateInstruction<2> {
+enum HoleCheckMode { PERFORM_HOLE_CHECK, OMIT_HOLE_CHECK };
+
+
+class HLoadKeyedFastDoubleElement
+ : public HTemplateInstruction<2>, public ArrayInstructionInterface {
public:
- HLoadKeyedFastDoubleElement(HValue* elements, HValue* key) {
+ HLoadKeyedFastDoubleElement(
+ HValue* elements,
+ HValue* key,
+ HoleCheckMode hole_check_mode = PERFORM_HOLE_CHECK)
+ : index_offset_(0),
+ is_dehoisted_(false),
+ hole_check_mode_(hole_check_mode) {
SetOperandAt(0, elements);
SetOperandAt(1, key);
set_representation(Representation::Double());
@@ -3892,6 +4070,12 @@ class HLoadKeyedFastDoubleElement: public HTemplateInstruction<2> {
HValue* elements() { return OperandAt(0); }
HValue* key() { return OperandAt(1); }
+ uint32_t index_offset() { return index_offset_; }
+ void SetIndexOffset(uint32_t index_offset) { index_offset_ = index_offset; }
+ HValue* GetKey() { return key(); }
+ void SetKey(HValue* key) { SetOperandAt(1, key); }
+ bool IsDehoisted() { return is_dehoisted_; }
+ void SetDehoisted(bool is_dehoisted) { is_dehoisted_ = is_dehoisted; }
virtual Representation RequiredInputRepresentation(int index) {
// The key is supposed to be Integer32.
@@ -3900,21 +4084,38 @@ class HLoadKeyedFastDoubleElement: public HTemplateInstruction<2> {
: Representation::Integer32();
}
+ bool RequiresHoleCheck() {
+ return hole_check_mode_ == PERFORM_HOLE_CHECK;
+ }
+
virtual void PrintDataTo(StringStream* stream);
DECLARE_CONCRETE_INSTRUCTION(LoadKeyedFastDoubleElement)
protected:
- virtual bool DataEquals(HValue* other) { return true; }
+ virtual bool DataEquals(HValue* other) {
+ if (!other->IsLoadKeyedFastDoubleElement()) return false;
+ HLoadKeyedFastDoubleElement* other_load =
+ HLoadKeyedFastDoubleElement::cast(other);
+ return hole_check_mode_ == other_load->hole_check_mode_;
+ }
+
+ private:
+ uint32_t index_offset_;
+ bool is_dehoisted_;
+ HoleCheckMode hole_check_mode_;
};
-class HLoadKeyedSpecializedArrayElement: public HTemplateInstruction<2> {
+class HLoadKeyedSpecializedArrayElement
+ : public HTemplateInstruction<2>, public ArrayInstructionInterface {
public:
HLoadKeyedSpecializedArrayElement(HValue* external_elements,
HValue* key,
ElementsKind elements_kind)
- : elements_kind_(elements_kind) {
+ : elements_kind_(elements_kind),
+ index_offset_(0),
+ is_dehoisted_(false) {
SetOperandAt(0, external_elements);
SetOperandAt(1, key);
if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
@@ -3942,6 +4143,12 @@ class HLoadKeyedSpecializedArrayElement: public HTemplateInstruction<2> {
HValue* external_pointer() { return OperandAt(0); }
HValue* key() { return OperandAt(1); }
ElementsKind elements_kind() const { return elements_kind_; }
+ uint32_t index_offset() { return index_offset_; }
+ void SetIndexOffset(uint32_t index_offset) { index_offset_ = index_offset; }
+ HValue* GetKey() { return key(); }
+ void SetKey(HValue* key) { SetOperandAt(1, key); }
+ bool IsDehoisted() { return is_dehoisted_; }
+ void SetDehoisted(bool is_dehoisted) { is_dehoisted_ = is_dehoisted; }
virtual Range* InferRange(Zone* zone);
@@ -3957,6 +4164,8 @@ class HLoadKeyedSpecializedArrayElement: public HTemplateInstruction<2> {
private:
ElementsKind elements_kind_;
+ uint32_t index_offset_;
+ bool is_dehoisted_;
};
@@ -3995,9 +4204,12 @@ class HStoreNamedField: public HTemplateInstruction<2> {
int offset)
: name_(name),
is_in_object_(in_object),
- offset_(offset) {
+ offset_(offset),
+ new_space_dominator_(NULL) {
SetOperandAt(0, obj);
SetOperandAt(1, val);
+ SetFlag(kTrackSideEffectDominators);
+ SetGVNFlag(kDependsOnNewSpacePromotion);
if (is_in_object_) {
SetGVNFlag(kChangesInobjectFields);
} else {
@@ -4010,6 +4222,10 @@ class HStoreNamedField: public HTemplateInstruction<2> {
virtual Representation RequiredInputRepresentation(int index) {
return Representation::Tagged();
}
+ virtual void SetSideEffectDominator(GVNFlag side_effect, HValue* dominator) {
+ ASSERT(side_effect == kChangesNewSpacePromotion);
+ new_space_dominator_ = dominator;
+ }
virtual void PrintDataTo(StringStream* stream);
HValue* object() { return OperandAt(0); }
@@ -4020,9 +4236,15 @@ class HStoreNamedField: public HTemplateInstruction<2> {
int offset() const { return offset_; }
Handle<Map> transition() const { return transition_; }
void set_transition(Handle<Map> map) { transition_ = map; }
+ HValue* new_space_dominator() const { return new_space_dominator_; }
bool NeedsWriteBarrier() {
- return StoringValueNeedsWriteBarrier(value());
+ return StoringValueNeedsWriteBarrier(value()) &&
+ ReceiverObjectNeedsWriteBarrier(object(), new_space_dominator());
+ }
+
+ bool NeedsWriteBarrierForMap() {
+ return ReceiverObjectNeedsWriteBarrier(object(), new_space_dominator());
}
private:
@@ -4030,6 +4252,7 @@ class HStoreNamedField: public HTemplateInstruction<2> {
bool is_in_object_;
int offset_;
Handle<Map> transition_;
+ HValue* new_space_dominator_;
};
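
HStoreNamedField above asks GVN to track the dominating new-space allocation (kTrackSideEffectDominators plus SetSideEffectDominator) and then uses ReceiverObjectNeedsWriteBarrier to elide the write barrier when the receiver is the inlined allocation itself and nothing that can promote new-space objects ran in between. A simplified standalone sketch of that predicate; the Instr type is a stand-in, not a V8 class:

#include <cassert>

struct Instr {
  bool is_inlined_allocation;
};

// Mirrors ReceiverObjectNeedsWriteBarrier: the barrier can only be skipped
// when the receiver is a fresh inlined allocation *and* that allocation is
// still the most recent dominating new-space-promotion point.
bool NeedsWriteBarrier(const Instr* receiver, const Instr* new_space_dominator) {
  return !receiver->is_inlined_allocation || receiver != new_space_dominator;
}

int main() {
  Instr alloc = {true};
  Instr call = {false};  // e.g. something that may trigger a GC
  assert(!NeedsWriteBarrier(&alloc, &alloc));  // store right after allocation
  assert(NeedsWriteBarrier(&alloc, &call));    // a promotion point intervened
  assert(NeedsWriteBarrier(&call, &call));     // receiver is not a fresh object
  return 0;
}
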
@@ -4068,11 +4291,12 @@ class HStoreNamedGeneric: public HTemplateInstruction<3> {
};
-class HStoreKeyedFastElement: public HTemplateInstruction<3> {
+class HStoreKeyedFastElement
+ : public HTemplateInstruction<3>, public ArrayInstructionInterface {
public:
HStoreKeyedFastElement(HValue* obj, HValue* key, HValue* val,
ElementsKind elements_kind = FAST_ELEMENTS)
- : elements_kind_(elements_kind) {
+ : elements_kind_(elements_kind), index_offset_(0), is_dehoisted_(false) {
SetOperandAt(0, obj);
SetOperandAt(1, key);
SetOperandAt(2, val);
@@ -4090,8 +4314,14 @@ class HStoreKeyedFastElement: public HTemplateInstruction<3> {
HValue* key() { return OperandAt(1); }
HValue* value() { return OperandAt(2); }
bool value_is_smi() {
- return elements_kind_ == FAST_SMI_ONLY_ELEMENTS;
+ return IsFastSmiElementsKind(elements_kind_);
}
+ uint32_t index_offset() { return index_offset_; }
+ void SetIndexOffset(uint32_t index_offset) { index_offset_ = index_offset; }
+ HValue* GetKey() { return key(); }
+ void SetKey(HValue* key) { SetOperandAt(1, key); }
+ bool IsDehoisted() { return is_dehoisted_; }
+ void SetDehoisted(bool is_dehoisted) { is_dehoisted_ = is_dehoisted; }
bool NeedsWriteBarrier() {
if (value_is_smi()) {
@@ -4107,14 +4337,18 @@ class HStoreKeyedFastElement: public HTemplateInstruction<3> {
private:
ElementsKind elements_kind_;
+ uint32_t index_offset_;
+ bool is_dehoisted_;
};
-class HStoreKeyedFastDoubleElement: public HTemplateInstruction<3> {
+class HStoreKeyedFastDoubleElement
+ : public HTemplateInstruction<3>, public ArrayInstructionInterface {
public:
HStoreKeyedFastDoubleElement(HValue* elements,
HValue* key,
- HValue* val) {
+ HValue* val)
+ : index_offset_(0), is_dehoisted_(false) {
SetOperandAt(0, elements);
SetOperandAt(1, key);
SetOperandAt(2, val);
@@ -4134,24 +4368,37 @@ class HStoreKeyedFastDoubleElement: public HTemplateInstruction<3> {
HValue* elements() { return OperandAt(0); }
HValue* key() { return OperandAt(1); }
HValue* value() { return OperandAt(2); }
+ uint32_t index_offset() { return index_offset_; }
+ void SetIndexOffset(uint32_t index_offset) { index_offset_ = index_offset; }
+ HValue* GetKey() { return key(); }
+ void SetKey(HValue* key) { SetOperandAt(1, key); }
+ bool IsDehoisted() { return is_dehoisted_; }
+ void SetDehoisted(bool is_dehoisted) { is_dehoisted_ = is_dehoisted; }
bool NeedsWriteBarrier() {
return StoringValueNeedsWriteBarrier(value());
}
+ bool NeedsCanonicalization();
+
virtual void PrintDataTo(StringStream* stream);
DECLARE_CONCRETE_INSTRUCTION(StoreKeyedFastDoubleElement)
+
+ private:
+ uint32_t index_offset_;
+ bool is_dehoisted_;
};
-class HStoreKeyedSpecializedArrayElement: public HTemplateInstruction<3> {
+class HStoreKeyedSpecializedArrayElement
+ : public HTemplateInstruction<3>, public ArrayInstructionInterface {
public:
HStoreKeyedSpecializedArrayElement(HValue* external_elements,
HValue* key,
HValue* val,
ElementsKind elements_kind)
- : elements_kind_(elements_kind) {
+ : elements_kind_(elements_kind), index_offset_(0), is_dehoisted_(false) {
SetGVNFlag(kChangesSpecializedArrayElements);
SetOperandAt(0, external_elements);
SetOperandAt(1, key);
@@ -4179,11 +4426,19 @@ class HStoreKeyedSpecializedArrayElement: public HTemplateInstruction<3> {
HValue* key() { return OperandAt(1); }
HValue* value() { return OperandAt(2); }
ElementsKind elements_kind() const { return elements_kind_; }
+ uint32_t index_offset() { return index_offset_; }
+ void SetIndexOffset(uint32_t index_offset) { index_offset_ = index_offset; }
+ HValue* GetKey() { return key(); }
+ void SetKey(HValue* key) { SetOperandAt(1, key); }
+ bool IsDehoisted() { return is_dehoisted_; }
+ void SetDehoisted(bool is_dehoisted) { is_dehoisted_ = is_dehoisted; }
DECLARE_CONCRETE_INSTRUCTION(StoreKeyedSpecializedArrayElement)
private:
ElementsKind elements_kind_;
+ uint32_t index_offset_;
+ bool is_dehoisted_;
};
@@ -4230,8 +4485,19 @@ class HTransitionElementsKind: public HTemplateInstruction<1> {
transitioned_map_(transitioned_map) {
SetOperandAt(0, object);
SetFlag(kUseGVN);
+    // Don't set GVN DependsOn flags here. That would defeat GVN's detection
+    // of congruent HTransitionElementsKind instructions. Instruction hoisting
+    // handles HTransitionElementsKind instructions specially, explicitly
+    // adding DependsOn flags during its dependency calculations.
SetGVNFlag(kChangesElementsKind);
- SetGVNFlag(kChangesElementsPointer);
+ if (original_map->has_fast_double_elements()) {
+ SetGVNFlag(kChangesElementsPointer);
+ SetGVNFlag(kChangesNewSpacePromotion);
+ }
+ if (transitioned_map->has_fast_double_elements()) {
+ SetGVNFlag(kChangesElementsPointer);
+ SetGVNFlag(kChangesNewSpacePromotion);
+ }
set_representation(Representation::Tagged());
}
@@ -4293,6 +4559,7 @@ class HStringCharCodeAt: public HTemplateInstruction<3> {
set_representation(Representation::Integer32());
SetFlag(kUseGVN);
SetGVNFlag(kDependsOnMaps);
+ SetGVNFlag(kChangesNewSpacePromotion);
}
virtual Representation RequiredInputRepresentation(int index) {
@@ -4324,6 +4591,7 @@ class HStringCharFromCode: public HTemplateInstruction<2> {
SetOperandAt(1, char_code);
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
+ SetGVNFlag(kChangesNewSpacePromotion);
}
virtual Representation RequiredInputRepresentation(int index) {
@@ -4376,8 +4644,12 @@ class HAllocateObject: public HTemplateInstruction<1> {
: constructor_(constructor) {
SetOperandAt(0, context);
set_representation(Representation::Tagged());
+ SetGVNFlag(kChangesNewSpacePromotion);
}
+ // Maximum instance size for which allocations will be inlined.
+ static const int kMaxSize = 64 * kPointerSize;
+
HValue* context() { return OperandAt(0); }
Handle<JSFunction> constructor() { return constructor_; }
@@ -4421,6 +4693,7 @@ class HFastLiteral: public HMaterializedLiteral<1> {
boilerplate_(boilerplate),
total_size_(total_size) {
SetOperandAt(0, context);
+ SetGVNFlag(kChangesNewSpacePromotion);
}
// Maximum depth and total number of elements and properties for literal
@@ -4456,12 +4729,13 @@ class HArrayLiteral: public HMaterializedLiteral<1> {
length_(length),
boilerplate_object_(boilerplate_object) {
SetOperandAt(0, context);
+ SetGVNFlag(kChangesNewSpacePromotion);
}
HValue* context() { return OperandAt(0); }
ElementsKind boilerplate_elements_kind() const {
if (!boilerplate_object_->IsJSObject()) {
- return FAST_ELEMENTS;
+ return TERMINAL_FAST_ELEMENTS_KIND;
}
return Handle<JSObject>::cast(boilerplate_object_)->GetElementsKind();
}
@@ -4496,6 +4770,7 @@ class HObjectLiteral: public HMaterializedLiteral<1> {
fast_elements_(fast_elements),
has_function_(has_function) {
SetOperandAt(0, context);
+ SetGVNFlag(kChangesNewSpacePromotion);
}
HValue* context() { return OperandAt(0); }
@@ -4557,6 +4832,7 @@ class HFunctionLiteral: public HTemplateInstruction<1> {
: shared_info_(shared), pretenure_(pretenure) {
SetOperandAt(0, context);
set_representation(Representation::Tagged());
+ SetGVNFlag(kChangesNewSpacePromotion);
}
HValue* context() { return OperandAt(0); }
diff --git a/deps/v8/src/hydrogen.cc b/deps/v8/src/hydrogen.cc
index fd7560a372..61488aff2a 100644
--- a/deps/v8/src/hydrogen.cc
+++ b/deps/v8/src/hydrogen.cc
@@ -55,19 +55,19 @@ namespace internal {
HBasicBlock::HBasicBlock(HGraph* graph)
: block_id_(graph->GetNextBlockID()),
graph_(graph),
- phis_(4),
+ phis_(4, graph->zone()),
first_(NULL),
last_(NULL),
end_(NULL),
loop_information_(NULL),
- predecessors_(2),
+ predecessors_(2, graph->zone()),
dominator_(NULL),
- dominated_blocks_(4),
+ dominated_blocks_(4, graph->zone()),
last_environment_(NULL),
argument_count_(-1),
first_instruction_index_(-1),
last_instruction_index_(-1),
- deleted_phis_(4),
+ deleted_phis_(4, graph->zone()),
parent_loop_header_(NULL),
is_inline_return_target_(false),
is_deoptimizing_(false),
@@ -76,7 +76,7 @@ HBasicBlock::HBasicBlock(HGraph* graph)
void HBasicBlock::AttachLoopInformation() {
ASSERT(!IsLoopHeader());
- loop_information_ = new(zone()) HLoopInformation(this);
+ loop_information_ = new(zone()) HLoopInformation(this, zone());
}
@@ -88,7 +88,7 @@ void HBasicBlock::DetachLoopInformation() {
void HBasicBlock::AddPhi(HPhi* phi) {
ASSERT(!IsStartBlock());
- phis_.Add(phi);
+ phis_.Add(phi, zone());
phi->SetBlock(this);
}
@@ -113,20 +113,20 @@ void HBasicBlock::AddInstruction(HInstruction* instr) {
first_ = last_ = entry;
}
instr->InsertAfter(last_);
- last_ = instr;
}
HDeoptimize* HBasicBlock::CreateDeoptimize(
HDeoptimize::UseEnvironment has_uses) {
ASSERT(HasEnvironment());
- if (has_uses == HDeoptimize::kNoUses) return new(zone()) HDeoptimize(0);
+ if (has_uses == HDeoptimize::kNoUses)
+ return new(zone()) HDeoptimize(0, zone());
HEnvironment* environment = last_environment();
- HDeoptimize* instr = new(zone()) HDeoptimize(environment->length());
+ HDeoptimize* instr = new(zone()) HDeoptimize(environment->length(), zone());
for (int i = 0; i < environment->length(); i++) {
HValue* val = environment->values()->at(i);
- instr->AddEnvironmentValue(val);
+ instr->AddEnvironmentValue(val, zone());
}
return instr;
@@ -142,7 +142,7 @@ HSimulate* HBasicBlock::CreateSimulate(int ast_id) {
int push_count = environment->push_count();
int pop_count = environment->pop_count();
- HSimulate* instr = new(zone()) HSimulate(ast_id, pop_count);
+ HSimulate* instr = new(zone()) HSimulate(ast_id, pop_count, zone());
for (int i = push_count - 1; i >= 0; --i) {
instr->AddPushedValue(environment->ExpressionStackAt(i));
}
@@ -165,11 +165,15 @@ void HBasicBlock::Finish(HControlInstruction* end) {
}
-void HBasicBlock::Goto(HBasicBlock* block, bool drop_extra) {
+void HBasicBlock::Goto(HBasicBlock* block, FunctionState* state) {
+ bool drop_extra = state != NULL && state->drop_extra();
+ bool arguments_pushed = state != NULL && state->arguments_pushed();
+
if (block->IsInlineReturnTarget()) {
- AddInstruction(new(zone()) HLeaveInlined);
+ AddInstruction(new(zone()) HLeaveInlined(arguments_pushed));
last_environment_ = last_environment()->DiscardInlined(drop_extra);
}
+
AddSimulate(AstNode::kNoNumber);
HGoto* instr = new(zone()) HGoto(block);
Finish(instr);
@@ -178,10 +182,13 @@ void HBasicBlock::Goto(HBasicBlock* block, bool drop_extra) {
void HBasicBlock::AddLeaveInlined(HValue* return_value,
HBasicBlock* target,
- bool drop_extra) {
+ FunctionState* state) {
+ bool drop_extra = state != NULL && state->drop_extra();
+ bool arguments_pushed = state != NULL && state->arguments_pushed();
+
ASSERT(target->IsInlineReturnTarget());
ASSERT(return_value != NULL);
- AddInstruction(new(zone()) HLeaveInlined);
+ AddInstruction(new(zone()) HLeaveInlined(arguments_pushed));
last_environment_ = last_environment()->DiscardInlined(drop_extra);
last_environment()->Push(return_value);
AddSimulate(AstNode::kNoNumber);
@@ -272,7 +279,7 @@ void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
SetInitialEnvironment(pred->last_environment()->Copy());
}
- predecessors_.Add(pred);
+ predecessors_.Add(pred, zone());
}
@@ -285,7 +292,7 @@ void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
dominated_blocks_[index]->block_id() < block->block_id()) {
++index;
}
- dominated_blocks_.InsertAt(index, block);
+ dominated_blocks_.InsertAt(index, block, zone());
}
@@ -398,7 +405,7 @@ void HBasicBlock::Verify() {
void HLoopInformation::RegisterBackEdge(HBasicBlock* block) {
- this->back_edges_.Add(block);
+ this->back_edges_.Add(block, block->zone());
AddBlock(block);
}
@@ -424,7 +431,7 @@ void HLoopInformation::AddBlock(HBasicBlock* block) {
AddBlock(block->parent_loop_header());
} else {
block->set_parent_loop_header(loop_header());
- blocks_.Add(block);
+ blocks_.Add(block, block->zone());
for (int i = 0; i < block->predecessors()->length(); ++i) {
AddBlock(block->predecessors()->at(i));
}
@@ -445,8 +452,8 @@ class ReachabilityAnalyzer BASE_EMBEDDED {
int block_count,
HBasicBlock* dont_visit)
: visited_count_(0),
- stack_(16),
- reachable_(block_count, ZONE),
+ stack_(16, entry_block->zone()),
+ reachable_(block_count, entry_block->zone()),
dont_visit_(dont_visit) {
PushBlock(entry_block);
Analyze();
@@ -460,7 +467,7 @@ class ReachabilityAnalyzer BASE_EMBEDDED {
if (block != NULL && block != dont_visit_ &&
!reachable_.Contains(block->block_id())) {
reachable_.Add(block->block_id());
- stack_.Add(block);
+ stack_.Add(block, block->zone());
visited_count_++;
}
}
@@ -598,7 +605,8 @@ HConstant* HGraph::GetConstantHole() {
HGraphBuilder::HGraphBuilder(CompilationInfo* info,
- TypeFeedbackOracle* oracle)
+ TypeFeedbackOracle* oracle,
+ Zone* zone)
: function_state_(NULL),
initial_function_state_(this, info, oracle, NORMAL_RETURN),
ast_context_(NULL),
@@ -606,7 +614,8 @@ HGraphBuilder::HGraphBuilder(CompilationInfo* info,
graph_(NULL),
current_block_(NULL),
inlined_count_(0),
- zone_(info->isolate()->zone()),
+ globals_(10, zone),
+ zone_(zone),
inline_bailout_(false) {
// This is not initialized in the initializer list because the
// constructor for the initial state relies on function_state_ == NULL
@@ -665,22 +674,24 @@ void HBasicBlock::FinishExit(HControlInstruction* instruction) {
}
-HGraph::HGraph(CompilationInfo* info)
+HGraph::HGraph(CompilationInfo* info, Zone* zone)
: isolate_(info->isolate()),
next_block_id_(0),
entry_block_(NULL),
- blocks_(8),
- values_(16),
- phi_list_(NULL) {
+ blocks_(8, zone),
+ values_(16, zone),
+ phi_list_(NULL),
+ zone_(zone),
+ is_recursive_(false) {
start_environment_ =
- new(zone()) HEnvironment(NULL, info->scope(), info->closure());
+ new(zone) HEnvironment(NULL, info->scope(), info->closure(), zone);
start_environment_->set_ast_id(AstNode::kFunctionEntryId);
entry_block_ = CreateBasicBlock();
entry_block_->SetInitialEnvironment(start_environment_);
}
-Handle<Code> HGraph::Compile(CompilationInfo* info) {
+Handle<Code> HGraph::Compile(CompilationInfo* info, Zone* zone) {
int values = GetMaximumValueID();
if (values > LUnallocated::kMaxVirtualRegisters) {
if (FLAG_trace_bailout) {
@@ -701,7 +712,7 @@ Handle<Code> HGraph::Compile(CompilationInfo* info) {
}
MacroAssembler assembler(info->isolate(), NULL, 0);
- LCodeGen generator(chunk, &assembler, info);
+ LCodeGen generator(chunk, &assembler, info, zone);
chunk->MarkEmptyBlocks();
@@ -723,7 +734,7 @@ Handle<Code> HGraph::Compile(CompilationInfo* info) {
HBasicBlock* HGraph::CreateBasicBlock() {
HBasicBlock* result = new(zone()) HBasicBlock(this);
- blocks_.Add(result);
+ blocks_.Add(result, zone());
return result;
}
@@ -746,7 +757,7 @@ void HGraph::OrderBlocks() {
HPhase phase("H_Block ordering");
BitVector visited(blocks_.length(), zone());
- ZoneList<HBasicBlock*> reverse_result(8);
+ ZoneList<HBasicBlock*> reverse_result(8, zone());
HBasicBlock* start = blocks_[0];
Postorder(start, &visited, &reverse_result, NULL);
@@ -754,7 +765,7 @@ void HGraph::OrderBlocks() {
int index = 0;
for (int i = reverse_result.length() - 1; i >= 0; --i) {
HBasicBlock* b = reverse_result[i];
- blocks_.Add(b);
+ blocks_.Add(b, zone());
b->set_block_id(index++);
}
}
@@ -800,7 +811,7 @@ void HGraph::Postorder(HBasicBlock* block,
ASSERT(block->end()->SecondSuccessor() == NULL ||
order->Contains(block->end()->SecondSuccessor()) ||
block->end()->SecondSuccessor()->IsLoopHeader());
- order->Add(block);
+ order->Add(block, zone());
}
@@ -842,9 +853,9 @@ void HGraph::EliminateRedundantPhis() {
// Worklist of phis that can potentially be eliminated. Initialized with
// all phi nodes. When elimination of a phi node modifies another phi node
// the modified phi node is added to the worklist.
- ZoneList<HPhi*> worklist(blocks_.length());
+ ZoneList<HPhi*> worklist(blocks_.length(), zone());
for (int i = 0; i < blocks_.length(); ++i) {
- worklist.AddAll(*blocks_[i]->phis());
+ worklist.AddAll(*blocks_[i]->phis(), zone());
}
while (!worklist.is_empty()) {
@@ -862,7 +873,7 @@ void HGraph::EliminateRedundantPhis() {
for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) {
HValue* value = it.value();
value->SetOperandAt(it.index(), replacement);
- if (value->IsPhi()) worklist.Add(HPhi::cast(value));
+ if (value->IsPhi()) worklist.Add(HPhi::cast(value), zone());
}
block->RemovePhi(phi);
}
@@ -874,18 +885,18 @@ void HGraph::EliminateUnreachablePhis() {
HPhase phase("H_Unreachable phi elimination", this);
// Initialize worklist.
- ZoneList<HPhi*> phi_list(blocks_.length());
- ZoneList<HPhi*> worklist(blocks_.length());
+ ZoneList<HPhi*> phi_list(blocks_.length(), zone());
+ ZoneList<HPhi*> worklist(blocks_.length(), zone());
for (int i = 0; i < blocks_.length(); ++i) {
for (int j = 0; j < blocks_[i]->phis()->length(); j++) {
HPhi* phi = blocks_[i]->phis()->at(j);
- phi_list.Add(phi);
+ phi_list.Add(phi, zone());
// We can't eliminate phis in the receiver position in the environment
// because in case of throwing an error we need this value to
// construct a stack trace.
if (phi->HasRealUses() || phi->IsReceiver()) {
phi->set_is_live(true);
- worklist.Add(phi);
+ worklist.Add(phi, zone());
}
}
}
@@ -897,7 +908,7 @@ void HGraph::EliminateUnreachablePhis() {
HValue* operand = phi->OperandAt(i);
if (operand->IsPhi() && !HPhi::cast(operand)->is_live()) {
HPhi::cast(operand)->set_is_live(true);
- worklist.Add(HPhi::cast(operand));
+ worklist.Add(HPhi::cast(operand), zone());
}
}
}
@@ -944,11 +955,11 @@ bool HGraph::CheckConstPhiUses() {
void HGraph::CollectPhis() {
int block_count = blocks_.length();
- phi_list_ = new ZoneList<HPhi*>(block_count);
+ phi_list_ = new(zone()) ZoneList<HPhi*>(block_count, zone());
for (int i = 0; i < block_count; ++i) {
for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
HPhi* phi = blocks_[i]->phis()->at(j);
- phi_list_->Add(phi);
+ phi_list_->Add(phi, zone());
}
}
}
@@ -969,7 +980,7 @@ void HGraph::InferTypes(ZoneList<HValue*>* worklist) {
HValue* use = it.value();
if (!in_worklist.Contains(use->id())) {
in_worklist.Add(use->id());
- worklist->Add(use);
+ worklist->Add(use, zone());
}
}
}
@@ -980,7 +991,7 @@ void HGraph::InferTypes(ZoneList<HValue*>* worklist) {
class HRangeAnalysis BASE_EMBEDDED {
public:
explicit HRangeAnalysis(HGraph* graph) :
- graph_(graph), zone_(graph->isolate()->zone()), changed_ranges_(16) { }
+ graph_(graph), zone_(graph->zone()), changed_ranges_(16, zone_) { }
void Analyze();
@@ -1125,7 +1136,7 @@ void HRangeAnalysis::RollBackTo(int index) {
void HRangeAnalysis::AddRange(HValue* value, Range* range) {
Range* original_range = value->range();
value->AddNewRange(range, zone_);
- changed_ranges_.Add(value);
+ changed_ranges_.Add(value, zone_);
Range* new_range = value->range();
TraceRange("Updated range of %d set to [%d,%d]\n",
value->id(),
@@ -1143,14 +1154,39 @@ void HRangeAnalysis::AddRange(HValue* value, Range* range) {
void TraceGVN(const char* msg, ...) {
- if (FLAG_trace_gvn) {
- va_list arguments;
- va_start(arguments, msg);
- OS::VPrint(msg, arguments);
- va_end(arguments);
- }
+ va_list arguments;
+ va_start(arguments, msg);
+ OS::VPrint(msg, arguments);
+ va_end(arguments);
}
+// Wrap TraceGVN in macros to avoid the expense of evaluating its arguments when
+// --trace-gvn is off.
+#define TRACE_GVN_1(msg, a1) \
+ if (FLAG_trace_gvn) { \
+ TraceGVN(msg, a1); \
+ }
+
+#define TRACE_GVN_2(msg, a1, a2) \
+ if (FLAG_trace_gvn) { \
+ TraceGVN(msg, a1, a2); \
+ }
+
+#define TRACE_GVN_3(msg, a1, a2, a3) \
+ if (FLAG_trace_gvn) { \
+ TraceGVN(msg, a1, a2, a3); \
+ }
+
+#define TRACE_GVN_4(msg, a1, a2, a3, a4) \
+ if (FLAG_trace_gvn) { \
+ TraceGVN(msg, a1, a2, a3, a4); \
+ }
+
+#define TRACE_GVN_5(msg, a1, a2, a3, a4, a5) \
+ if (FLAG_trace_gvn) { \
+ TraceGVN(msg, a1, a2, a3, a4, a5); \
+ }
+
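
The macros above move the FLAG_trace_gvn test to the call site, so the arguments (in particular the GetGVNFlagsString() allocations used below) are evaluated only when tracing is on. A small standalone sketch of the same guard pattern; the flag and helper names here are illustrative, not V8's:

#include <cstdarg>
#include <cstdio>

static bool FLAG_trace_example = false;  // stand-in for --trace-gvn

static void Trace(const char* msg, ...) {
  va_list args;
  va_start(args, msg);
  std::vfprintf(stderr, msg, args);
  va_end(args);
}

// The argument expression sits inside the if, so ExpensiveDescription() is
// never evaluated unless tracing is enabled.
#define TRACE_1(msg, a1)     \
  if (FLAG_trace_example) {  \
    Trace(msg, a1);          \
  }

static const char* ExpensiveDescription() { return "changes [Maps, Fields]"; }

int main() {
  TRACE_1("instruction %s\n", ExpensiveDescription());  // no-op: flag is off
  FLAG_trace_example = true;
  TRACE_1("instruction %s\n", ExpensiveDescription());  // prints to stderr
  return 0;
}
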
HValueMap::HValueMap(Zone* zone, const HValueMap* other)
: array_size_(other->array_size_),
@@ -1228,18 +1264,18 @@ HValue* HValueMap::Lookup(HValue* value) const {
}
-void HValueMap::Resize(int new_size) {
+void HValueMap::Resize(int new_size, Zone* zone) {
ASSERT(new_size > count_);
// Hashing the values into the new array has no more collisions than in the
// old hash map, so we can use the existing lists_ array, if we are careful.
// Make sure we have at least one free element.
if (free_list_head_ == kNil) {
- ResizeLists(lists_size_ << 1);
+ ResizeLists(lists_size_ << 1, zone);
}
HValueMapListElement* new_array =
- ZONE->NewArray<HValueMapListElement>(new_size);
+ zone->NewArray<HValueMapListElement>(new_size);
memset(new_array, 0, sizeof(HValueMapListElement) * new_size);
HValueMapListElement* old_array = array_;
@@ -1257,14 +1293,14 @@ void HValueMap::Resize(int new_size) {
if (old_array[i].value != NULL) {
int current = old_array[i].next;
while (current != kNil) {
- Insert(lists_[current].value);
+ Insert(lists_[current].value, zone);
int next = lists_[current].next;
lists_[current].next = free_list_head_;
free_list_head_ = current;
current = next;
}
// Rehash the directly stored value.
- Insert(old_array[i].value);
+ Insert(old_array[i].value, zone);
}
}
}
@@ -1273,11 +1309,11 @@ void HValueMap::Resize(int new_size) {
}
-void HValueMap::ResizeLists(int new_size) {
+void HValueMap::ResizeLists(int new_size, Zone* zone) {
ASSERT(new_size > lists_size_);
HValueMapListElement* new_lists =
- ZONE->NewArray<HValueMapListElement>(new_size);
+ zone->NewArray<HValueMapListElement>(new_size);
memset(new_lists, 0, sizeof(HValueMapListElement) * new_size);
HValueMapListElement* old_lists = lists_;
@@ -1296,10 +1332,10 @@ void HValueMap::ResizeLists(int new_size) {
}
-void HValueMap::Insert(HValue* value) {
+void HValueMap::Insert(HValue* value, Zone* zone) {
ASSERT(value != NULL);
// Resizing when half of the hashtable is filled up.
- if (count_ >= array_size_ >> 1) Resize(array_size_ << 1);
+ if (count_ >= array_size_ >> 1) Resize(array_size_ << 1, zone);
ASSERT(count_ < array_size_);
count_++;
uint32_t pos = Bound(static_cast<uint32_t>(value->Hashcode()));
@@ -1308,7 +1344,7 @@ void HValueMap::Insert(HValue* value) {
array_[pos].next = kNil;
} else {
if (free_list_head_ == kNil) {
- ResizeLists(lists_size_ << 1);
+ ResizeLists(lists_size_ << 1, zone);
}
int new_element_pos = free_list_head_;
ASSERT(new_element_pos != kNil);
@@ -1321,6 +1357,45 @@ void HValueMap::Insert(HValue* value) {
}
+HSideEffectMap::HSideEffectMap() : count_(0) {
+ memset(data_, 0, kNumberOfTrackedSideEffects * kPointerSize);
+}
+
+
+HSideEffectMap::HSideEffectMap(HSideEffectMap* other) : count_(other->count_) {
+ *this = *other; // Calls operator=.
+}
+
+
+HSideEffectMap& HSideEffectMap::operator= (const HSideEffectMap& other) {
+ if (this != &other) {
+ memcpy(data_, other.data_, kNumberOfTrackedSideEffects * kPointerSize);
+ }
+ return *this;
+}
+
+void HSideEffectMap::Kill(GVNFlagSet flags) {
+ for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
+ GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
+ if (flags.Contains(changes_flag)) {
+ if (data_[i] != NULL) count_--;
+ data_[i] = NULL;
+ }
+ }
+}
+
+
+void HSideEffectMap::Store(GVNFlagSet flags, HInstruction* instr) {
+ for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
+ GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
+ if (flags.Contains(changes_flag)) {
+ if (data_[i] == NULL) count_++;
+ data_[i] = instr;
+ }
+ }
+}
+
+
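
HSideEffectMap above remembers, for each tracked side effect, the most recent dominating instruction that produced it: Kill() drops entries whose effect has been clobbered again, Store() installs the new producer. A compact standalone sketch of that bookkeeping; the tracked count and flag numbering are assumptions:

#include <cassert>
#include <cstddef>

const int kTracked = 2;  // e.g. index 0 = NewSpacePromotion (assumed numbering)

struct SideEffectMap {
  const void* data[kTracked];
  SideEffectMap() {
    for (int i = 0; i < kTracked; i++) data[i] = NULL;
  }
  // Forget producers of every effect present in the |changed| bitset.
  void Kill(unsigned changed) {
    for (int i = 0; i < kTracked; i++) {
      if (changed & (1u << i)) data[i] = NULL;
    }
  }
  // Record |instr| as the latest dominating producer of those effects.
  void Store(unsigned changed, const void* instr) {
    for (int i = 0; i < kTracked; i++) {
      if (changed & (1u << i)) data[i] = instr;
    }
  }
};

int main() {
  int alloc = 0, call = 0;
  SideEffectMap map;
  map.Store(1u << 0, &alloc);  // the allocation produces effect #0
  assert(map.data[0] == &alloc);
  map.Kill(1u << 0);           // a later instruction clobbers effect #0 ...
  map.Store(1u << 0, &call);   // ... and becomes the new dominating producer
  assert(map.data[0] == &call);
  return 0;
}
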
class HStackCheckEliminator BASE_EMBEDDED {
public:
explicit HStackCheckEliminator(HGraph* graph) : graph_(graph) { }
@@ -1409,12 +1484,14 @@ class HGlobalValueNumberer BASE_EMBEDDED {
: graph_(graph),
info_(info),
removed_side_effects_(false),
- block_side_effects_(graph->blocks()->length()),
- loop_side_effects_(graph->blocks()->length()),
+ block_side_effects_(graph->blocks()->length(), graph->zone()),
+ loop_side_effects_(graph->blocks()->length(), graph->zone()),
visited_on_paths_(graph->zone(), graph->blocks()->length()) {
ASSERT(info->isolate()->heap()->allow_allocation(false));
- block_side_effects_.AddBlock(GVNFlagSet(), graph_->blocks()->length());
- loop_side_effects_.AddBlock(GVNFlagSet(), graph_->blocks()->length());
+ block_side_effects_.AddBlock(GVNFlagSet(), graph_->blocks()->length(),
+ graph_->zone());
+ loop_side_effects_.AddBlock(GVNFlagSet(), graph_->blocks()->length(),
+ graph_->zone());
}
~HGlobalValueNumberer() {
ASSERT(!info_->isolate()->heap()->allow_allocation(true));
@@ -1427,7 +1504,7 @@ class HGlobalValueNumberer BASE_EMBEDDED {
GVNFlagSet CollectSideEffectsOnPathsToDominatedBlock(
HBasicBlock* dominator,
HBasicBlock* dominated);
- void AnalyzeBlock(HBasicBlock* block, HValueMap* map);
+ void AnalyzeGraph();
void ComputeBlockSideEffects();
void LoopInvariantCodeMotion();
void ProcessLoopBlock(HBasicBlock* block,
@@ -1440,7 +1517,7 @@ class HGlobalValueNumberer BASE_EMBEDDED {
HGraph* graph() { return graph_; }
CompilationInfo* info() { return info_; }
- Zone* zone() { return graph_->zone(); }
+ Zone* zone() const { return graph_->zone(); }
HGraph* graph_;
CompilationInfo* info_;
@@ -1464,8 +1541,7 @@ bool HGlobalValueNumberer::Analyze() {
if (FLAG_loop_invariant_code_motion) {
LoopInvariantCodeMotion();
}
- HValueMap* map = new(zone()) HValueMap();
- AnalyzeBlock(graph_->entry_block(), map);
+ AnalyzeGraph();
return removed_side_effects_;
}
@@ -1510,14 +1586,100 @@ void HGlobalValueNumberer::ComputeBlockSideEffects() {
}
+SmartArrayPointer<char> GetGVNFlagsString(GVNFlagSet flags) {
+ char underlying_buffer[kLastFlag * 128];
+ Vector<char> buffer(underlying_buffer, sizeof(underlying_buffer));
+#if DEBUG
+ int offset = 0;
+ const char* separator = "";
+ const char* comma = ", ";
+ buffer[0] = 0;
+ uint32_t set_depends_on = 0;
+ uint32_t set_changes = 0;
+ for (int bit = 0; bit < kLastFlag; ++bit) {
+ if ((flags.ToIntegral() & (1 << bit)) != 0) {
+ if (bit % 2 == 0) {
+ set_changes++;
+ } else {
+ set_depends_on++;
+ }
+ }
+ }
+ bool positive_changes = set_changes < (kLastFlag / 2);
+ bool positive_depends_on = set_depends_on < (kLastFlag / 2);
+ if (set_changes > 0) {
+ if (positive_changes) {
+ offset += OS::SNPrintF(buffer + offset, "changes [");
+ } else {
+ offset += OS::SNPrintF(buffer + offset, "changes all except [");
+ }
+ for (int bit = 0; bit < kLastFlag; ++bit) {
+ if (((flags.ToIntegral() & (1 << bit)) != 0) == positive_changes) {
+ switch (static_cast<GVNFlag>(bit)) {
+#define DECLARE_FLAG(type) \
+ case kChanges##type: \
+ offset += OS::SNPrintF(buffer + offset, separator); \
+ offset += OS::SNPrintF(buffer + offset, #type); \
+ separator = comma; \
+ break;
+GVN_TRACKED_FLAG_LIST(DECLARE_FLAG)
+GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG)
+#undef DECLARE_FLAG
+ default:
+ break;
+ }
+ }
+ }
+ offset += OS::SNPrintF(buffer + offset, "]");
+ }
+ if (set_depends_on > 0) {
+ separator = "";
+ if (set_changes > 0) {
+ offset += OS::SNPrintF(buffer + offset, ", ");
+ }
+ if (positive_depends_on) {
+ offset += OS::SNPrintF(buffer + offset, "depends on [");
+ } else {
+ offset += OS::SNPrintF(buffer + offset, "depends on all except [");
+ }
+ for (int bit = 0; bit < kLastFlag; ++bit) {
+ if (((flags.ToIntegral() & (1 << bit)) != 0) == positive_depends_on) {
+ switch (static_cast<GVNFlag>(bit)) {
+#define DECLARE_FLAG(type) \
+ case kDependsOn##type: \
+ offset += OS::SNPrintF(buffer + offset, separator); \
+ offset += OS::SNPrintF(buffer + offset, #type); \
+ separator = comma; \
+ break;
+GVN_TRACKED_FLAG_LIST(DECLARE_FLAG)
+GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG)
+#undef DECLARE_FLAG
+ default:
+ break;
+ }
+ }
+ }
+ offset += OS::SNPrintF(buffer + offset, "]");
+ }
+#else
+ OS::SNPrintF(buffer, "0x%08X", flags.ToIntegral());
+#endif
+ size_t string_len = strlen(underlying_buffer) + 1;
+ ASSERT(string_len <= sizeof(underlying_buffer));
+ char* result = new char[strlen(underlying_buffer) + 1];
+ memcpy(result, underlying_buffer, string_len);
+ return SmartArrayPointer<char>(result);
+}
+
+
void HGlobalValueNumberer::LoopInvariantCodeMotion() {
for (int i = graph_->blocks()->length() - 1; i >= 0; --i) {
HBasicBlock* block = graph_->blocks()->at(i);
if (block->IsLoopHeader()) {
GVNFlagSet side_effects = loop_side_effects_[block->block_id()];
- TraceGVN("Try loop invariant motion for block B%d effects=0x%x\n",
- block->block_id(),
- side_effects.ToIntegral());
+ TRACE_GVN_2("Try loop invariant motion for block B%d %s\n",
+ block->block_id(),
+ *GetGVNFlagsString(side_effects));
GVNFlagSet accumulated_first_time_depends;
GVNFlagSet accumulated_first_time_changes;
@@ -1540,51 +1702,54 @@ void HGlobalValueNumberer::ProcessLoopBlock(
GVNFlagSet* first_time_changes) {
HBasicBlock* pre_header = loop_header->predecessors()->at(0);
GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(loop_kills);
- TraceGVN("Loop invariant motion for B%d depends_flags=0x%x\n",
- block->block_id(),
- depends_flags.ToIntegral());
+ TRACE_GVN_2("Loop invariant motion for B%d %s\n",
+ block->block_id(),
+ *GetGVNFlagsString(depends_flags));
HInstruction* instr = block->first();
while (instr != NULL) {
HInstruction* next = instr->next();
bool hoisted = false;
if (instr->CheckFlag(HValue::kUseGVN)) {
- TraceGVN("Checking instruction %d (%s) instruction GVN flags 0x%X, "
- "loop kills 0x%X\n",
- instr->id(),
- instr->Mnemonic(),
- instr->gvn_flags().ToIntegral(),
- depends_flags.ToIntegral());
+ TRACE_GVN_4("Checking instruction %d (%s) %s. Loop %s\n",
+ instr->id(),
+ instr->Mnemonic(),
+ *GetGVNFlagsString(instr->gvn_flags()),
+ *GetGVNFlagsString(loop_kills));
bool can_hoist = !instr->gvn_flags().ContainsAnyOf(depends_flags);
if (instr->IsTransitionElementsKind()) {
// It's possible to hoist transitions out of a loop as long as the
- // hoisting wouldn't move the transition past a DependsOn of one of it's
- // changes or any instructions that might change an objects map or
- // elements contents.
- GVNFlagSet changes = instr->ChangesFlags();
+ // hoisting wouldn't move the transition past an instruction that has a
+ // DependsOn flag for anything it changes.
GVNFlagSet hoist_depends_blockers =
- HValue::ConvertChangesToDependsFlags(changes);
- // In addition to not hoisting transitions above other instructions that
- // change dependencies that the transition changes, it must not be
- // hoisted above map changes and stores to an elements backing store
- // that the transition might change.
- GVNFlagSet hoist_change_blockers = changes;
- hoist_change_blockers.Add(kChangesMaps);
+ HValue::ConvertChangesToDependsFlags(instr->ChangesFlags());
+
+          // In addition, the transition must not be hoisted above elements
+          // kind changes or, if the transition is destructive to the elements
+          // buffer, above changes to the array pointer or array contents.
+ GVNFlagSet hoist_change_blockers;
+ hoist_change_blockers.Add(kChangesElementsKind);
HTransitionElementsKind* trans = HTransitionElementsKind::cast(instr);
if (trans->original_map()->has_fast_double_elements()) {
+ hoist_change_blockers.Add(kChangesElementsPointer);
hoist_change_blockers.Add(kChangesDoubleArrayElements);
}
if (trans->transitioned_map()->has_fast_double_elements()) {
+ hoist_change_blockers.Add(kChangesElementsPointer);
hoist_change_blockers.Add(kChangesArrayElements);
}
- TraceGVN("Checking dependencies on HTransitionElementsKind %d (%s) "
- "hoist depends blockers 0x%X, hoist change blockers 0x%X, "
- "accumulated depends 0x%X, accumulated changes 0x%X\n",
- instr->id(),
- instr->Mnemonic(),
- hoist_depends_blockers.ToIntegral(),
- hoist_change_blockers.ToIntegral(),
- first_time_depends->ToIntegral(),
- first_time_changes->ToIntegral());
+ if (FLAG_trace_gvn) {
+ GVNFlagSet hoist_blockers = hoist_depends_blockers;
+ hoist_blockers.Add(hoist_change_blockers);
+ GVNFlagSet first_time = *first_time_changes;
+ first_time.Add(*first_time_depends);
+ TRACE_GVN_4("Checking dependencies on HTransitionElementsKind "
+ "%d (%s) hoist blockers: %s; "
+ "first-time accumulated: %s\n",
+ instr->id(),
+ instr->Mnemonic(),
+ *GetGVNFlagsString(hoist_blockers),
+ *GetGVNFlagsString(first_time));
+ }
          // It's possible to hoist transitions from the current loop only if
// they dominate all of the successor blocks in the same loop and there
// are not any instructions that have Changes/DependsOn that intervene
@@ -1607,7 +1772,7 @@ void HGlobalValueNumberer::ProcessLoopBlock(
}
if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
- TraceGVN("Hoisting loop invariant instruction %d\n", instr->id());
+ TRACE_GVN_1("Hoisting loop invariant instruction %d\n", instr->id());
// Move the instruction out of the loop.
instr->Unlink();
instr->InsertBefore(pre_header->end());
@@ -1619,8 +1784,18 @@ void HGlobalValueNumberer::ProcessLoopBlock(
if (!hoisted) {
// If an instruction is not hoisted, we have to account for its side
// effects when hoisting later HTransitionElementsKind instructions.
+ GVNFlagSet previous_depends = *first_time_depends;
+ GVNFlagSet previous_changes = *first_time_changes;
first_time_depends->Add(instr->DependsOnFlags());
first_time_changes->Add(instr->ChangesFlags());
+ if (!(previous_depends == *first_time_depends)) {
+ TRACE_GVN_1("Updated first-time accumulated %s\n",
+ *GetGVNFlagsString(*first_time_depends));
+ }
+ if (!(previous_changes == *first_time_changes)) {
+ TRACE_GVN_1("Updated first-time accumulated %s\n",
+ *GetGVNFlagsString(*first_time_changes));
+ }
}
instr = next;
}
@@ -1660,65 +1835,220 @@ GVNFlagSet HGlobalValueNumberer::CollectSideEffectsOnPathsToDominatedBlock(
}
-void HGlobalValueNumberer::AnalyzeBlock(HBasicBlock* block, HValueMap* map) {
- TraceGVN("Analyzing block B%d%s\n",
- block->block_id(),
- block->IsLoopHeader() ? " (loop header)" : "");
+// Each instance of this class is like a "stack frame" for the recursive
+// traversal of the dominator tree done during GVN (the stack is handled
+// as a doubly linked list).
+// We reuse frames when possible, so the list length is limited by the depth
+// of the dominator tree, but this forces us to initialize each frame by
+// calling an explicit "Initialize" method instead of using a constructor.
+class GvnBasicBlockState: public ZoneObject {
+ public:
+ static GvnBasicBlockState* CreateEntry(Zone* zone,
+ HBasicBlock* entry_block,
+ HValueMap* entry_map) {
+ return new(zone)
+ GvnBasicBlockState(NULL, entry_block, entry_map, NULL, zone);
+ }
+
+ HBasicBlock* block() { return block_; }
+ HValueMap* map() { return map_; }
+ HSideEffectMap* dominators() { return &dominators_; }
+
+ GvnBasicBlockState* next_in_dominator_tree_traversal(
+ Zone* zone,
+ HBasicBlock** dominator) {
+ // This assignment needs to happen before calling next_dominated() because
+ // that call can reuse "this" if we are at the last dominated block.
+ *dominator = block();
+ GvnBasicBlockState* result = next_dominated(zone);
+ if (result == NULL) {
+ GvnBasicBlockState* dominator_state = pop();
+ if (dominator_state != NULL) {
+ // This branch is guaranteed not to return NULL because pop() never
+ // returns a state where "is_done() == true".
+ *dominator = dominator_state->block();
+ result = dominator_state->next_dominated(zone);
+ } else {
+        // Unnecessary (we are returning NULL) but done for clarity.
+ *dominator = NULL;
+ }
+ }
+ return result;
+ }
- // If this is a loop header kill everything killed by the loop.
- if (block->IsLoopHeader()) {
- map->Kill(loop_side_effects_[block->block_id()]);
+ private:
+ void Initialize(HBasicBlock* block,
+ HValueMap* map,
+ HSideEffectMap* dominators,
+ bool copy_map,
+ Zone* zone) {
+ block_ = block;
+ map_ = copy_map ? map->Copy(zone) : map;
+ dominated_index_ = -1;
+ length_ = block->dominated_blocks()->length();
+ if (dominators != NULL) {
+ dominators_ = *dominators;
+ }
+ }
+ bool is_done() { return dominated_index_ >= length_; }
+
+ GvnBasicBlockState(GvnBasicBlockState* previous,
+ HBasicBlock* block,
+ HValueMap* map,
+ HSideEffectMap* dominators,
+ Zone* zone)
+ : previous_(previous), next_(NULL) {
+ Initialize(block, map, dominators, true, zone);
+ }
+
+ GvnBasicBlockState* next_dominated(Zone* zone) {
+ dominated_index_++;
+ if (dominated_index_ == length_ - 1) {
+ // No need to copy the map for the last child in the dominator tree.
+ Initialize(block_->dominated_blocks()->at(dominated_index_),
+ map(),
+ dominators(),
+ false,
+ zone);
+ return this;
+ } else if (dominated_index_ < length_) {
+ return push(zone,
+ block_->dominated_blocks()->at(dominated_index_),
+ dominators());
+ } else {
+ return NULL;
+ }
}
- // Go through all instructions of the current block.
- HInstruction* instr = block->first();
- while (instr != NULL) {
- HInstruction* next = instr->next();
- GVNFlagSet flags = instr->ChangesFlags();
- if (!flags.IsEmpty()) {
- // Clear all instructions in the map that are affected by side effects.
- map->Kill(flags);
- TraceGVN("Instruction %d kills\n", instr->id());
+ GvnBasicBlockState* push(Zone* zone,
+ HBasicBlock* block,
+ HSideEffectMap* dominators) {
+ if (next_ == NULL) {
+ next_ =
+ new(zone) GvnBasicBlockState(this, block, map(), dominators, zone);
+ } else {
+ next_->Initialize(block, map(), dominators, true, zone);
}
- if (instr->CheckFlag(HValue::kUseGVN)) {
- ASSERT(!instr->HasObservableSideEffects());
- HValue* other = map->Lookup(instr);
- if (other != NULL) {
- ASSERT(instr->Equals(other) && other->Equals(instr));
- TraceGVN("Replacing value %d (%s) with value %d (%s)\n",
- instr->id(),
- instr->Mnemonic(),
- other->id(),
- other->Mnemonic());
- if (instr->HasSideEffects()) removed_side_effects_ = true;
- instr->DeleteAndReplaceWith(other);
- } else {
- map->Add(instr);
- }
+ return next_;
+ }
+ GvnBasicBlockState* pop() {
+ GvnBasicBlockState* result = previous_;
+ while (result != NULL && result->is_done()) {
+ TRACE_GVN_2("Backtracking from block B%d to block b%d\n",
+ block()->block_id(),
+ previous_->block()->block_id())
+ result = result->previous_;
}
- instr = next;
+ return result;
}
- // Recursively continue analysis for all immediately dominated blocks.
- int length = block->dominated_blocks()->length();
- for (int i = 0; i < length; ++i) {
- HBasicBlock* dominated = block->dominated_blocks()->at(i);
- // No need to copy the map for the last child in the dominator tree.
- HValueMap* successor_map = (i == length - 1) ? map : map->Copy(zone());
+ GvnBasicBlockState* previous_;
+ GvnBasicBlockState* next_;
+ HBasicBlock* block_;
+ HValueMap* map_;
+ HSideEffectMap dominators_;
+ int dominated_index_;
+ int length_;
+};
+
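
A standalone sketch of the traversal strategy that GvnBasicBlockState above and AnalyzeGraph below implement: the recursive dominator-tree walk is driven by an explicit stack of frames, and only branching points pay for a copy of the value map. V8 additionally reuses frame objects through a doubly linked list; this simplified version just uses a vector of frames and a std::map in place of HValueMap.

#include <iostream>
#include <map>
#include <string>
#include <utility>
#include <vector>

struct Block {
  int id;
  std::vector<Block*> dominated;
};

typedef std::map<std::string, int> ValueMap;  // stand-in for HValueMap

// Dominator-tree walk with an explicit stack of (block, value map) frames, so
// deep trees cannot overflow the native stack. Every child except one gets a
// copy of the parent's map; one child inherits it in place, which is the same
// copy-saving trick next_dominated() uses for the last dominated block.
void Analyze(Block* entry) {
  std::vector<std::pair<Block*, ValueMap*> > stack;
  stack.push_back(std::make_pair(entry, new ValueMap()));
  while (!stack.empty()) {
    Block* block = stack.back().first;
    ValueMap* map = stack.back().second;
    stack.pop_back();

    std::cout << "analyzing B" << block->id << " with "
              << map->size() << " inherited entries\n";
    (*map)["defined-in-B" + std::string(1, char('0' + block->id))] = block->id;

    for (size_t i = 0; i < block->dominated.size(); ++i) {
      bool inherit = (i + 1 == block->dominated.size());
      ValueMap* child_map = inherit ? map : new ValueMap(*map);
      stack.push_back(std::make_pair(block->dominated[i], child_map));
    }
    if (block->dominated.empty()) delete map;  // leaf: nobody inherits the map
  }
}

int main() {
  Block c = {2, std::vector<Block*>()};
  Block d = {3, std::vector<Block*>()};
  Block b = {1, std::vector<Block*>()};
  b.dominated.push_back(&c);
  b.dominated.push_back(&d);
  Block a = {0, std::vector<Block*>()};
  a.dominated.push_back(&b);
  Analyze(&a);  // B2 and B3 each start from the two entries defined in B0, B1.
  return 0;
}
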
+// This is a recursive traversal of the dominator tree but it has been turned
+// into a loop to avoid stack overflows.
+// The logical "stack frames" of the recursion are kept in a list of
+// GvnBasicBlockState instances.
+void HGlobalValueNumberer::AnalyzeGraph() {
+ HBasicBlock* entry_block = graph_->entry_block();
+ HValueMap* entry_map = new(zone()) HValueMap(zone());
+ GvnBasicBlockState* current =
+ GvnBasicBlockState::CreateEntry(zone(), entry_block, entry_map);
- // Kill everything killed on any path between this block and the
- // dominated block.
- // We don't have to traverse these paths if the value map is
- // already empty.
- // If the range of block ids (block_id, dominated_id) is empty
- // there are no such paths.
- if (!successor_map->IsEmpty() &&
- block->block_id() + 1 < dominated->block_id()) {
- visited_on_paths_.Clear();
- successor_map->Kill(CollectSideEffectsOnPathsToDominatedBlock(block,
- dominated));
+ while (current != NULL) {
+ HBasicBlock* block = current->block();
+ HValueMap* map = current->map();
+ HSideEffectMap* dominators = current->dominators();
+
+ TRACE_GVN_2("Analyzing block B%d%s\n",
+ block->block_id(),
+ block->IsLoopHeader() ? " (loop header)" : "");
+
+ // If this is a loop header kill everything killed by the loop.
+ if (block->IsLoopHeader()) {
+ map->Kill(loop_side_effects_[block->block_id()]);
}
- AnalyzeBlock(dominated, successor_map);
+
+ // Go through all instructions of the current block.
+ HInstruction* instr = block->first();
+ while (instr != NULL) {
+ HInstruction* next = instr->next();
+ GVNFlagSet flags = instr->ChangesFlags();
+ if (!flags.IsEmpty()) {
+ // Clear all instructions in the map that are affected by side effects.
+ // Store instruction as the dominating one for tracked side effects.
+ map->Kill(flags);
+ dominators->Store(flags, instr);
+ TRACE_GVN_2("Instruction %d %s\n", instr->id(),
+ *GetGVNFlagsString(flags));
+ }
+ if (instr->CheckFlag(HValue::kUseGVN)) {
+ ASSERT(!instr->HasObservableSideEffects());
+ HValue* other = map->Lookup(instr);
+ if (other != NULL) {
+ ASSERT(instr->Equals(other) && other->Equals(instr));
+ TRACE_GVN_4("Replacing value %d (%s) with value %d (%s)\n",
+ instr->id(),
+ instr->Mnemonic(),
+ other->id(),
+ other->Mnemonic());
+ if (instr->HasSideEffects()) removed_side_effects_ = true;
+ instr->DeleteAndReplaceWith(other);
+ } else {
+ map->Add(instr, zone());
+ }
+ }
+ if (instr->CheckFlag(HValue::kTrackSideEffectDominators)) {
+ for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
+ HValue* other = dominators->at(i);
+ GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
+ GVNFlag depends_on_flag = HValue::DependsOnFlagFromInt(i);
+ if (instr->DependsOnFlags().Contains(depends_on_flag) &&
+ (other != NULL)) {
+ TRACE_GVN_5("Side-effect #%d in %d (%s) is dominated by %d (%s)\n",
+ i,
+ instr->id(),
+ instr->Mnemonic(),
+ other->id(),
+ other->Mnemonic());
+ instr->SetSideEffectDominator(changes_flag, other);
+ }
+ }
+ }
+ instr = next;
+ }
+
+ HBasicBlock* dominator_block;
+ GvnBasicBlockState* next =
+ current->next_in_dominator_tree_traversal(zone(), &dominator_block);
+
+ if (next != NULL) {
+ HBasicBlock* dominated = next->block();
+ HValueMap* successor_map = next->map();
+ HSideEffectMap* successor_dominators = next->dominators();
+
+ // Kill everything killed on any path between this block and the
+ // dominated block. We don't have to traverse these paths if the
+      // value map and the dominators list are already empty. If the range
+      // of block ids (block_id, dominated_id) is empty, there are no such
+ // paths.
+ if ((!successor_map->IsEmpty() || !successor_dominators->IsEmpty()) &&
+ dominator_block->block_id() + 1 < dominated->block_id()) {
+ visited_on_paths_.Clear();
+ GVNFlagSet side_effects_on_all_paths =
+ CollectSideEffectsOnPathsToDominatedBlock(dominator_block,
+ dominated);
+ successor_map->Kill(side_effects_on_all_paths);
+ successor_dominators->Kill(side_effects_on_all_paths);
+ }
+ }
+ current = next;
}
}
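
A compact standalone sketch of what the instruction loop above does per block: side-effecting instructions kill every mapped value whose dependencies overlap what they change, and pure instructions are either replaced by an earlier congruent value or recorded for later lookups. The flattened instruction record and flag bitsets are simplified assumptions, not V8 types; an equal |key| stands in for Hashcode()/Equals() congruence.

#include <iostream>
#include <map>
#include <string>
#include <vector>

struct Instr {
  int id;
  std::string key;      // equal key means "congruent" in this sketch
  unsigned changes;     // stand-in for ChangesFlags()
  unsigned depends_on;  // stand-in for DependsOnFlags()
  bool use_gvn;         // stand-in for CheckFlag(kUseGVN)
};

void RunLocalGVN(const std::vector<Instr>& block) {
  std::map<std::string, const Instr*> value_map;
  for (size_t i = 0; i < block.size(); ++i) {
    const Instr& instr = block[i];
    if (instr.changes != 0) {
      // Kill every mapped value that depends on something this one changes.
      std::map<std::string, const Instr*>::iterator it = value_map.begin();
      while (it != value_map.end()) {
        if (it->second->depends_on & instr.changes) value_map.erase(it++);
        else ++it;
      }
    }
    if (!instr.use_gvn) continue;
    std::map<std::string, const Instr*>::iterator hit = value_map.find(instr.key);
    if (hit != value_map.end()) {
      std::cout << "replace " << instr.id << " with " << hit->second->id << "\n";
    } else {
      value_map[instr.key] = &instr;
    }
  }
}

int main() {
  std::vector<Instr> block;
  Instr load1 = {1, "load x.f", 0u, 1u << 0, true};    // depends on "fields"
  Instr store = {2, "store x.f", 1u << 0, 0u, false};  // changes "fields"
  Instr load2 = {3, "load x.f", 0u, 1u << 0, true};    // not redundant: killed
  Instr load3 = {4, "load x.f", 0u, 1u << 0, true};    // redundant with load2
  block.push_back(load1);
  block.push_back(store);
  block.push_back(load2);
  block.push_back(load3);
  RunLocalGVN(block);  // prints only: replace 4 with 3
  return 0;
}
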
@@ -1727,7 +2057,7 @@ class HInferRepresentation BASE_EMBEDDED {
public:
explicit HInferRepresentation(HGraph* graph)
: graph_(graph),
- worklist_(8),
+ worklist_(8, graph->zone()),
in_worklist_(graph->GetMaximumValueID(), graph->zone()) { }
void Analyze();
@@ -1739,7 +2069,7 @@ class HInferRepresentation BASE_EMBEDDED {
void AddDependantsToWorklist(HValue* current);
void InferBasedOnUses(HValue* current);
- Zone* zone() { return graph_->zone(); }
+ Zone* zone() const { return graph_->zone(); }
HGraph* graph_;
ZoneList<HValue*> worklist_;
@@ -1751,7 +2081,7 @@ void HInferRepresentation::AddToWorklist(HValue* current) {
if (current->representation().IsSpecialization()) return;
if (!current->CheckFlag(HValue::kFlexibleRepresentation)) return;
if (in_worklist_.Contains(current->id())) return;
- worklist_.Add(current);
+ worklist_.Add(current, zone());
in_worklist_.Add(current->id());
}
@@ -1818,8 +2148,16 @@ Representation HInferRepresentation::TryChange(HValue* value) {
for (HUseIterator it(value->uses()); !it.Done(); it.Advance()) {
HValue* use = it.value();
- Representation rep = use->RequiredInputRepresentation(it.index());
+ Representation rep = use->ObservedInputRepresentation(it.index());
if (rep.IsNone()) continue;
+ if (FLAG_trace_representation) {
+ PrintF("%d %s is used by %d %s as %s\n",
+ value->id(),
+ value->Mnemonic(),
+ use->id(),
+ use->Mnemonic(),
+ rep.Mnemonic());
+ }
if (use->IsPhi()) HPhi::cast(use)->AddIndirectUsesTo(&use_count[0]);
use_count[rep.kind()] += use->LoopWeight();
}
@@ -1854,12 +2192,12 @@ void HInferRepresentation::Analyze() {
// bit-vector of length <number of phis>.
const ZoneList<HPhi*>* phi_list = graph_->phi_list();
int phi_count = phi_list->length();
- ZoneList<BitVector*> connected_phis(phi_count);
+ ZoneList<BitVector*> connected_phis(phi_count, graph_->zone());
for (int i = 0; i < phi_count; ++i) {
phi_list->at(i)->InitRealUses(i);
BitVector* connected_set = new(zone()) BitVector(phi_count, graph_->zone());
connected_set->Add(i);
- connected_phis.Add(connected_set);
+ connected_phis.Add(connected_set, zone());
}
// (2) Do a fixed point iteration to find the set of connected phis. A
@@ -1883,21 +2221,34 @@ void HInferRepresentation::Analyze() {
}
}
- // (3) Use the phi reachability information from step 2 to
- // (a) sum up the non-phi use counts of all connected phis.
- // (b) push information about values which can't be converted to integer
- // without deoptimization through the phi use-def chains, avoiding
- // unnecessary deoptimizations later.
+ // (3a) Use the phi reachability information from step 2 to
+ // push information about values which can't be converted to integer
+ // without deoptimization through the phi use-def chains, avoiding
+ // unnecessary deoptimizations later.
for (int i = 0; i < phi_count; ++i) {
HPhi* phi = phi_list->at(i);
bool cti = phi->AllOperandsConvertibleToInteger();
+ if (cti) continue;
+
+ for (BitVector::Iterator it(connected_phis.at(i));
+ !it.Done();
+ it.Advance()) {
+ HPhi* phi = phi_list->at(it.Current());
+ phi->set_is_convertible_to_integer(false);
+ phi->ResetInteger32Uses();
+ }
+ }
+
+ // (3b) Use the phi reachability information from step 2 to
+ // sum up the non-phi use counts of all connected phis.
+ for (int i = 0; i < phi_count; ++i) {
+ HPhi* phi = phi_list->at(i);
for (BitVector::Iterator it(connected_phis.at(i));
!it.Done();
it.Advance()) {
int index = it.Current();
- HPhi* it_use = phi_list->at(it.Current());
- if (index != i) phi->AddNonPhiUsesFrom(it_use); // Don't count twice!
- if (!cti) it_use->set_is_convertible_to_integer(false);
+ HPhi* it_use = phi_list->at(index);
+ if (index != i) phi->AddNonPhiUsesFrom(it_use); // Don't count twice.
}
}
@@ -1955,9 +2306,9 @@ void HGraph::InitializeInferredTypes(int from_inclusive, int to_inclusive) {
i = last_back_edge->block_id();
// Update phis of the loop header now after the whole loop body is
// guaranteed to be processed.
- ZoneList<HValue*> worklist(block->phis()->length());
+ ZoneList<HValue*> worklist(block->phis()->length(), zone());
for (int j = 0; j < block->phis()->length(); ++j) {
- worklist.Add(block->phis()->at(j));
+ worklist.Add(block->phis()->at(j), zone());
}
InferTypes(&worklist);
}
@@ -2024,8 +2375,8 @@ void HGraph::InsertRepresentationChangeForUse(HValue* value,
HConstant* constant = HConstant::cast(value);
// Try to create a new copy of the constant with the new representation.
new_value = is_truncating
- ? constant->CopyToTruncatedInt32()
- : constant->CopyToRepresentation(to);
+ ? constant->CopyToTruncatedInt32(zone())
+ : constant->CopyToRepresentation(to, zone());
}
if (new_value == NULL) {
@@ -2178,6 +2529,8 @@ FunctionState::FunctionState(HGraphBuilder* owner,
return_handling_(return_handling),
function_return_(NULL),
test_context_(NULL),
+ entry_(NULL),
+ arguments_elements_(NULL),
outer_(owner->function_state()) {
if (outer_ != NULL) {
// State for an inline function.
@@ -2337,8 +2690,8 @@ void TestContext::ReturnControl(HControlInstruction* instr, int ast_id) {
instr->SetSuccessorAt(0, empty_true);
instr->SetSuccessorAt(1, empty_false);
owner()->current_block()->Finish(instr);
- empty_true->Goto(if_true(), owner()->function_state()->drop_extra());
- empty_false->Goto(if_false(), owner()->function_state()->drop_extra());
+ empty_true->Goto(if_true(), owner()->function_state());
+ empty_false->Goto(if_false(), owner()->function_state());
owner()->set_current_block(NULL);
}
@@ -2359,8 +2712,8 @@ void TestContext::BuildBranch(HValue* value) {
HBranch* test = new(zone()) HBranch(value, empty_true, empty_false, expected);
builder->current_block()->Finish(test);
- empty_true->Goto(if_true(), owner()->function_state()->drop_extra());
- empty_false->Goto(if_false(), owner()->function_state()->drop_extra());
+ empty_true->Goto(if_true(), owner()->function_state());
+ empty_false->Goto(if_false(), owner()->function_state());
builder->set_current_block(NULL);
}
@@ -2442,7 +2795,7 @@ void HGraphBuilder::VisitExpressions(ZoneList<Expression*>* exprs) {
HGraph* HGraphBuilder::CreateGraph() {
- graph_ = new(zone()) HGraph(info());
+ graph_ = new(zone()) HGraph(info(), zone());
if (FLAG_hydrogen_stats) HStatistics::Instance()->Initialize(info());
{
@@ -2484,7 +2837,7 @@ HGraph* HGraphBuilder::CreateGraph() {
// Handle implicit declaration of the function name in named function
// expressions before other declarations.
if (scope->is_function_scope() && scope->function() != NULL) {
- HandleDeclaration(scope->function(), CONST, NULL, NULL);
+ VisitVariableDeclaration(scope->function());
}
VisitDeclarations(scope->declarations());
AddSimulate(AstNode::kDeclarationsId);
@@ -2565,27 +2918,413 @@ HGraph* HGraphBuilder::CreateGraph() {
HStackCheckEliminator sce(graph());
sce.Process();
- // Replace the results of check instructions with the original value, if the
- // result is used. This is safe now, since we don't do code motion after this
- // point. It enables better register allocation since the value produced by
- // check instructions is really a copy of the original value.
- graph()->ReplaceCheckedValues();
+ graph()->EliminateRedundantBoundsChecks();
+ graph()->DehoistSimpleArrayIndexComputations();
return graph();
}
-void HGraph::ReplaceCheckedValues() {
- HPhase phase("H_Replace checked values", this);
+// We try to "factor up" HBoundsCheck instructions towards the root of the
+// dominator tree.
+// For now we handle checks where the index is like "exp + int32value".
+// If in the dominator tree we check "exp + v1" and later (in a dominated
+// block) "exp + v2", then if v2 <= v1 we can safely remove the second check,
+// and if v2 > v1 we can use v2 in the first check and again remove the
+// second. To do so we keep a dictionary of all checks where the key is the
+// pair "exp, length" (see the sketch after the BoundsCheckKey class below).
+// The class BoundsCheckKey represents this key.
+class BoundsCheckKey : public ZoneObject {
+ public:
+ HValue* IndexBase() const { return index_base_; }
+ HValue* Length() const { return length_; }
+
+ uint32_t Hash() {
+ return static_cast<uint32_t>(index_base_->Hashcode() ^ length_->Hashcode());
+ }
+
+ static BoundsCheckKey* Create(Zone* zone,
+ HBoundsCheck* check,
+ int32_t* offset) {
+ HValue* index_base = NULL;
+ HConstant* constant = NULL;
+ bool is_sub = false;
+
+ if (check->index()->IsAdd()) {
+ HAdd* index = HAdd::cast(check->index());
+ if (index->left()->IsConstant()) {
+ constant = HConstant::cast(index->left());
+ index_base = index->right();
+ } else if (index->right()->IsConstant()) {
+ constant = HConstant::cast(index->right());
+ index_base = index->left();
+ }
+ } else if (check->index()->IsSub()) {
+ HSub* index = HSub::cast(check->index());
+ is_sub = true;
+ if (index->left()->IsConstant()) {
+ constant = HConstant::cast(index->left());
+ index_base = index->right();
+ } else if (index->right()->IsConstant()) {
+ constant = HConstant::cast(index->right());
+ index_base = index->left();
+ }
+ }
+
+ if (constant != NULL && constant->HasInteger32Value()) {
+ *offset = is_sub ? - constant->Integer32Value()
+ : constant->Integer32Value();
+ } else {
+ *offset = 0;
+ index_base = check->index();
+ }
+
+ return new(zone) BoundsCheckKey(index_base, check->length());
+ }
+
+ private:
+ BoundsCheckKey(HValue* index_base, HValue* length)
+ : index_base_(index_base),
+ length_(length) { }
+
+ HValue* index_base_;
+ HValue* length_;
+};
+
+
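// Editor's illustration (a minimal sketch, not part of this patch): the
// range-covering idea behind BoundsCheckKey and CoverCheck. The names
// CoversRange, base, length, lower and upper are hypothetical. Two checks
// on a[base + 1] and a[base + 3] share the key (base, length); the second
// check is folded into the first by widening the covered offset range to
// [1, 3].
#include <stdint.h>

static bool CoversRange(int32_t base, int32_t length,
                        int32_t lower, int32_t upper) {
  // Equivalent to: 0 <= base + lower  &&  base + upper < length.
  // Subtracting (upper - lower) from both sides of the second condition
  // gives both conditions the same left-hand side, base + lower, which is
  // why the rewritten HBoundsCheck can test the whole range at once.
  int64_t index = static_cast<int64_t>(base) + lower;
  int64_t limit = static_cast<int64_t>(length) + lower - upper;
  return index >= 0 && index < limit;
}
// E.g. CoversRange(i, a_length, 1, 3) stands in for the two original checks
// on a[i + 1] and a[i + 3].
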
+// Data about each HBoundsCheck that can be eliminated or moved.
+// It is the "value" in the dictionary indexed by "base-index, length"
+// (the key is BoundsCheckKey).
+// We scan the code with a dominator tree traversal.
+// While traversing the dominator tree we keep a stack (implemented as a singly
+// linked list) of "data" for each basic block that contains a relevant check
+// with the same key (the dictionary holds the head of the list).
+// We also keep all the "data" created for a given basic block in a list, and
+// use it to "clean up" the dictionary when backtracking in the dominator tree
+// traversal.
+// By doing this, each dictionary entry always points directly to the check
+// that dominates the code currently being examined.
+// We also track the current "offset" of the index expression and use it to
+// decide if any check is already "covered" (so it can be removed) or not.
+class BoundsCheckBbData: public ZoneObject {
+ public:
+ BoundsCheckKey* Key() const { return key_; }
+ int32_t LowerOffset() const { return lower_offset_; }
+ int32_t UpperOffset() const { return upper_offset_; }
+ HBasicBlock* BasicBlock() const { return basic_block_; }
+ HBoundsCheck* Check() const { return check_; }
+ BoundsCheckBbData* NextInBasicBlock() const { return next_in_bb_; }
+ BoundsCheckBbData* FatherInDominatorTree() const { return father_in_dt_; }
+
+ bool OffsetIsCovered(int32_t offset) const {
+ return offset >= LowerOffset() && offset <= UpperOffset();
+ }
+
+ // This method removes new_check and modifies the current check so that it
+ // also "covers" what new_check covered.
+ // The obvious precondition is that new_check follows Check() in the
+ // same basic block, and that new_offset is not covered (otherwise we
+ // could simply remove new_check).
+ // As a consequence, LowerOffset() or UpperOffset() changes (the covered
+ // range grows).
+ //
+ // In the general case the check covering the current range should be like
+ // these two checks:
+ // 0 <= Key()->IndexBase() + LowerOffset()
+ // Key()->IndexBase() + UpperOffset() < Key()->Length()
+ //
+ // We can transform the second check like this:
+ // Key()->IndexBase() + LowerOffset() <
+ // Key()->Length() + (LowerOffset() - UpperOffset())
+ // so we can handle both checks with a single unsigned comparison.
+ //
+ // The bulk of this method changes Check()->index() and Check()->length()
+ // replacing them with new HAdd instructions to perform the transformation
+ // described above.
+ void CoverCheck(HBoundsCheck* new_check,
+ int32_t new_offset) {
+ ASSERT(new_check->index()->representation().IsInteger32());
+
+ if (new_offset > upper_offset_) {
+ upper_offset_ = new_offset;
+ } else if (new_offset < lower_offset_) {
+ lower_offset_ = new_offset;
+ } else {
+ ASSERT(false);
+ }
+
+ BuildOffsetAdd(&added_index_,
+ &added_index_offset_,
+ Key()->IndexBase(),
+ new_check->index()->representation(),
+ lower_offset_);
+ Check()->SetOperandAt(0, added_index_);
+ BuildOffsetAdd(&added_length_,
+ &added_length_offset_,
+ Key()->Length(),
+ new_check->length()->representation(),
+ lower_offset_ - upper_offset_);
+ Check()->SetOperandAt(1, added_length_);
+
+ new_check->DeleteAndReplaceWith(NULL);
+ }
+
+ void RemoveZeroOperations() {
+ RemoveZeroAdd(&added_index_, &added_index_offset_);
+ RemoveZeroAdd(&added_length_, &added_length_offset_);
+ }
+
+ BoundsCheckBbData(BoundsCheckKey* key,
+ int32_t lower_offset,
+ int32_t upper_offset,
+ HBasicBlock* bb,
+ HBoundsCheck* check,
+ BoundsCheckBbData* next_in_bb,
+ BoundsCheckBbData* father_in_dt)
+ : key_(key),
+ lower_offset_(lower_offset),
+ upper_offset_(upper_offset),
+ basic_block_(bb),
+ check_(check),
+ added_index_offset_(NULL),
+ added_index_(NULL),
+ added_length_offset_(NULL),
+ added_length_(NULL),
+ next_in_bb_(next_in_bb),
+ father_in_dt_(father_in_dt) { }
+
+ private:
+ BoundsCheckKey* key_;
+ int32_t lower_offset_;
+ int32_t upper_offset_;
+ HBasicBlock* basic_block_;
+ HBoundsCheck* check_;
+ HConstant* added_index_offset_;
+ HAdd* added_index_;
+ HConstant* added_length_offset_;
+ HAdd* added_length_;
+ BoundsCheckBbData* next_in_bb_;
+ BoundsCheckBbData* father_in_dt_;
+
+ void BuildOffsetAdd(HAdd** add,
+ HConstant** constant,
+ HValue* original_value,
+ Representation representation,
+ int32_t new_offset) {
+ HConstant* new_constant = new(BasicBlock()->zone())
+ HConstant(Handle<Object>(Smi::FromInt(new_offset)),
+ Representation::Integer32());
+ if (*add == NULL) {
+ new_constant->InsertBefore(Check());
+ *add = new(BasicBlock()->zone()) HAdd(NULL,
+ original_value,
+ new_constant);
+ (*add)->AssumeRepresentation(representation);
+ (*add)->InsertBefore(Check());
+ } else {
+ new_constant->InsertBefore(*add);
+ (*constant)->DeleteAndReplaceWith(new_constant);
+ }
+ *constant = new_constant;
+ }
+
+ void RemoveZeroAdd(HAdd** add, HConstant** constant) {
+ if (*add != NULL && (*constant)->Integer32Value() == 0) {
+ (*add)->DeleteAndReplaceWith((*add)->left());
+ (*constant)->DeleteAndReplaceWith(NULL);
+ }
+ }
+};
+
+
+static bool BoundsCheckKeyMatch(void* key1, void* key2) {
+ BoundsCheckKey* k1 = static_cast<BoundsCheckKey*>(key1);
+ BoundsCheckKey* k2 = static_cast<BoundsCheckKey*>(key2);
+ return k1->IndexBase() == k2->IndexBase() && k1->Length() == k2->Length();
+}
+
+
+class BoundsCheckTable : private ZoneHashMap {
+ public:
+ BoundsCheckBbData** LookupOrInsert(BoundsCheckKey* key, Zone* zone) {
+ return reinterpret_cast<BoundsCheckBbData**>(
+ &(Lookup(key, key->Hash(), true, ZoneAllocationPolicy(zone))->value));
+ }
+
+ void Insert(BoundsCheckKey* key, BoundsCheckBbData* data, Zone* zone) {
+ Lookup(key, key->Hash(), true, ZoneAllocationPolicy(zone))->value = data;
+ }
+
+ void Delete(BoundsCheckKey* key) {
+ Remove(key, key->Hash());
+ }
+
+ explicit BoundsCheckTable(Zone* zone)
+ : ZoneHashMap(BoundsCheckKeyMatch, ZoneHashMap::kDefaultHashMapCapacity,
+ ZoneAllocationPolicy(zone)) { }
+};
+
+
+// Eliminates checks in bb and recursively in the dominated blocks.
+// Also replaces the results of check instructions with the original value, if
+// the result is used. This is safe now, since we don't do code motion after
+// this point. It enables better register allocation since the value produced
+// by check instructions is really a copy of the original value.
+void HGraph::EliminateRedundantBoundsChecks(HBasicBlock* bb,
+ BoundsCheckTable* table) {
+ BoundsCheckBbData* bb_data_list = NULL;
+
+ for (HInstruction* i = bb->first(); i != NULL; i = i->next()) {
+ if (!i->IsBoundsCheck()) continue;
+
+ HBoundsCheck* check = HBoundsCheck::cast(i);
+ check->ReplaceAllUsesWith(check->index());
+
+ if (!FLAG_array_bounds_checks_elimination) continue;
+
+ int32_t offset;
+ BoundsCheckKey* key =
+ BoundsCheckKey::Create(zone(), check, &offset);
+ BoundsCheckBbData** data_p = table->LookupOrInsert(key, zone());
+ BoundsCheckBbData* data = *data_p;
+ if (data == NULL) {
+ bb_data_list = new(zone()) BoundsCheckBbData(key,
+ offset,
+ offset,
+ bb,
+ check,
+ bb_data_list,
+ NULL);
+ *data_p = bb_data_list;
+ } else if (data->OffsetIsCovered(offset)) {
+ check->DeleteAndReplaceWith(NULL);
+ } else if (data->BasicBlock() == bb) {
+ data->CoverCheck(check, offset);
+ } else {
+ int32_t new_lower_offset = offset < data->LowerOffset()
+ ? offset
+ : data->LowerOffset();
+ int32_t new_upper_offset = offset > data->UpperOffset()
+ ? offset
+ : data->UpperOffset();
+ bb_data_list = new(zone()) BoundsCheckBbData(key,
+ new_lower_offset,
+ new_upper_offset,
+ bb,
+ check,
+ bb_data_list,
+ data);
+ table->Insert(key, bb_data_list, zone());
+ }
+ }
+
+ for (int i = 0; i < bb->dominated_blocks()->length(); ++i) {
+ EliminateRedundantBoundsChecks(bb->dominated_blocks()->at(i), table);
+ }
+
+ for (BoundsCheckBbData* data = bb_data_list;
+ data != NULL;
+ data = data->NextInBasicBlock()) {
+ data->RemoveZeroOperations();
+ if (data->FatherInDominatorTree()) {
+ table->Insert(data->Key(), data->FatherInDominatorTree(), zone());
+ } else {
+ table->Delete(data->Key());
+ }
+ }
+}
+
+
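// Editor's illustration (a minimal sketch with hypothetical types, not part
// of this patch): the backtracking step performed on bb_data_list above.
// Once a block's dominator subtree has been processed, each key the block
// touched reverts to the dominating block's data (its "father"), or is
// removed if no dominating check existed, so the table only ever exposes
// checks that dominate the code currently being visited.
#include <cstddef>
#include <map>

struct DummyCheckData {
  const DummyCheckData* father_in_dominator_tree;
};

static void RestoreAfterSubtree(std::map<int, const DummyCheckData*>* table,
                                int key,
                                const DummyCheckData* block_data) {
  if (block_data->father_in_dominator_tree != NULL) {
    // Re-expose the dominating block's check.
    (*table)[key] = block_data->father_in_dominator_tree;
  } else {
    // No dominating check existed before this block.
    table->erase(key);
  }
}
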
+void HGraph::EliminateRedundantBoundsChecks() {
+ HPhase phase("H_Eliminate bounds checks", this);
+ AssertNoAllocation no_gc;
+ BoundsCheckTable checks_table(zone());
+ EliminateRedundantBoundsChecks(entry_block(), &checks_table);
+}
+
+
+static void DehoistArrayIndex(ArrayInstructionInterface* array_operation) {
+ HValue* index = array_operation->GetKey();
+
+ HConstant* constant;
+ HValue* subexpression;
+ int32_t sign;
+ if (index->IsAdd()) {
+ sign = 1;
+ HAdd* add = HAdd::cast(index);
+ if (add->left()->IsConstant()) {
+ subexpression = add->right();
+ constant = HConstant::cast(add->left());
+ } else if (add->right()->IsConstant()) {
+ subexpression = add->left();
+ constant = HConstant::cast(add->right());
+ } else {
+ return;
+ }
+ } else if (index->IsSub()) {
+ sign = -1;
+ HSub* sub = HSub::cast(index);
+ if (sub->left()->IsConstant()) {
+ subexpression = sub->right();
+ constant = HConstant::cast(sub->left());
+ } else if (sub->right()->IsConstant()) {
+ subexpression = sub->left();
+ constant = HConstant::cast(sub->right());
+ } else {
+ return;
+ }
+ } else {
+ return;
+ }
+
+ if (!constant->HasInteger32Value()) return;
+ int32_t value = constant->Integer32Value() * sign;
+ // We limit offset values to 30 bits because we want to avoid the risk of
+ // overflows when the offset is added to the object header size.
+ if (value >= 1 << 30 || value < 0) return;
+ array_operation->SetKey(subexpression);
+ if (index->HasNoUses()) {
+ index->DeleteAndReplaceWith(NULL);
+ }
+ ASSERT(value >= 0);
+ array_operation->SetIndexOffset(static_cast<uint32_t>(value));
+ array_operation->SetDehoisted(true);
+}
+
+
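// Editor's illustration (a minimal sketch, not part of this patch): what
// dehoisting an index like "i + 5" buys at the access site. The helper name
// LoadKeyedDouble and its parameters are hypothetical. Instead of
// materializing i + 5 as a separate value, the constant becomes a fixed
// index offset that the backend can fold into the addressing mode.
#include <stdint.h>

static inline double LoadKeyedDouble(const double* elements, int32_t key,
                                     uint32_t index_offset) {
  // The bounds check emitted earlier in the pipeline has already validated
  // key + index_offset against the array length.
  return elements[static_cast<uint32_t>(key) + index_offset];
}
// A dehoisted load of a[i + 5] becomes LoadKeyedDouble(elements, i, 5),
// mirroring SetKey(i) plus SetIndexOffset(5) above.
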
+void HGraph::DehoistSimpleArrayIndexComputations() {
+ if (!FLAG_array_index_dehoisting) return;
+
+ HPhase phase("H_Dehoist index computations", this);
for (int i = 0; i < blocks()->length(); ++i) {
- HInstruction* instr = blocks()->at(i)->first();
- while (instr != NULL) {
- if (instr->IsBoundsCheck()) {
- // Replace all uses of the checked value with the original input.
- ASSERT(instr->UseCount() > 0);
- instr->ReplaceAllUsesWith(HBoundsCheck::cast(instr)->index());
+ for (HInstruction* instr = blocks()->at(i)->first();
+ instr != NULL;
+ instr = instr->next()) {
+ ArrayInstructionInterface* array_instruction = NULL;
+ if (instr->IsLoadKeyedFastElement()) {
+ HLoadKeyedFastElement* op = HLoadKeyedFastElement::cast(instr);
+ array_instruction = static_cast<ArrayInstructionInterface*>(op);
+ } else if (instr->IsLoadKeyedFastDoubleElement()) {
+ HLoadKeyedFastDoubleElement* op =
+ HLoadKeyedFastDoubleElement::cast(instr);
+ array_instruction = static_cast<ArrayInstructionInterface*>(op);
+ } else if (instr->IsLoadKeyedSpecializedArrayElement()) {
+ HLoadKeyedSpecializedArrayElement* op =
+ HLoadKeyedSpecializedArrayElement::cast(instr);
+ array_instruction = static_cast<ArrayInstructionInterface*>(op);
+ } else if (instr->IsStoreKeyedFastElement()) {
+ HStoreKeyedFastElement* op = HStoreKeyedFastElement::cast(instr);
+ array_instruction = static_cast<ArrayInstructionInterface*>(op);
+ } else if (instr->IsStoreKeyedFastDoubleElement()) {
+ HStoreKeyedFastDoubleElement* op =
+ HStoreKeyedFastDoubleElement::cast(instr);
+ array_instruction = static_cast<ArrayInstructionInterface*>(op);
+ } else if (instr->IsStoreKeyedSpecializedArrayElement()) {
+ HStoreKeyedSpecializedArrayElement* op =
+ HStoreKeyedSpecializedArrayElement::cast(instr);
+ array_instruction = static_cast<ArrayInstructionInterface*>(op);
+ } else {
+ continue;
}
- instr = instr->next();
+ DehoistArrayIndex(array_instruction);
}
}
}
@@ -2619,9 +3358,9 @@ void HGraphBuilder::PushAndAdd(HInstruction* instr) {
template <class Instruction>
HInstruction* HGraphBuilder::PreProcessCall(Instruction* call) {
int count = call->argument_count();
- ZoneList<HValue*> arguments(count);
+ ZoneList<HValue*> arguments(count, zone());
for (int i = 0; i < count; ++i) {
- arguments.Add(Pop());
+ arguments.Add(Pop(), zone());
}
while (!arguments.is_empty()) {
@@ -2701,7 +3440,7 @@ void HGraphBuilder::VisitBlock(Block* stmt) {
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
- if (stmt->block_scope() != NULL) {
+ if (stmt->scope() != NULL) {
return Bailout("ScopedBlock");
}
BreakAndContinueInfo break_info(stmt);
@@ -2855,10 +3594,10 @@ void HGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
if (context->IsTest()) {
TestContext* test = TestContext::cast(context);
CHECK_ALIVE(VisitForEffect(stmt->expression()));
- current_block()->Goto(test->if_true(), function_state()->drop_extra());
+ current_block()->Goto(test->if_true(), function_state());
} else if (context->IsEffect()) {
CHECK_ALIVE(VisitForEffect(stmt->expression()));
- current_block()->Goto(function_return(), function_state()->drop_extra());
+ current_block()->Goto(function_return(), function_state());
} else {
ASSERT(context->IsValue());
CHECK_ALIVE(VisitForValue(stmt->expression()));
@@ -2875,10 +3614,10 @@ void HGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
current_block()->Finish(typecheck);
if_spec_object->AddLeaveInlined(return_value,
function_return(),
- function_state()->drop_extra());
+ function_state());
not_spec_object->AddLeaveInlined(receiver,
function_return(),
- function_state()->drop_extra());
+ function_state());
}
} else {
// Return from an inlined function, visit the subexpression in the
@@ -2890,14 +3629,14 @@ void HGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
test->if_false());
} else if (context->IsEffect()) {
CHECK_ALIVE(VisitForEffect(stmt->expression()));
- current_block()->Goto(function_return(), function_state()->drop_extra());
+ current_block()->Goto(function_return(), function_state());
} else {
ASSERT(context->IsValue());
CHECK_ALIVE(VisitForValue(stmt->expression()));
HValue* return_value = Pop();
current_block()->AddLeaveInlined(return_value,
function_return(),
- function_state()->drop_extra());
+ function_state());
}
}
set_current_block(NULL);
@@ -3121,13 +3860,13 @@ bool HGraphBuilder::PreProcessOsrEntry(IterationStatement* statement) {
int first_expression_index = environment()->first_expression_index();
int length = environment()->length();
ZoneList<HUnknownOSRValue*>* osr_values =
- new(zone()) ZoneList<HUnknownOSRValue*>(length);
+ new(zone()) ZoneList<HUnknownOSRValue*>(length, zone());
for (int i = 0; i < first_expression_index; ++i) {
HUnknownOSRValue* osr_value = new(zone()) HUnknownOSRValue;
AddInstruction(osr_value);
environment()->Bind(i, osr_value);
- osr_values->Add(osr_value);
+ osr_values->Add(osr_value, zone());
}
if (first_expression_index != length) {
@@ -3136,7 +3875,7 @@ bool HGraphBuilder::PreProcessOsrEntry(IterationStatement* statement) {
HUnknownOSRValue* osr_value = new(zone()) HUnknownOSRValue;
AddInstruction(osr_value);
environment()->Push(osr_value);
- osr_values->Add(osr_value);
+ osr_values->Add(osr_value, zone());
}
}
@@ -3389,8 +4128,7 @@ void HGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
HValue* key = AddInstruction(
new(zone()) HLoadKeyedFastElement(
environment()->ExpressionStackAt(2), // Enum cache.
- environment()->ExpressionStackAt(0), // Iteration index.
- HLoadKeyedFastElement::OMIT_HOLE_CHECK));
+ environment()->ExpressionStackAt(0))); // Iteration index.
// Check if the expected map still matches that of the enumerable.
// If not just deoptimize.
@@ -3681,11 +4419,12 @@ static bool IsFastLiteral(Handle<JSObject> boilerplate,
elements->map() != boilerplate->GetHeap()->fixed_cow_array_map()) {
if (boilerplate->HasFastDoubleElements()) {
*total_size += FixedDoubleArray::SizeFor(elements->length());
- } else if (boilerplate->HasFastElements()) {
+ } else if (boilerplate->HasFastObjectElements()) {
+ Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
int length = elements->length();
for (int i = 0; i < length; i++) {
if ((*max_properties)-- == 0) return false;
- Handle<Object> value = JSObject::GetElement(boilerplate, i);
+ Handle<Object> value(fast_elements->get(i));
if (value->IsJSObject()) {
Handle<JSObject> value_object = Handle<JSObject>::cast(value);
if (!IsFastLiteral(value_object,
@@ -3763,7 +4502,7 @@ void HGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
// of the property values and is the value of the entire expression.
PushAndAdd(literal);
- expr->CalculateEmitStore();
+ expr->CalculateEmitStore(zone());
for (int i = 0; i < expr->properties()->length(); i++) {
ObjectLiteral::Property* property = expr->properties()->at(i);
@@ -3782,7 +4521,8 @@ void HGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
property->RecordTypeFeedback(oracle());
CHECK_ALIVE(VisitForValue(value));
HValue* value = Pop();
- HInstruction* store = BuildStoreNamed(literal, value, property);
+ HInstruction* store;
+ CHECK_ALIVE(store = BuildStoreNamed(literal, value, property));
AddInstruction(store);
if (store->HasObservableSideEffects()) AddSimulate(key->id());
} else {
@@ -3887,11 +4627,13 @@ void HGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
Representation::Integer32()));
switch (boilerplate_elements_kind) {
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
// Smi-only arrays need a smi check.
AddInstruction(new(zone()) HCheckSmi(value));
// Fall through.
case FAST_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
AddInstruction(new(zone()) HStoreKeyedFastElement(
elements,
key,
@@ -3899,6 +4641,7 @@ void HGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
boilerplate_elements_kind));
break;
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
AddInstruction(new(zone()) HStoreKeyedFastDoubleElement(elements,
key,
value));
@@ -3914,21 +4657,22 @@ void HGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
}
-// Sets the lookup result and returns true if the store can be inlined.
-static bool ComputeStoredField(Handle<Map> type,
- Handle<String> name,
- LookupResult* lookup) {
+// Sets the lookup result and returns true if the load/store can be inlined.
+static bool ComputeLoadStoreField(Handle<Map> type,
+ Handle<String> name,
+ LookupResult* lookup,
+ bool is_store) {
type->LookupInDescriptors(NULL, *name, lookup);
if (!lookup->IsFound()) return false;
if (lookup->type() == FIELD) return true;
- return (lookup->type() == MAP_TRANSITION) &&
+ return is_store && (lookup->type() == MAP_TRANSITION) &&
(type->unused_property_fields() > 0);
}
-static int ComputeStoredFieldIndex(Handle<Map> type,
- Handle<String> name,
- LookupResult* lookup) {
+static int ComputeLoadStoreFieldIndex(Handle<Map> type,
+ Handle<String> name,
+ LookupResult* lookup) {
ASSERT(lookup->type() == FIELD || lookup->type() == MAP_TRANSITION);
if (lookup->type() == FIELD) {
return lookup->GetLocalFieldIndexFromMap(*type);
@@ -3945,13 +4689,40 @@ HInstruction* HGraphBuilder::BuildStoreNamedField(HValue* object,
Handle<Map> type,
LookupResult* lookup,
bool smi_and_map_check) {
+ ASSERT(lookup->IsFound());
if (smi_and_map_check) {
AddInstruction(new(zone()) HCheckNonSmi(object));
- AddInstruction(new(zone()) HCheckMap(object, type, NULL,
- ALLOW_ELEMENT_TRANSITION_MAPS));
+ AddInstruction(HCheckMaps::NewWithTransitions(object, type, zone()));
+ }
+
+ // If the property does not exist yet, we have to check that it wasn't made
+ // read-only or turned into a setter by modifications made to the prototype
+ // chain in the meantime.
+ if (!lookup->IsProperty()) {
+ Object* proto = type->prototype();
+ // First check that the prototype chain isn't affected already.
+ LookupResult proto_result(isolate());
+ proto->Lookup(*name, &proto_result);
+ if (proto_result.IsProperty()) {
+ // If the inherited property could induce readonly-ness, bail out.
+ if (proto_result.IsReadOnly() || !proto_result.IsCacheable()) {
+ Bailout("improper object on prototype chain for store");
+ return NULL;
+ }
+ // We only need to check up to the preexisting property.
+ proto = proto_result.holder();
+ } else {
+ // Otherwise, find the top prototype.
+ while (proto->GetPrototype()->IsJSObject()) proto = proto->GetPrototype();
+ ASSERT(proto->GetPrototype()->IsNull());
+ }
+ ASSERT(proto->IsJSObject());
+ AddInstruction(new(zone()) HCheckPrototypeMaps(
+ Handle<JSObject>(JSObject::cast(type->prototype())),
+ Handle<JSObject>(JSObject::cast(proto))));
}
- int index = ComputeStoredFieldIndex(type, name, lookup);
+ int index = ComputeLoadStoreFieldIndex(type, name, lookup);
bool is_in_object = index < 0;
int offset = index * kPointerSize;
if (index < 0) {
@@ -3997,7 +4768,7 @@ HInstruction* HGraphBuilder::BuildStoreNamed(HValue* object,
LookupResult lookup(isolate());
Handle<Map> type = prop->GetReceiverType();
bool is_monomorphic = prop->IsMonomorphic() &&
- ComputeStoredField(type, name, &lookup);
+ ComputeLoadStoreField(type, name, &lookup, true);
return is_monomorphic
? BuildStoreNamedField(object, name, value, type, &lookup,
@@ -4019,7 +4790,7 @@ HInstruction* HGraphBuilder::BuildStoreNamed(HValue* object,
LookupResult lookup(isolate());
SmallMapList* types = expr->GetReceiverTypes();
bool is_monomorphic = expr->IsMonomorphic() &&
- ComputeStoredField(types->first(), name, &lookup);
+ ComputeLoadStoreField(types->first(), name, &lookup, true);
return is_monomorphic
? BuildStoreNamedField(object, name, value, types->first(), &lookup,
@@ -4028,6 +4799,60 @@ HInstruction* HGraphBuilder::BuildStoreNamed(HValue* object,
}
+void HGraphBuilder::HandlePolymorphicLoadNamedField(Property* expr,
+ HValue* object,
+ SmallMapList* types,
+ Handle<String> name) {
+ int count = 0;
+ int previous_field_offset = 0;
+ bool previous_field_is_in_object = false;
+ bool is_monomorphic_field = true;
+ Handle<Map> map;
+ LookupResult lookup(isolate());
+ for (int i = 0; i < types->length() && count < kMaxLoadPolymorphism; ++i) {
+ map = types->at(i);
+ if (ComputeLoadStoreField(map, name, &lookup, false)) {
+ int index = ComputeLoadStoreFieldIndex(map, name, &lookup);
+ bool is_in_object = index < 0;
+ int offset = index * kPointerSize;
+ if (index < 0) {
+ // Negative property indices are in-object properties, indexed
+ // from the end of the fixed part of the object.
+ offset += map->instance_size();
+ } else {
+ offset += FixedArray::kHeaderSize;
+ }
+ if (count == 0) {
+ previous_field_offset = offset;
+ previous_field_is_in_object = is_in_object;
+ } else if (is_monomorphic_field) {
+ is_monomorphic_field = (offset == previous_field_offset) &&
+ (is_in_object == previous_field_is_in_object);
+ }
+ ++count;
+ }
+ }
+
+ // Use monomorphic load if property lookup results in the same field index
+ // for all maps. Requires special map check on the set of all handled maps.
+ HInstruction* instr;
+ if (count == types->length() && is_monomorphic_field) {
+ AddInstruction(new(zone()) HCheckMaps(object, types, zone()));
+ instr = BuildLoadNamedField(object, expr, map, &lookup, false);
+ } else {
+ HValue* context = environment()->LookupContext();
+ instr = new(zone()) HLoadNamedFieldPolymorphic(context,
+ object,
+ types,
+ name,
+ zone());
+ }
+
+ instr->set_position(expr->position());
+ return ast_context()->ReturnInstruction(instr, expr->id());
+}
+
+
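// Editor's illustration (a minimal sketch with hypothetical types, not part
// of this patch): the decision made in HandlePolymorphicLoadNamedField. If
// every receiver map stores the named property at the same location, one
// map check over the whole set suffices and the load is an ordinary field
// read; otherwise the slower per-map polymorphic load is used.
#include <vector>

struct ShapeInfo { int field_offset; bool in_object; };

static bool HasCommonFieldLocation(const std::vector<ShapeInfo>& shapes,
                                   int* offset, bool* in_object) {
  if (shapes.empty()) return false;
  *offset = shapes[0].field_offset;
  *in_object = shapes[0].in_object;
  for (size_t i = 1; i < shapes.size(); ++i) {
    if (shapes[i].field_offset != *offset ||
        shapes[i].in_object != *in_object) {
      return false;  // fall back to the polymorphic load
    }
  }
  return true;  // one map check over all maps + a single field load
}
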
void HGraphBuilder::HandlePolymorphicStoreNamedField(Assignment* expr,
HValue* object,
HValue* value,
@@ -4041,7 +4866,7 @@ void HGraphBuilder::HandlePolymorphicStoreNamedField(Assignment* expr,
for (int i = 0; i < types->length() && count < kMaxStorePolymorphism; ++i) {
Handle<Map> map = types->at(i);
LookupResult lookup(isolate());
- if (ComputeStoredField(map, name, &lookup)) {
+ if (ComputeLoadStoreField(map, name, &lookup, true)) {
if (count == 0) {
AddInstruction(new(zone()) HCheckNonSmi(object)); // Only needed once.
join = graph()->CreateBasicBlock();
@@ -4054,8 +4879,9 @@ void HGraphBuilder::HandlePolymorphicStoreNamedField(Assignment* expr,
current_block()->Finish(compare);
set_current_block(if_true);
- HInstruction* instr =
- BuildStoreNamedField(object, name, value, map, &lookup, false);
+ HInstruction* instr;
+ CHECK_ALIVE(instr =
+ BuildStoreNamedField(object, name, value, map, &lookup, false));
instr->set_position(expr->position());
// Goto will add the HSimulate for the store.
AddInstruction(instr);
@@ -4106,7 +4932,7 @@ void HGraphBuilder::HandlePolymorphicStoreNamedField(Assignment* expr,
void HGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
Property* prop = expr->target()->AsProperty();
ASSERT(prop != NULL);
- expr->RecordTypeFeedback(oracle());
+ expr->RecordTypeFeedback(oracle(), zone());
CHECK_ALIVE(VisitForValue(prop->obj()));
HValue* value = NULL;
@@ -4123,10 +4949,8 @@ void HGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
ASSERT(!name.is_null());
SmallMapList* types = expr->GetReceiverTypes();
- LookupResult lookup(isolate());
-
if (expr->IsMonomorphic()) {
- instr = BuildStoreNamed(object, value, expr);
+ CHECK_ALIVE(instr = BuildStoreNamed(object, value, expr));
} else if (types != NULL && types->length() > 1) {
HandlePolymorphicStoreNamedField(expr, object, value, types, name);
@@ -4279,7 +5103,7 @@ void HGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
return ast_context()->ReturnValue(Pop());
} else if (prop != NULL) {
- prop->RecordTypeFeedback(oracle());
+ prop->RecordTypeFeedback(oracle(), zone());
if (prop->key()->IsPropertyName()) {
// Named property.
@@ -4305,7 +5129,8 @@ void HGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
PushAndAdd(instr);
if (instr->HasObservableSideEffects()) AddSimulate(operation->id());
- HInstruction* store = BuildStoreNamed(obj, instr, prop);
+ HInstruction* store;
+ CHECK_ALIVE(store = BuildStoreNamed(obj, instr, prop));
AddInstruction(store);
// Drop the simulated receiver and value. Return the value.
Drop(2);
@@ -4337,7 +5162,7 @@ void HGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
PushAndAdd(instr);
if (instr->HasObservableSideEffects()) AddSimulate(operation->id());
- expr->RecordTypeFeedback(oracle());
+ expr->RecordTypeFeedback(oracle(), zone());
HandleKeyedElementAccess(obj, key, instr, expr, expr->AssignmentId(),
RelocInfo::kNoPosition,
true, // is_store
@@ -4385,7 +5210,7 @@ void HGraphBuilder::VisitAssignment(Assignment* expr) {
// We insert a use of the old value to detect unsupported uses of const
// variables (e.g. initialization inside a loop).
HValue* old_value = environment()->Lookup(var);
- AddInstruction(new HUseConst(old_value));
+ AddInstruction(new(zone()) HUseConst(old_value));
}
} else if (var->mode() == CONST_HARMONY) {
if (expr->op() != Token::INIT_CONST_HARMONY) {
@@ -4512,8 +5337,7 @@ HLoadNamedField* HGraphBuilder::BuildLoadNamedField(HValue* object,
bool smi_and_map_check) {
if (smi_and_map_check) {
AddInstruction(new(zone()) HCheckNonSmi(object));
- AddInstruction(new(zone()) HCheckMap(object, type, NULL,
- ALLOW_ELEMENT_TRANSITION_MAPS));
+ AddInstruction(HCheckMaps::NewWithTransitions(object, type, zone()));
}
int index = lookup->GetLocalFieldIndexFromMap(*type);
@@ -4557,8 +5381,7 @@ HInstruction* HGraphBuilder::BuildLoadNamed(HValue* obj,
true);
} else if (lookup.IsFound() && lookup.type() == CONSTANT_FUNCTION) {
AddInstruction(new(zone()) HCheckNonSmi(obj));
- AddInstruction(new(zone()) HCheckMap(obj, map, NULL,
- ALLOW_ELEMENT_TRANSITION_MAPS));
+ AddInstruction(HCheckMaps::NewWithTransitions(obj, map, zone()));
Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*map));
return new(zone()) HConstant(function, Representation::Tagged());
} else {
@@ -4605,9 +5428,12 @@ HInstruction* HGraphBuilder::BuildExternalArrayElementAccess(
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
break;
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -4632,13 +5458,16 @@ HInstruction* HGraphBuilder::BuildFastElementAccess(HValue* elements,
ASSERT(val != NULL);
switch (elements_kind) {
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
return new(zone()) HStoreKeyedFastDoubleElement(
elements, checked_key, val);
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
// Smi-only arrays need a smi check.
AddInstruction(new(zone()) HCheckSmi(val));
// Fall through.
case FAST_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
return new(zone()) HStoreKeyedFastElement(
elements, checked_key, val, elements_kind);
default:
@@ -4647,10 +5476,14 @@ HInstruction* HGraphBuilder::BuildFastElementAccess(HValue* elements,
}
}
// It's an element load (!is_store).
- if (elements_kind == FAST_DOUBLE_ELEMENTS) {
- return new(zone()) HLoadKeyedFastDoubleElement(elements, checked_key);
- } else { // FAST_ELEMENTS or FAST_SMI_ONLY_ELEMENTS.
- return new(zone()) HLoadKeyedFastElement(elements, checked_key);
+ HoleCheckMode mode = IsFastPackedElementsKind(elements_kind) ?
+ OMIT_HOLE_CHECK :
+ PERFORM_HOLE_CHECK;
+ if (IsFastDoubleElementsKind(elements_kind)) {
+ return new(zone()) HLoadKeyedFastDoubleElement(elements, checked_key, mode);
+ } else { // Smi or Object elements.
+ return new(zone()) HLoadKeyedFastElement(elements, checked_key,
+ elements_kind);
}
}
@@ -4658,15 +5491,30 @@ HInstruction* HGraphBuilder::BuildFastElementAccess(HValue* elements,
HInstruction* HGraphBuilder::BuildMonomorphicElementAccess(HValue* object,
HValue* key,
HValue* val,
+ HValue* dependency,
Handle<Map> map,
bool is_store) {
- HInstruction* mapcheck = AddInstruction(new(zone()) HCheckMap(object, map));
- bool fast_smi_only_elements = map->has_fast_smi_only_elements();
- bool fast_elements = map->has_fast_elements();
+ HInstruction* mapcheck =
+ AddInstruction(new(zone()) HCheckMaps(object, map, zone(), dependency));
+ // No GVNFlag is necessary for ElementsKind if there is an explicit dependency
+ // on a HElementsTransition instruction. The flag can also be removed if the
+ // map to check has FAST_HOLEY_ELEMENTS, since there can be no further
+ // ElementsKind transitions. Finally, the dependency can be removed for
+ // FAST_ELEMENTS stores, since a transition to HOLEY elements won't change
+ // the generated store code.
+ if (dependency ||
+ (map->elements_kind() == FAST_HOLEY_ELEMENTS) ||
+ (map->elements_kind() == FAST_ELEMENTS && is_store)) {
+ mapcheck->ClearGVNFlag(kDependsOnElementsKind);
+ }
+ bool fast_smi_only_elements = map->has_fast_smi_elements();
+ bool fast_elements = map->has_fast_object_elements();
HInstruction* elements = AddInstruction(new(zone()) HLoadElements(object));
if (is_store && (fast_elements || fast_smi_only_elements)) {
- AddInstruction(new(zone()) HCheckMap(
- elements, isolate()->factory()->fixed_array_map()));
+ HCheckMaps* check_cow_map = new(zone()) HCheckMaps(
+ elements, isolate()->factory()->fixed_array_map(), zone());
+ check_cow_map->ClearGVNFlag(kDependsOnElementsKind);
+ AddInstruction(check_cow_map);
}
HInstruction* length = NULL;
HInstruction* checked_key = NULL;
@@ -4683,7 +5531,8 @@ HInstruction* HGraphBuilder::BuildMonomorphicElementAccess(HValue* object,
fast_elements ||
map->has_fast_double_elements());
if (map->instance_type() == JS_ARRAY_TYPE) {
- length = AddInstruction(new(zone()) HJSArrayLength(object, mapcheck));
+ length = AddInstruction(new(zone()) HJSArrayLength(object, mapcheck,
+ HType::Smi()));
} else {
length = AddInstruction(new(zone()) HFixedArrayBaseLength(elements));
}
@@ -4719,8 +5568,8 @@ HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object,
for (int i = 0; i < maps->length(); ++i) {
Handle<Map> map = maps->at(i);
ElementsKind elements_kind = map->elements_kind();
- if (elements_kind == FAST_DOUBLE_ELEMENTS ||
- elements_kind == FAST_ELEMENTS) {
+ if (IsFastElementsKind(elements_kind) &&
+ elements_kind != GetInitialFastElementsKind()) {
possible_transitioned_maps.Add(map);
}
}
@@ -4734,15 +5583,20 @@ HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object,
int num_untransitionable_maps = 0;
Handle<Map> untransitionable_map;
+ HTransitionElementsKind* transition = NULL;
for (int i = 0; i < maps->length(); ++i) {
Handle<Map> map = maps->at(i);
ASSERT(map->IsMap());
if (!transition_target.at(i).is_null()) {
- AddInstruction(new(zone()) HTransitionElementsKind(
- object, map, transition_target.at(i)));
+ ASSERT(Map::IsValidElementsTransition(
+ map->elements_kind(),
+ transition_target.at(i)->elements_kind()));
+ transition = new(zone()) HTransitionElementsKind(
+ object, map, transition_target.at(i));
+ AddInstruction(transition);
} else {
type_todo[map->elements_kind()] = true;
- if (map->elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND) {
+ if (IsExternalArrayElementsKind(map->elements_kind())) {
todo_external_array = true;
}
num_untransitionable_maps++;
@@ -4759,14 +5613,14 @@ HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object,
: BuildLoadKeyedGeneric(object, key));
} else {
instr = AddInstruction(BuildMonomorphicElementAccess(
- object, key, val, untransitionable_map, is_store));
+ object, key, val, transition, untransitionable_map, is_store));
}
*has_side_effects |= instr->HasObservableSideEffects();
instr->set_position(position);
return is_store ? NULL : instr;
}
- AddInstruction(HCheckInstanceType::NewIsSpecObject(object));
+ AddInstruction(HCheckInstanceType::NewIsSpecObject(object, zone()));
HBasicBlock* join = graph()->CreateBasicBlock();
HInstruction* elements_kind_instr =
@@ -4776,20 +5630,18 @@ HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object,
HLoadExternalArrayPointer* external_elements = NULL;
HInstruction* checked_key = NULL;
- // Generated code assumes that FAST_SMI_ONLY_ELEMENTS, FAST_ELEMENTS,
- // FAST_DOUBLE_ELEMENTS and DICTIONARY_ELEMENTS are handled before external
- // arrays.
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS < FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND);
- STATIC_ASSERT(FAST_ELEMENTS < FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND);
+ // Generated code assumes that FAST_* and DICTIONARY_ELEMENTS ElementsKinds
+ // are handled before external arrays.
+ STATIC_ASSERT(FAST_SMI_ELEMENTS < FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND);
+ STATIC_ASSERT(FAST_HOLEY_ELEMENTS < FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND);
STATIC_ASSERT(FAST_DOUBLE_ELEMENTS < FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND);
STATIC_ASSERT(DICTIONARY_ELEMENTS < FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND);
for (ElementsKind elements_kind = FIRST_ELEMENTS_KIND;
elements_kind <= LAST_ELEMENTS_KIND;
elements_kind = ElementsKind(elements_kind + 1)) {
- // After having handled FAST_ELEMENTS, FAST_SMI_ONLY_ELEMENTS,
- // FAST_DOUBLE_ELEMENTS and DICTIONARY_ELEMENTS, we need to add some code
- // that's executed for all external array cases.
+ // After having handled FAST_* and DICTIONARY_ELEMENTS, we need to add some
+ // code that's executed for all external array cases.
STATIC_ASSERT(LAST_EXTERNAL_ARRAY_ELEMENTS_KIND ==
LAST_ELEMENTS_KIND);
if (elements_kind == FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND
@@ -4811,13 +5663,11 @@ HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object,
set_current_block(if_true);
HInstruction* access;
- if (elements_kind == FAST_SMI_ONLY_ELEMENTS ||
- elements_kind == FAST_ELEMENTS ||
- elements_kind == FAST_DOUBLE_ELEMENTS) {
- if (is_store && elements_kind != FAST_DOUBLE_ELEMENTS) {
- AddInstruction(new(zone()) HCheckMap(
+ if (IsFastElementsKind(elements_kind)) {
+ if (is_store && !IsFastDoubleElementsKind(elements_kind)) {
+ AddInstruction(new(zone()) HCheckMaps(
elements, isolate()->factory()->fixed_array_map(),
- elements_kind_branch));
+ zone(), elements_kind_branch));
}
// TODO(jkummerow): The need for these two blocks could be avoided
// in one of two ways:
@@ -4837,7 +5687,8 @@ HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object,
set_current_block(if_jsarray);
HInstruction* length;
- length = AddInstruction(new(zone()) HJSArrayLength(object, typecheck));
+ length = AddInstruction(new(zone()) HJSArrayLength(object, typecheck,
+ HType::Smi()));
checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
access = AddInstruction(BuildFastElementAccess(
elements, checked_key, val, elements_kind, is_store));
@@ -4901,7 +5752,7 @@ HValue* HGraphBuilder::HandleKeyedElementAccess(HValue* obj,
: BuildLoadKeyedGeneric(obj, key);
} else {
AddInstruction(new(zone()) HCheckNonSmi(obj));
- instr = BuildMonomorphicElementAccess(obj, key, val, map, is_store);
+ instr = BuildMonomorphicElementAccess(obj, key, val, NULL, map, is_store);
}
} else if (expr->GetReceiverTypes() != NULL &&
!expr->GetReceiverTypes()->is_empty()) {
@@ -4933,6 +5784,34 @@ HInstruction* HGraphBuilder::BuildStoreKeyedGeneric(HValue* object,
function_strict_mode_flag());
}
+
+void HGraphBuilder::EnsureArgumentsArePushedForAccess() {
+ // Outermost function already has arguments on the stack.
+ if (function_state()->outer() == NULL) return;
+
+ if (function_state()->arguments_pushed()) return;
+
+ // Push arguments when entering an inlined function.
+ HEnterInlined* entry = function_state()->entry();
+
+ ZoneList<HValue*>* arguments_values = entry->arguments_values();
+
+ HInstruction* insert_after = entry;
+ for (int i = 0; i < arguments_values->length(); i++) {
+ HValue* argument = arguments_values->at(i);
+ HInstruction* push_argument = new(zone()) HPushArgument(argument);
+ push_argument->InsertAfter(insert_after);
+ insert_after = push_argument;
+ }
+
+ HArgumentsElements* arguments_elements =
+ new(zone()) HArgumentsElements(true);
+ arguments_elements->ClearFlag(HValue::kUseGVN);
+ arguments_elements->InsertAfter(insert_after);
+ function_state()->set_arguments_elements(arguments_elements);
+}
+
+
bool HGraphBuilder::TryArgumentsAccess(Property* expr) {
VariableProxy* proxy = expr->obj()->AsVariableProxy();
if (proxy == NULL) return false;
@@ -4941,31 +5820,51 @@ bool HGraphBuilder::TryArgumentsAccess(Property* expr) {
return false;
}
- // Our implementation of arguments (based on this stack frame or an
- // adapter below it) does not work for inlined functions.
- if (function_state()->outer() != NULL) {
- Bailout("arguments access in inlined function");
- return true;
- }
-
HInstruction* result = NULL;
if (expr->key()->IsPropertyName()) {
Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
if (!name->IsEqualTo(CStrVector("length"))) return false;
- HInstruction* elements = AddInstruction(new(zone()) HArgumentsElements);
- result = new(zone()) HArgumentsLength(elements);
+
+ if (function_state()->outer() == NULL) {
+ HInstruction* elements = AddInstruction(
+ new(zone()) HArgumentsElements(false));
+ result = new(zone()) HArgumentsLength(elements);
+ } else {
+ // Number of arguments without receiver.
+ int argument_count = environment()->
+ arguments_environment()->parameter_count() - 1;
+ result = new(zone()) HConstant(
+ Handle<Object>(Smi::FromInt(argument_count)),
+ Representation::Integer32());
+ }
} else {
Push(graph()->GetArgumentsObject());
VisitForValue(expr->key());
if (HasStackOverflow() || current_block() == NULL) return true;
HValue* key = Pop();
Drop(1); // Arguments object.
- HInstruction* elements = AddInstruction(new(zone()) HArgumentsElements);
- HInstruction* length = AddInstruction(
- new(zone()) HArgumentsLength(elements));
- HInstruction* checked_key =
- AddInstruction(new(zone()) HBoundsCheck(key, length));
- result = new(zone()) HAccessArgumentsAt(elements, length, checked_key);
+ if (function_state()->outer() == NULL) {
+ HInstruction* elements = AddInstruction(
+ new(zone()) HArgumentsElements(false));
+ HInstruction* length = AddInstruction(
+ new(zone()) HArgumentsLength(elements));
+ HInstruction* checked_key =
+ AddInstruction(new(zone()) HBoundsCheck(key, length));
+ result = new(zone()) HAccessArgumentsAt(elements, length, checked_key);
+ } else {
+ EnsureArgumentsArePushedForAccess();
+
+ // Number of arguments without receiver.
+ HInstruction* elements = function_state()->arguments_elements();
+ int argument_count = environment()->
+ arguments_environment()->parameter_count() - 1;
+ HInstruction* length = AddInstruction(new(zone()) HConstant(
+ Handle<Object>(Smi::FromInt(argument_count)),
+ Representation::Integer32()));
+ HInstruction* checked_key =
+ AddInstruction(new(zone()) HBoundsCheck(key, length));
+ result = new(zone()) HAccessArgumentsAt(elements, length, checked_key);
+ }
}
ast_context()->ReturnInstruction(result, expr->id());
return true;
@@ -4976,7 +5875,7 @@ void HGraphBuilder::VisitProperty(Property* expr) {
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
- expr->RecordTypeFeedback(oracle());
+ expr->RecordTypeFeedback(oracle(), zone());
if (TryArgumentsAccess(expr)) return;
@@ -4987,13 +5886,12 @@ void HGraphBuilder::VisitProperty(Property* expr) {
HValue* array = Pop();
AddInstruction(new(zone()) HCheckNonSmi(array));
HInstruction* mapcheck =
- AddInstruction(HCheckInstanceType::NewIsJSArray(array));
+ AddInstruction(HCheckInstanceType::NewIsJSArray(array, zone()));
instr = new(zone()) HJSArrayLength(array, mapcheck);
-
} else if (expr->IsStringLength()) {
HValue* string = Pop();
AddInstruction(new(zone()) HCheckNonSmi(string));
- AddInstruction(HCheckInstanceType::NewIsString(string));
+ AddInstruction(HCheckInstanceType::NewIsString(string, zone()));
instr = new(zone()) HStringLength(string);
} else if (expr->IsStringAccess()) {
CHECK_ALIVE(VisitForValue(expr->key()));
@@ -5019,8 +5917,8 @@ void HGraphBuilder::VisitProperty(Property* expr) {
instr = BuildLoadNamed(obj, expr, types->first(), name);
} else if (types != NULL && types->length() > 1) {
AddInstruction(new(zone()) HCheckNonSmi(obj));
- HValue* context = environment()->LookupContext();
- instr = new(zone()) HLoadNamedFieldPolymorphic(context, obj, types, name);
+ HandlePolymorphicLoadNamedField(expr, obj, types, name);
+ return;
} else {
instr = BuildLoadNamedGeneric(obj, expr);
}
@@ -5061,8 +5959,8 @@ void HGraphBuilder::AddCheckConstantFunction(Call* expr,
// its prototypes.
if (smi_and_map_check) {
AddInstruction(new(zone()) HCheckNonSmi(receiver));
- AddInstruction(new(zone()) HCheckMap(receiver, receiver_map, NULL,
- ALLOW_ELEMENT_TRANSITION_MAPS));
+ AddInstruction(HCheckMaps::NewWithTransitions(receiver, receiver_map,
+ zone()));
}
if (!expr->holder().is_null()) {
AddInstruction(new(zone()) HCheckPrototypeMaps(
@@ -5072,6 +5970,39 @@ void HGraphBuilder::AddCheckConstantFunction(Call* expr,
}
+class FunctionSorter {
+ public:
+ FunctionSorter() : index_(0), ticks_(0), ast_length_(0), src_length_(0) { }
+ FunctionSorter(int index, int ticks, int ast_length, int src_length)
+ : index_(index),
+ ticks_(ticks),
+ ast_length_(ast_length),
+ src_length_(src_length) { }
+
+ int index() const { return index_; }
+ int ticks() const { return ticks_; }
+ int ast_length() const { return ast_length_; }
+ int src_length() const { return src_length_; }
+
+ private:
+ int index_;
+ int ticks_;
+ int ast_length_;
+ int src_length_;
+};
+
+
+static int CompareHotness(void const* a, void const* b) {
+ FunctionSorter const* function1 = reinterpret_cast<FunctionSorter const*>(a);
+ FunctionSorter const* function2 = reinterpret_cast<FunctionSorter const*>(b);
+ int diff = function1->ticks() - function2->ticks();
+ if (diff != 0) return -diff;
+ diff = function1->ast_length() - function2->ast_length();
+ if (diff != 0) return diff;
+ return function1->src_length() - function2->src_length();
+}
+
+
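// Editor's illustration (not part of this patch; the values are invented):
// the order CompareHotness establishes for polymorphic call targets. Hotter
// functions (more profiler ticks) are tried first; ties prefer the smaller
// AST, then the smaller source.
#include <cstdio>
#include <cstdlib>

struct Candidate { int index; int ticks; int ast_length; int src_length; };

static int CompareCandidateHotness(const void* a, const void* b) {
  const Candidate* f1 = static_cast<const Candidate*>(a);
  const Candidate* f2 = static_cast<const Candidate*>(b);
  int diff = f1->ticks - f2->ticks;
  if (diff != 0) return -diff;                 // hotter first
  diff = f1->ast_length - f2->ast_length;      // then smaller AST
  if (diff != 0) return diff;
  return f1->src_length - f2->src_length;      // then smaller source
}

int main() {
  Candidate order[] = { {0, 10, 50, 400}, {1, 25, 90, 700}, {2, 25, 40, 300} };
  std::qsort(order, 3, sizeof(order[0]), &CompareCandidateHotness);
  for (int i = 0; i < 3; i++) std::printf("%d ", order[i].index);
  std::printf("\n");  // prints: 2 1 0
  return 0;
}
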
void HGraphBuilder::HandlePolymorphicCallNamed(Call* expr,
HValue* receiver,
SmallMapList* types,
@@ -5080,51 +6011,73 @@ void HGraphBuilder::HandlePolymorphicCallNamed(Call* expr,
// maps are identical. In that case we can avoid repeatedly generating the
// same prototype map checks.
int argument_count = expr->arguments()->length() + 1; // Includes receiver.
- int count = 0;
HBasicBlock* join = NULL;
- for (int i = 0; i < types->length() && count < kMaxCallPolymorphism; ++i) {
+ FunctionSorter order[kMaxCallPolymorphism];
+ int ordered_functions = 0;
+ for (int i = 0;
+ i < types->length() && ordered_functions < kMaxCallPolymorphism;
+ ++i) {
Handle<Map> map = types->at(i);
if (expr->ComputeTarget(map, name)) {
- if (count == 0) {
- // Only needed once.
- AddInstruction(new(zone()) HCheckNonSmi(receiver));
- join = graph()->CreateBasicBlock();
- }
- ++count;
- HBasicBlock* if_true = graph()->CreateBasicBlock();
- HBasicBlock* if_false = graph()->CreateBasicBlock();
- HCompareMap* compare =
- new(zone()) HCompareMap(receiver, map, if_true, if_false);
- current_block()->Finish(compare);
+ order[ordered_functions++] =
+ FunctionSorter(i,
+ expr->target()->shared()->profiler_ticks(),
+ InliningAstSize(expr->target()),
+ expr->target()->shared()->SourceSize());
+ }
+ }
- set_current_block(if_true);
- AddCheckConstantFunction(expr, receiver, map, false);
- if (FLAG_trace_inlining && FLAG_polymorphic_inlining) {
- PrintF("Trying to inline the polymorphic call to %s\n",
- *name->ToCString());
- }
- if (FLAG_polymorphic_inlining && TryInlineCall(expr)) {
- // Trying to inline will signal that we should bailout from the
- // entire compilation by setting stack overflow on the visitor.
- if (HasStackOverflow()) return;
- } else {
- HCallConstantFunction* call =
- new(zone()) HCallConstantFunction(expr->target(), argument_count);
- call->set_position(expr->position());
- PreProcessCall(call);
- AddInstruction(call);
- if (!ast_context()->IsEffect()) Push(call);
- }
+ qsort(reinterpret_cast<void*>(&order[0]),
+ ordered_functions,
+ sizeof(order[0]),
+ &CompareHotness);
- if (current_block() != NULL) current_block()->Goto(join);
- set_current_block(if_false);
+ for (int fn = 0; fn < ordered_functions; ++fn) {
+ int i = order[fn].index();
+ Handle<Map> map = types->at(i);
+ if (fn == 0) {
+ // Only needed once.
+ AddInstruction(new(zone()) HCheckNonSmi(receiver));
+ join = graph()->CreateBasicBlock();
+ }
+ HBasicBlock* if_true = graph()->CreateBasicBlock();
+ HBasicBlock* if_false = graph()->CreateBasicBlock();
+ HCompareMap* compare =
+ new(zone()) HCompareMap(receiver, map, if_true, if_false);
+ current_block()->Finish(compare);
+
+ set_current_block(if_true);
+ expr->ComputeTarget(map, name);
+ AddCheckConstantFunction(expr, receiver, map, false);
+ if (FLAG_trace_inlining && FLAG_polymorphic_inlining) {
+ Handle<JSFunction> caller = info()->closure();
+ SmartArrayPointer<char> caller_name =
+ caller->shared()->DebugName()->ToCString();
+ PrintF("Trying to inline the polymorphic call to %s from %s\n",
+ *name->ToCString(),
+ *caller_name);
+ }
+ if (FLAG_polymorphic_inlining && TryInlineCall(expr)) {
+ // Trying to inline will signal that we should bailout from the
+ // entire compilation by setting stack overflow on the visitor.
+ if (HasStackOverflow()) return;
+ } else {
+ HCallConstantFunction* call =
+ new(zone()) HCallConstantFunction(expr->target(), argument_count);
+ call->set_position(expr->position());
+ PreProcessCall(call);
+ AddInstruction(call);
+ if (!ast_context()->IsEffect()) Push(call);
}
+
+ if (current_block() != NULL) current_block()->Goto(join);
+ set_current_block(if_false);
}
// Finish up. Unconditionally deoptimize if we've handled all the maps we
// know about and do not want to handle ones we've never seen. Otherwise
// use a generic IC.
- if (count == types->length() && FLAG_deoptimize_uncommon_cases) {
+ if (ordered_functions == types->length() && FLAG_deoptimize_uncommon_cases) {
current_block()->FinishExitWithDeoptimization(HDeoptimize::kNoUses);
} else {
HValue* context = environment()->LookupContext();
@@ -5173,14 +6126,11 @@ void HGraphBuilder::TraceInline(Handle<JSFunction> target,
}
-bool HGraphBuilder::TryInline(CallKind call_kind,
- Handle<JSFunction> target,
- ZoneList<Expression*>* arguments,
- HValue* receiver,
- int ast_id,
- int return_id,
- ReturnHandlingFlag return_handling) {
- if (!FLAG_use_inlining) return false;
+static const int kNotInlinable = 1000000000;
+
+
+int HGraphBuilder::InliningAstSize(Handle<JSFunction> target) {
+ if (!FLAG_use_inlining) return kNotInlinable;
// Precondition: call is monomorphic and we have found a target with the
// appropriate arity.
@@ -5189,29 +6139,46 @@ bool HGraphBuilder::TryInline(CallKind call_kind,
// Do a quick check on source code length to avoid parsing large
// inlining candidates.
- if ((FLAG_limit_inlining && target_shared->SourceSize() > kMaxSourceSize)
- || target_shared->SourceSize() > kUnlimitedMaxSourceSize) {
+ if (target_shared->SourceSize() >
+ Min(FLAG_max_inlined_source_size, kUnlimitedMaxInlinedSourceSize)) {
TraceInline(target, caller, "target text too big");
- return false;
+ return kNotInlinable;
}
// Target must be inlineable.
if (!target->IsInlineable()) {
TraceInline(target, caller, "target not inlineable");
- return false;
+ return kNotInlinable;
}
if (target_shared->dont_inline() || target_shared->dont_optimize()) {
TraceInline(target, caller, "target contains unsupported syntax [early]");
- return false;
+ return kNotInlinable;
}
int nodes_added = target_shared->ast_node_count();
- if ((FLAG_limit_inlining && nodes_added > kMaxInlinedSize) ||
- nodes_added > kUnlimitedMaxInlinedSize) {
+ return nodes_added;
+}
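The early size checks are now split into InliningAstSize, which either returns the target's AST node count or the kNotInlinable sentinel so that TryInline can bail out with a single comparison. A hedged standalone sketch of that pattern, with TargetInfo and the flag parameters as simplified stand-ins for the real SharedFunctionInfo and V8 flags:

#include <algorithm>

static const int kNotInlinableSketch = 1000000000;  // same sentinel idea

struct TargetInfo {
  int source_size;     // stand-in for SharedFunctionInfo::SourceSize()
  int ast_node_count;  // stand-in for SharedFunctionInfo::ast_node_count()
  bool inlineable;
};

int InliningAstSizeSketch(const TargetInfo& target,
                          int flag_max_inlined_source_size,
                          int unlimited_max_inlined_source_size) {
  // The tunable flag is honoured, but never beyond the hard ceiling.
  if (target.source_size > std::min(flag_max_inlined_source_size,
                                    unlimited_max_inlined_source_size)) {
    return kNotInlinableSketch;  // too much source text to parse for inlining
  }
  if (!target.inlineable) return kNotInlinableSketch;
  return target.ast_node_count;  // callers compare this against node limits
}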
+
+
+bool HGraphBuilder::TryInline(CallKind call_kind,
+ Handle<JSFunction> target,
+ ZoneList<Expression*>* arguments,
+ HValue* receiver,
+ int ast_id,
+ int return_id,
+ ReturnHandlingFlag return_handling) {
+ int nodes_added = InliningAstSize(target);
+ if (nodes_added == kNotInlinable) return false;
+
+ Handle<JSFunction> caller = info()->closure();
+
+ if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
TraceInline(target, caller, "target AST is too large [early]");
return false;
}
+ Handle<SharedFunctionInfo> target_shared(target->shared());
+
#if !defined(V8_TARGET_ARCH_IA32)
// Target must be able to use caller's context.
CompilationInfo* outer_info = info();
@@ -5249,8 +6216,8 @@ bool HGraphBuilder::TryInline(CallKind call_kind,
}
// We don't want to add more than a certain number of nodes from inlining.
- if ((FLAG_limit_inlining && inlined_count_ > kMaxInlinedNodes) ||
- inlined_count_ > kUnlimitedMaxInlinedNodes) {
+ if (inlined_count_ > Min(FLAG_max_inlined_nodes_cumulative,
+ kUnlimitedMaxInlinedNodesCumulative)) {
TraceInline(target, caller, "cumulative AST node limit reached");
return false;
}
@@ -5277,8 +6244,7 @@ bool HGraphBuilder::TryInline(CallKind call_kind,
// The following conditions must be checked again after re-parsing, because
// earlier the information might not have been complete due to lazy parsing.
nodes_added = function->ast_node_count();
- if ((FLAG_limit_inlining && nodes_added > kMaxInlinedSize) ||
- nodes_added > kUnlimitedMaxInlinedSize) {
+ if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
TraceInline(target, caller, "target AST is too large [late]");
return false;
}
@@ -5329,7 +6295,7 @@ bool HGraphBuilder::TryInline(CallKind call_kind,
// The scope info might not have been set if a lazily compiled
// function is inlined before being called for the first time.
Handle<ScopeInfo> target_scope_info =
- ScopeInfo::Create(target_info.scope());
+ ScopeInfo::Create(target_info.scope(), zone());
target_shared->set_scope_info(*target_scope_info);
}
target_shared->EnableDeoptimizationSupport(*target_info.code());
@@ -5348,7 +6314,8 @@ bool HGraphBuilder::TryInline(CallKind call_kind,
TypeFeedbackOracle target_oracle(
Handle<Code>(target_shared->code()),
Handle<Context>(target->context()->global_context()),
- isolate());
+ isolate(),
+ zone());
// The function state is new-allocated because we need to delete it
// in two different places.
FunctionState* target_state = new FunctionState(
@@ -5368,25 +6335,48 @@ bool HGraphBuilder::TryInline(CallKind call_kind,
//
// TODO(kmillikin): implement the same inlining on other platforms so we
// can remove the unsightly ifdefs in this function.
- HConstant* context = new HConstant(Handle<Context>(target->context()),
- Representation::Tagged());
+ HConstant* context =
+ new(zone()) HConstant(Handle<Context>(target->context()),
+ Representation::Tagged());
AddInstruction(context);
inner_env->BindContext(context);
#endif
+
AddSimulate(return_id);
current_block()->UpdateEnvironment(inner_env);
- AddInstruction(new(zone()) HEnterInlined(target,
- arguments->length(),
- function,
- call_kind,
- function_state()->is_construct(),
- function->scope()->arguments()));
+
+ ZoneList<HValue*>* arguments_values = NULL;
+
+  // If the function uses arguments, copy the current argument values
+  // to use them for materialization.
+ if (function->scope()->arguments() != NULL) {
+ HEnvironment* arguments_env = inner_env->arguments_environment();
+ int arguments_count = arguments_env->parameter_count();
+ arguments_values = new(zone()) ZoneList<HValue*>(arguments_count, zone());
+ for (int i = 0; i < arguments_count; i++) {
+ arguments_values->Add(arguments_env->Lookup(i), zone());
+ }
+ }
+
+ HEnterInlined* enter_inlined =
+ new(zone()) HEnterInlined(target,
+ arguments->length(),
+ function,
+ call_kind,
+ function_state()->is_construct(),
+ function->scope()->arguments(),
+ arguments_values);
+ function_state()->set_entry(enter_inlined);
+ AddInstruction(enter_inlined);
+
  // If the function uses the arguments object, create and bind one.
if (function->scope()->arguments() != NULL) {
ASSERT(function->scope()->arguments()->IsStackAllocated());
- environment()->Bind(function->scope()->arguments(),
- graph()->GetArgumentsObject());
+ inner_env->Bind(function->scope()->arguments(),
+ graph()->GetArgumentsObject());
}
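When the inlined function mentions 'arguments', the hunk above snapshots the caller's actual argument values into a ZoneList and hands them to HEnterInlined so the deoptimizer can materialize an arguments object later. A simplified sketch of that capture step, using std::vector and an opaque Value type in place of ZoneList<HValue*> and HValue:

#include <vector>

struct Value;  // stand-in for HValue

struct ArgumentsEnvironmentSketch {
  std::vector<Value*> parameters;  // receiver plus the actual arguments
};

// Only performed when function->scope()->arguments() != NULL above.
std::vector<Value*>* CaptureArgumentValues(
    const ArgumentsEnvironmentSketch& env) {
  std::vector<Value*>* values = new std::vector<Value*>();
  for (size_t i = 0; i < env.parameters.size(); ++i) {
    values->push_back(env.parameters[i]);  // copy, do not re-evaluate
  }
  return values;  // passed to HEnterInlined for later materialization
}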
+
+
VisitDeclarations(target_info.scope()->declarations());
VisitStatements(function->body());
if (HasStackOverflow()) {
@@ -5415,17 +6405,17 @@ bool HGraphBuilder::TryInline(CallKind call_kind,
: undefined;
current_block()->AddLeaveInlined(return_value,
function_return(),
- function_state()->drop_extra());
+ function_state());
} else if (call_context()->IsEffect()) {
ASSERT(function_return() != NULL);
- current_block()->Goto(function_return(), function_state()->drop_extra());
+ current_block()->Goto(function_return(), function_state());
} else {
ASSERT(call_context()->IsTest());
ASSERT(inlined_test_context() != NULL);
HBasicBlock* target = function_state()->is_construct()
? inlined_test_context()->if_true()
: inlined_test_context()->if_false();
- current_block()->Goto(target, function_state()->drop_extra());
+ current_block()->Goto(target, function_state());
}
}
@@ -5443,12 +6433,12 @@ bool HGraphBuilder::TryInline(CallKind call_kind,
if (if_true->HasPredecessor()) {
if_true->SetJoinId(ast_id);
HBasicBlock* true_target = TestContext::cast(ast_context())->if_true();
- if_true->Goto(true_target, function_state()->drop_extra());
+ if_true->Goto(true_target, function_state());
}
if (if_false->HasPredecessor()) {
if_false->SetJoinId(ast_id);
HBasicBlock* false_target = TestContext::cast(ast_context())->if_false();
- if_false->Goto(false_target, function_state()->drop_extra());
+ if_false->Goto(false_target, function_state());
}
set_current_block(NULL);
return true;
@@ -5757,7 +6747,8 @@ bool HGraphBuilder::TryCallApply(Call* expr) {
HValue* receiver = Pop();
if (function_state()->outer() == NULL) {
- HInstruction* elements = AddInstruction(new(zone()) HArgumentsElements);
+ HInstruction* elements = AddInstruction(
+ new(zone()) HArgumentsElements(false));
HInstruction* length =
AddInstruction(new(zone()) HArgumentsLength(elements));
HValue* wrapped_receiver =
@@ -5938,6 +6929,11 @@ void HGraphBuilder::VisitCall(Call* expr) {
return;
}
if (TryInlineCall(expr)) return;
+
+ if (expr->target().is_identical_to(info()->closure())) {
+ graph()->MarkRecursive();
+ }
+
call = PreProcessCall(new(zone()) HCallKnownGlobal(expr->target(),
argument_count));
} else {
@@ -5958,8 +6954,8 @@ void HGraphBuilder::VisitCall(Call* expr) {
HValue* function = Top();
HValue* context = environment()->LookupContext();
HGlobalObject* global = new(zone()) HGlobalObject(context);
- HGlobalReceiver* receiver = new(zone()) HGlobalReceiver(global);
AddInstruction(global);
+ HGlobalReceiver* receiver = new(zone()) HGlobalReceiver(global);
PushAndAdd(receiver);
CHECK_ALIVE(VisitExpressions(expr->arguments()));
AddInstruction(new(zone()) HCheckFunction(function, expr->target()));
@@ -5976,9 +6972,11 @@ void HGraphBuilder::VisitCall(Call* expr) {
if (TryInlineCall(expr, true)) { // Drop function from environment.
return;
} else {
- call = PreProcessCall(new(zone()) HInvokeFunction(context,
- function,
- argument_count));
+ call = PreProcessCall(
+ new(zone()) HInvokeFunction(context,
+ function,
+ expr->target(),
+ argument_count));
Drop(1); // The function.
}
@@ -5987,8 +6985,8 @@ void HGraphBuilder::VisitCall(Call* expr) {
HValue* function = Top();
HValue* context = environment()->LookupContext();
HGlobalObject* global_object = new(zone()) HGlobalObject(context);
- HGlobalReceiver* receiver = new(zone()) HGlobalReceiver(global_object);
AddInstruction(global_object);
+ HGlobalReceiver* receiver = new(zone()) HGlobalReceiver(global_object);
AddInstruction(receiver);
PushAndAdd(new(zone()) HPushArgument(receiver));
CHECK_ALIVE(VisitArgumentList(expr->arguments()));
@@ -6006,7 +7004,8 @@ void HGraphBuilder::VisitCall(Call* expr) {
// Checks whether allocation using the given constructor can be inlined.
static bool IsAllocationInlineable(Handle<JSFunction> constructor) {
return constructor->has_initial_map() &&
- constructor->initial_map()->instance_type() == JS_OBJECT_TYPE;
+ constructor->initial_map()->instance_type() == JS_OBJECT_TYPE &&
+ constructor->initial_map()->instance_size() < HAllocateObject::kMaxSize;
}
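The extra clause keeps inlined allocation on the fast path by rejecting constructors whose instances are too large. A trivial hedged sketch of the predicate, with placeholder constants standing in for JS_OBJECT_TYPE and HAllocateObject::kMaxSize:

struct InitialMapInfo {
  int instance_type;
  int instance_size;
};

const int kJSObjectTypeSketch = 1;    // placeholder for JS_OBJECT_TYPE
const int kMaxInlinedAllocSize = 64;  // placeholder for HAllocateObject::kMaxSize

bool IsAllocationInlineableSketch(bool has_initial_map,
                                  const InitialMapInfo& map) {
  return has_initial_map &&
         map.instance_type == kJSObjectTypeSketch &&
         map.instance_size < kMaxInlinedAllocSize;
}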
@@ -6393,7 +7392,7 @@ void HGraphBuilder::VisitCountOperation(CountOperation* expr) {
} else {
// Argument of the count operation is a property.
ASSERT(prop != NULL);
- prop->RecordTypeFeedback(oracle());
+ prop->RecordTypeFeedback(oracle(), zone());
if (prop->key()->IsPropertyName()) {
// Named property.
@@ -6416,7 +7415,8 @@ void HGraphBuilder::VisitCountOperation(CountOperation* expr) {
after = BuildIncrement(returns_original_input, expr);
input = Pop();
- HInstruction* store = BuildStoreNamed(obj, after, prop);
+ HInstruction* store;
+ CHECK_ALIVE(store = BuildStoreNamed(obj, after, prop));
AddInstruction(store);
// Overwrite the receiver in the bailout environment with the result
@@ -6446,7 +7446,7 @@ void HGraphBuilder::VisitCountOperation(CountOperation* expr) {
after = BuildIncrement(returns_original_input, expr);
input = Pop();
- expr->RecordTypeFeedback(oracle());
+ expr->RecordTypeFeedback(oracle(), zone());
HandleKeyedElementAccess(obj, key, after, expr, expr->AssignmentId(),
RelocInfo::kNoPosition,
true, // is_store
@@ -6472,7 +7472,7 @@ HStringCharCodeAt* HGraphBuilder::BuildStringCharCodeAt(HValue* context,
HValue* string,
HValue* index) {
AddInstruction(new(zone()) HCheckNonSmi(string));
- AddInstruction(HCheckInstanceType::NewIsString(string));
+ AddInstruction(HCheckInstanceType::NewIsString(string, zone()));
HStringLength* length = new(zone()) HStringLength(string);
AddInstruction(length);
HInstruction* checked_index =
@@ -6496,9 +7496,9 @@ HInstruction* HGraphBuilder::BuildBinaryOperation(BinaryOperation* expr,
case Token::ADD:
if (info.IsString()) {
AddInstruction(new(zone()) HCheckNonSmi(left));
- AddInstruction(HCheckInstanceType::NewIsString(left));
+ AddInstruction(HCheckInstanceType::NewIsString(left, zone()));
AddInstruction(new(zone()) HCheckNonSmi(right));
- AddInstruction(HCheckInstanceType::NewIsString(right));
+ AddInstruction(HCheckInstanceType::NewIsString(right, zone()));
instr = new(zone()) HStringAdd(context, left, right);
} else {
instr = HAdd::NewHAdd(zone(), context, left, right);
@@ -6544,8 +7544,10 @@ HInstruction* HGraphBuilder::BuildBinaryOperation(BinaryOperation* expr,
}
Representation rep = ToRepresentation(info);
// We only generate either int32 or generic tagged bitwise operations.
- if (instr->IsBitwiseBinaryOperation() && rep.IsDouble()) {
- rep = Representation::Integer32();
+ if (instr->IsBitwiseBinaryOperation()) {
+ HBitwiseBinaryOperation::cast(instr)->
+ InitializeObservedInputRepresentation(rep);
+ if (rep.IsDouble()) rep = Representation::Integer32();
}
TraceRepresentation(expr->op(), info, instr, rep);
instr->AssumeRepresentation(rep);
@@ -6898,20 +7900,18 @@ void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
Handle<Map> map = oracle()->GetCompareMap(expr);
if (!map.is_null()) {
AddInstruction(new(zone()) HCheckNonSmi(left));
- AddInstruction(new(zone()) HCheckMap(left, map, NULL,
- ALLOW_ELEMENT_TRANSITION_MAPS));
+ AddInstruction(HCheckMaps::NewWithTransitions(left, map, zone()));
AddInstruction(new(zone()) HCheckNonSmi(right));
- AddInstruction(new(zone()) HCheckMap(right, map, NULL,
- ALLOW_ELEMENT_TRANSITION_MAPS));
+ AddInstruction(HCheckMaps::NewWithTransitions(right, map, zone()));
HCompareObjectEqAndBranch* result =
new(zone()) HCompareObjectEqAndBranch(left, right);
result->set_position(expr->position());
return ast_context()->ReturnControl(result, expr->id());
} else {
AddInstruction(new(zone()) HCheckNonSmi(left));
- AddInstruction(HCheckInstanceType::NewIsSpecObject(left));
+ AddInstruction(HCheckInstanceType::NewIsSpecObject(left, zone()));
AddInstruction(new(zone()) HCheckNonSmi(right));
- AddInstruction(HCheckInstanceType::NewIsSpecObject(right));
+ AddInstruction(HCheckInstanceType::NewIsSpecObject(right, zone()));
HCompareObjectEqAndBranch* result =
new(zone()) HCompareObjectEqAndBranch(left, right);
result->set_position(expr->position());
@@ -6924,9 +7924,9 @@ void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
} else if (type_info.IsString() && oracle()->IsSymbolCompare(expr) &&
(op == Token::EQ || op == Token::EQ_STRICT)) {
AddInstruction(new(zone()) HCheckNonSmi(left));
- AddInstruction(HCheckInstanceType::NewIsSymbol(left));
+ AddInstruction(HCheckInstanceType::NewIsSymbol(left, zone()));
AddInstruction(new(zone()) HCheckNonSmi(right));
- AddInstruction(HCheckInstanceType::NewIsSymbol(right));
+ AddInstruction(HCheckInstanceType::NewIsSymbol(right, zone()));
HCompareObjectEqAndBranch* result =
new(zone()) HCompareObjectEqAndBranch(left, right);
result->set_position(expr->position());
@@ -6974,90 +7974,50 @@ void HGraphBuilder::VisitThisFunction(ThisFunction* expr) {
void HGraphBuilder::VisitDeclarations(ZoneList<Declaration*>* declarations) {
- int length = declarations->length();
- int global_count = 0;
- for (int i = 0; i < declarations->length(); i++) {
- Declaration* decl = declarations->at(i);
- FunctionDeclaration* fun_decl = decl->AsFunctionDeclaration();
- HandleDeclaration(decl->proxy(),
- decl->mode(),
- fun_decl != NULL ? fun_decl->fun() : NULL,
- &global_count);
- }
-
- // Batch declare global functions and variables.
- if (global_count > 0) {
+ ASSERT(globals_.is_empty());
+ AstVisitor::VisitDeclarations(declarations);
+ if (!globals_.is_empty()) {
Handle<FixedArray> array =
- isolate()->factory()->NewFixedArray(2 * global_count, TENURED);
- for (int j = 0, i = 0; i < length; i++) {
- Declaration* decl = declarations->at(i);
- Variable* var = decl->proxy()->var();
-
- if (var->IsUnallocated()) {
- array->set(j++, *(var->name()));
- FunctionDeclaration* fun_decl = decl->AsFunctionDeclaration();
- if (fun_decl == NULL) {
- if (var->binding_needs_init()) {
- // In case this binding needs initialization use the hole.
- array->set_the_hole(j++);
- } else {
- array->set_undefined(j++);
- }
- } else {
- Handle<SharedFunctionInfo> function =
- Compiler::BuildFunctionInfo(fun_decl->fun(), info()->script());
- // Check for stack-overflow exception.
- if (function.is_null()) {
- SetStackOverflow();
- return;
- }
- array->set(j++, *function);
- }
- }
- }
+ isolate()->factory()->NewFixedArray(globals_.length(), TENURED);
+ for (int i = 0; i < globals_.length(); ++i) array->set(i, *globals_.at(i));
int flags = DeclareGlobalsEvalFlag::encode(info()->is_eval()) |
DeclareGlobalsNativeFlag::encode(info()->is_native()) |
DeclareGlobalsLanguageMode::encode(info()->language_mode());
- HInstruction* result =
- new(zone()) HDeclareGlobals(environment()->LookupContext(),
- array,
- flags);
+ HInstruction* result = new(zone()) HDeclareGlobals(
+ environment()->LookupContext(), array, flags);
AddInstruction(result);
+ globals_.Clear();
}
}
-void HGraphBuilder::HandleDeclaration(VariableProxy* proxy,
- VariableMode mode,
- FunctionLiteral* function,
- int* global_count) {
- Variable* var = proxy->var();
- bool binding_needs_init =
- (mode == CONST || mode == CONST_HARMONY || mode == LET);
- switch (var->location()) {
+void HGraphBuilder::VisitVariableDeclaration(VariableDeclaration* declaration) {
+ VariableProxy* proxy = declaration->proxy();
+ VariableMode mode = declaration->mode();
+ Variable* variable = proxy->var();
+ bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
+ switch (variable->location()) {
case Variable::UNALLOCATED:
- ++(*global_count);
+ globals_.Add(variable->name(), zone());
+ globals_.Add(variable->binding_needs_init()
+ ? isolate()->factory()->the_hole_value()
+ : isolate()->factory()->undefined_value(), zone());
return;
case Variable::PARAMETER:
case Variable::LOCAL:
+ if (hole_init) {
+ HValue* value = graph()->GetConstantHole();
+ environment()->Bind(variable, value);
+ }
+ break;
case Variable::CONTEXT:
- if (binding_needs_init || function != NULL) {
- HValue* value = NULL;
- if (function != NULL) {
- CHECK_ALIVE(VisitForValue(function));
- value = Pop();
- } else {
- value = graph()->GetConstantHole();
- }
- if (var->IsContextSlot()) {
- HValue* context = environment()->LookupContext();
- HStoreContextSlot* store = new HStoreContextSlot(
- context, var->index(), HStoreContextSlot::kNoCheck, value);
- AddInstruction(store);
- if (store->HasObservableSideEffects()) AddSimulate(proxy->id());
- } else {
- environment()->Bind(var, value);
- }
+ if (hole_init) {
+ HValue* value = graph()->GetConstantHole();
+ HValue* context = environment()->LookupContext();
+ HStoreContextSlot* store = new(zone()) HStoreContextSlot(
+ context, variable->index(), HStoreContextSlot::kNoCheck, value);
+ AddInstruction(store);
+ if (store->HasObservableSideEffects()) AddSimulate(proxy->id());
}
break;
case Variable::LOOKUP:
@@ -7066,48 +8026,74 @@ void HGraphBuilder::HandleDeclaration(VariableProxy* proxy,
}
-void HGraphBuilder::VisitVariableDeclaration(VariableDeclaration* decl) {
- UNREACHABLE();
-}
-
-
-void HGraphBuilder::VisitFunctionDeclaration(FunctionDeclaration* decl) {
- UNREACHABLE();
+void HGraphBuilder::VisitFunctionDeclaration(FunctionDeclaration* declaration) {
+ VariableProxy* proxy = declaration->proxy();
+ Variable* variable = proxy->var();
+ switch (variable->location()) {
+ case Variable::UNALLOCATED: {
+ globals_.Add(variable->name(), zone());
+ Handle<SharedFunctionInfo> function =
+ Compiler::BuildFunctionInfo(declaration->fun(), info()->script());
+ // Check for stack-overflow exception.
+ if (function.is_null()) return SetStackOverflow();
+ globals_.Add(function, zone());
+ return;
+ }
+ case Variable::PARAMETER:
+ case Variable::LOCAL: {
+ CHECK_ALIVE(VisitForValue(declaration->fun()));
+ HValue* value = Pop();
+ environment()->Bind(variable, value);
+ break;
+ }
+ case Variable::CONTEXT: {
+ CHECK_ALIVE(VisitForValue(declaration->fun()));
+ HValue* value = Pop();
+ HValue* context = environment()->LookupContext();
+ HStoreContextSlot* store = new(zone()) HStoreContextSlot(
+ context, variable->index(), HStoreContextSlot::kNoCheck, value);
+ AddInstruction(store);
+ if (store->HasObservableSideEffects()) AddSimulate(proxy->id());
+ break;
+ }
+ case Variable::LOOKUP:
+ return Bailout("unsupported lookup slot in declaration");
+ }
}
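Global declarations are now accumulated pairwise (name, initial value or compiled function) in globals_ and flushed by VisitDeclarations as a single FixedArray for one HDeclareGlobals. A simplified sketch of that batching, with std::string entries standing in for the heap handles:

#include <string>
#include <vector>

struct GlobalBatchSketch {
  std::vector<std::string> entries;  // name, value, name, value, ...

  void AddVariable(const std::string& name, bool needs_hole_init) {
    entries.push_back(name);
    entries.push_back(needs_hole_init ? "<the hole>" : "<undefined>");
  }

  void AddFunction(const std::string& name, const std::string& shared_info) {
    entries.push_back(name);
    entries.push_back(shared_info);  // a SharedFunctionInfo in the real code
  }

  // Corresponds to emitting HDeclareGlobals and then globals_.Clear().
  size_t Flush() {
    size_t length = entries.size();
    entries.clear();
    return length;
  }
};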
-void HGraphBuilder::VisitModuleDeclaration(ModuleDeclaration* decl) {
+void HGraphBuilder::VisitModuleDeclaration(ModuleDeclaration* declaration) {
UNREACHABLE();
}
-void HGraphBuilder::VisitImportDeclaration(ImportDeclaration* decl) {
+void HGraphBuilder::VisitImportDeclaration(ImportDeclaration* declaration) {
UNREACHABLE();
}
-void HGraphBuilder::VisitExportDeclaration(ExportDeclaration* decl) {
+void HGraphBuilder::VisitExportDeclaration(ExportDeclaration* declaration) {
UNREACHABLE();
}
void HGraphBuilder::VisitModuleLiteral(ModuleLiteral* module) {
- // TODO(rossberg)
+ UNREACHABLE();
}
void HGraphBuilder::VisitModuleVariable(ModuleVariable* module) {
- // TODO(rossberg)
+ UNREACHABLE();
}
void HGraphBuilder::VisitModulePath(ModulePath* module) {
- // TODO(rossberg)
+ UNREACHABLE();
}
void HGraphBuilder::VisitModuleUrl(ModuleUrl* module) {
- // TODO(rossberg)
+ UNREACHABLE();
}
@@ -7228,7 +8214,8 @@ void HGraphBuilder::GenerateArgumentsLength(CallRuntime* call) {
// function is blacklisted by AstNode::IsInlineable.
ASSERT(function_state()->outer() == NULL);
ASSERT(call->arguments()->length() == 0);
- HInstruction* elements = AddInstruction(new(zone()) HArgumentsElements);
+ HInstruction* elements = AddInstruction(
+ new(zone()) HArgumentsElements(false));
HArgumentsLength* result = new(zone()) HArgumentsLength(elements);
return ast_context()->ReturnInstruction(result, call->id());
}
@@ -7242,7 +8229,8 @@ void HGraphBuilder::GenerateArguments(CallRuntime* call) {
ASSERT(call->arguments()->length() == 1);
CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
HValue* index = Pop();
- HInstruction* elements = AddInstruction(new(zone()) HArgumentsElements);
+ HInstruction* elements = AddInstruction(
+ new(zone()) HArgumentsElements(false));
HInstruction* length = AddInstruction(new(zone()) HArgumentsLength(elements));
HAccessArgumentsAt* result =
new(zone()) HAccessArgumentsAt(elements, length, index);
@@ -7308,11 +8296,11 @@ void HGraphBuilder::GenerateSetValueOf(CallRuntime* call) {
// Create in-object property store to kValueOffset.
set_current_block(if_js_value);
Handle<String> name = isolate()->factory()->undefined_symbol();
- AddInstruction(new HStoreNamedField(object,
- name,
- value,
- true, // in-object store.
- JSValue::kValueOffset));
+ AddInstruction(new(zone()) HStoreNamedField(object,
+ name,
+ value,
+ true, // in-object store.
+ JSValue::kValueOffset));
if_js_value->Goto(join);
join->SetJoinId(call->id());
set_current_block(join);
@@ -7600,10 +8588,11 @@ void HGraphBuilder::GenerateFastAsciiArrayJoin(CallRuntime* call) {
HEnvironment::HEnvironment(HEnvironment* outer,
Scope* scope,
- Handle<JSFunction> closure)
+ Handle<JSFunction> closure,
+ Zone* zone)
: closure_(closure),
- values_(0),
- assigned_variables_(4),
+ values_(0, zone),
+ assigned_variables_(4, zone),
frame_type_(JS_FUNCTION),
parameter_count_(0),
specials_count_(1),
@@ -7611,14 +8600,15 @@ HEnvironment::HEnvironment(HEnvironment* outer,
outer_(outer),
pop_count_(0),
push_count_(0),
- ast_id_(AstNode::kNoNumber) {
+ ast_id_(AstNode::kNoNumber),
+ zone_(zone) {
Initialize(scope->num_parameters() + 1, scope->num_stack_slots(), 0);
}
-HEnvironment::HEnvironment(const HEnvironment* other)
- : values_(0),
- assigned_variables_(0),
+HEnvironment::HEnvironment(const HEnvironment* other, Zone* zone)
+ : values_(0, zone),
+ assigned_variables_(0, zone),
frame_type_(JS_FUNCTION),
parameter_count_(0),
specials_count_(1),
@@ -7626,7 +8616,8 @@ HEnvironment::HEnvironment(const HEnvironment* other)
outer_(NULL),
pop_count_(0),
push_count_(0),
- ast_id_(other->ast_id()) {
+ ast_id_(other->ast_id()),
+ zone_(zone) {
Initialize(other);
}
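These constructors illustrate the patch's dominant theme: containers and IR objects now allocate from an explicit per-compilation Zone instead of the isolate-wide one. A minimal sketch of the arena idea behind Zone and the 'new(zone()) HFoo(...)' spelling, assuming a fixed-capacity buffer where the real Zone grows on demand:

#include <cstddef>
#include <cstdlib>

class ZoneSketch {
 public:
  explicit ZoneSketch(size_t capacity)
      : buffer_(static_cast<char*>(std::malloc(capacity))),
        capacity_(capacity),
        used_(0) {}
  ~ZoneSketch() { std::free(buffer_); }  // everything dies with the zone

  void* New(size_t size) {
    size = (size + 7) & ~static_cast<size_t>(7);  // keep allocations aligned
    if (used_ + size > capacity_) return NULL;    // the real Zone grows here
    void* result = buffer_ + used_;
    used_ += size;
    return result;
  }

 private:
  char* buffer_;
  size_t capacity_;
  size_t used_;
};

// Mirrors the placement-new spelling used throughout this patch.
struct ZoneObjectSketch {
  void* operator new(size_t size, ZoneSketch* zone) { return zone->New(size); }
  void operator delete(void*, ZoneSketch*) {}  // placement-delete counterpart
  void operator delete(void*) {}               // no individual frees
};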
@@ -7634,17 +8625,19 @@ HEnvironment::HEnvironment(const HEnvironment* other)
HEnvironment::HEnvironment(HEnvironment* outer,
Handle<JSFunction> closure,
FrameType frame_type,
- int arguments)
+ int arguments,
+ Zone* zone)
: closure_(closure),
- values_(arguments),
- assigned_variables_(0),
+ values_(arguments, zone),
+ assigned_variables_(0, zone),
frame_type_(frame_type),
parameter_count_(arguments),
local_count_(0),
outer_(outer),
pop_count_(0),
push_count_(0),
- ast_id_(AstNode::kNoNumber) {
+ ast_id_(AstNode::kNoNumber),
+ zone_(zone) {
}
@@ -7656,15 +8649,15 @@ void HEnvironment::Initialize(int parameter_count,
// Avoid reallocating the temporaries' backing store on the first Push.
int total = parameter_count + specials_count_ + local_count + stack_height;
- values_.Initialize(total + 4);
- for (int i = 0; i < total; ++i) values_.Add(NULL);
+ values_.Initialize(total + 4, zone());
+ for (int i = 0; i < total; ++i) values_.Add(NULL, zone());
}
void HEnvironment::Initialize(const HEnvironment* other) {
closure_ = other->closure();
- values_.AddAll(other->values_);
- assigned_variables_.AddAll(other->assigned_variables_);
+ values_.AddAll(other->values_, zone());
+ assigned_variables_.AddAll(other->assigned_variables_, zone());
frame_type_ = other->frame_type_;
parameter_count_ = other->parameter_count_;
local_count_ = other->local_count_;
@@ -7692,7 +8685,7 @@ void HEnvironment::AddIncomingEdge(HBasicBlock* block, HEnvironment* other) {
} else if (values_[i] != other->values_[i]) {
// There is a fresh value on the incoming edge, a phi is needed.
ASSERT(values_[i] != NULL && other->values_[i] != NULL);
- HPhi* phi = new(block->zone()) HPhi(i);
+ HPhi* phi = new(zone()) HPhi(i, zone());
HValue* old_value = values_[i];
for (int j = 0; j < block->predecessors()->length(); j++) {
phi->AddInput(old_value);
@@ -7708,7 +8701,7 @@ void HEnvironment::AddIncomingEdge(HBasicBlock* block, HEnvironment* other) {
void HEnvironment::Bind(int index, HValue* value) {
ASSERT(value != NULL);
if (!assigned_variables_.Contains(index)) {
- assigned_variables_.Add(index);
+ assigned_variables_.Add(index, zone());
}
values_[index] = value;
}
@@ -7748,7 +8741,7 @@ void HEnvironment::Drop(int count) {
HEnvironment* HEnvironment::Copy() const {
- return new(closure()->GetIsolate()->zone()) HEnvironment(this);
+ return new(zone()) HEnvironment(this, zone());
}
@@ -7762,7 +8755,7 @@ HEnvironment* HEnvironment::CopyWithoutHistory() const {
HEnvironment* HEnvironment::CopyAsLoopHeader(HBasicBlock* loop_header) const {
HEnvironment* new_env = Copy();
for (int i = 0; i < values_.length(); ++i) {
- HPhi* phi = new(loop_header->zone()) HPhi(i);
+ HPhi* phi = new(zone()) HPhi(i, zone());
phi->AddInput(values_[i]);
new_env->values_[i] = phi;
loop_header->AddPhi(phi);
@@ -7776,8 +8769,9 @@ HEnvironment* HEnvironment::CreateStubEnvironment(HEnvironment* outer,
Handle<JSFunction> target,
FrameType frame_type,
int arguments) const {
- HEnvironment* new_env = new(closure()->GetIsolate()->zone())
- HEnvironment(outer, target, frame_type, arguments + 1);
+ HEnvironment* new_env =
+ new(zone()) HEnvironment(outer, target, frame_type,
+ arguments + 1, zone());
for (int i = 0; i <= arguments; ++i) { // Include receiver.
new_env->Push(ExpressionStackAt(arguments - i));
}
@@ -7817,7 +8811,7 @@ HEnvironment* HEnvironment::CopyForInlining(
}
HEnvironment* inner =
- new(zone) HEnvironment(outer, function->scope(), target);
+ new(zone) HEnvironment(outer, function->scope(), target, zone);
// Get the argument values from the original environment.
for (int i = 0; i <= arity; ++i) { // Include receiver.
HValue* push = (i <= arguments) ?
@@ -8007,27 +9001,28 @@ void HTracer::TraceLiveRanges(const char* name, LAllocator* allocator) {
const Vector<LiveRange*>* fixed_d = allocator->fixed_double_live_ranges();
for (int i = 0; i < fixed_d->length(); ++i) {
- TraceLiveRange(fixed_d->at(i), "fixed");
+ TraceLiveRange(fixed_d->at(i), "fixed", allocator->zone());
}
const Vector<LiveRange*>* fixed = allocator->fixed_live_ranges();
for (int i = 0; i < fixed->length(); ++i) {
- TraceLiveRange(fixed->at(i), "fixed");
+ TraceLiveRange(fixed->at(i), "fixed", allocator->zone());
}
const ZoneList<LiveRange*>* live_ranges = allocator->live_ranges();
for (int i = 0; i < live_ranges->length(); ++i) {
- TraceLiveRange(live_ranges->at(i), "object");
+ TraceLiveRange(live_ranges->at(i), "object", allocator->zone());
}
}
-void HTracer::TraceLiveRange(LiveRange* range, const char* type) {
+void HTracer::TraceLiveRange(LiveRange* range, const char* type,
+ Zone* zone) {
if (range != NULL && !range->IsEmpty()) {
PrintIndent();
trace_.Add("%d %s", range->id(), type);
if (range->HasRegisterAssigned()) {
- LOperand* op = range->CreateAssignedOperand(ZONE);
+ LOperand* op = range->CreateAssignedOperand(zone);
int assigned_reg = op->index();
if (op->IsDoubleRegister()) {
trace_.Add(" \"%s\"",
diff --git a/deps/v8/src/hydrogen.h b/deps/v8/src/hydrogen.h
index e2779bb226..6fa3d1b9ff 100644
--- a/deps/v8/src/hydrogen.h
+++ b/deps/v8/src/hydrogen.h
@@ -42,6 +42,7 @@ namespace internal {
// Forward declarations.
class BitVector;
+class FunctionState;
class HEnvironment;
class HGraph;
class HLoopInformation;
@@ -76,7 +77,7 @@ class HBasicBlock: public ZoneObject {
return &deleted_phis_;
}
void RecordDeletedPhi(int merge_index) {
- deleted_phis_.Add(merge_index);
+ deleted_phis_.Add(merge_index, zone());
}
HBasicBlock* dominator() const { return dominator_; }
HEnvironment* last_environment() const { return last_environment_; }
@@ -121,7 +122,7 @@ class HBasicBlock: public ZoneObject {
void Finish(HControlInstruction* last);
void FinishExit(HControlInstruction* instruction);
- void Goto(HBasicBlock* block, bool drop_extra = false);
+ void Goto(HBasicBlock* block, FunctionState* state = NULL);
int PredecessorIndexOf(HBasicBlock* predecessor) const;
void AddSimulate(int ast_id) { AddInstruction(CreateSimulate(ast_id)); }
@@ -136,7 +137,7 @@ class HBasicBlock: public ZoneObject {
// instruction and updating the bailout environment.
void AddLeaveInlined(HValue* return_value,
HBasicBlock* target,
- bool drop_extra = false);
+ FunctionState* state = NULL);
// If a target block is tagged as an inline function return, all
// predecessors should contain the inlined exit sequence:
@@ -157,7 +158,7 @@ class HBasicBlock: public ZoneObject {
dominates_loop_successors_ = true;
}
- inline Zone* zone();
+ inline Zone* zone() const;
#ifdef DEBUG
void Verify();
@@ -211,12 +212,12 @@ class HPredecessorIterator BASE_EMBEDDED {
class HLoopInformation: public ZoneObject {
public:
- explicit HLoopInformation(HBasicBlock* loop_header)
- : back_edges_(4),
+ HLoopInformation(HBasicBlock* loop_header, Zone* zone)
+ : back_edges_(4, zone),
loop_header_(loop_header),
- blocks_(8),
+ blocks_(8, zone),
stack_check_(NULL) {
- blocks_.Add(loop_header);
+ blocks_.Add(loop_header, zone);
}
virtual ~HLoopInformation() {}
@@ -240,13 +241,13 @@ class HLoopInformation: public ZoneObject {
HStackCheck* stack_check_;
};
-
+class BoundsCheckTable;
class HGraph: public ZoneObject {
public:
- explicit HGraph(CompilationInfo* info);
+ HGraph(CompilationInfo* info, Zone* zone);
Isolate* isolate() { return isolate_; }
- Zone* zone() { return isolate_->zone(); }
+ Zone* zone() const { return zone_; }
const ZoneList<HBasicBlock*>* blocks() const { return &blocks_; }
const ZoneList<HPhi*>* phi_list() const { return phi_list_; }
@@ -265,6 +266,8 @@ class HGraph: public ZoneObject {
void OrderBlocks();
void AssignDominators();
void ReplaceCheckedValues();
+ void EliminateRedundantBoundsChecks();
+ void DehoistSimpleArrayIndexComputations();
void PropagateDeoptimizingMark();
// Returns false if there are phi-uses of the arguments-object
@@ -277,7 +280,7 @@ class HGraph: public ZoneObject {
void CollectPhis();
- Handle<Code> Compile(CompilationInfo* info);
+ Handle<Code> Compile(CompilationInfo* info, Zone* zone);
void set_undefined_constant(HConstant* constant) {
undefined_constant_.set(constant);
@@ -301,7 +304,7 @@ class HGraph: public ZoneObject {
int GetMaximumValueID() const { return values_.length(); }
int GetNextBlockID() { return next_block_id_++; }
int GetNextValueID(HValue* value) {
- values_.Add(value);
+ values_.Add(value, zone());
return values_.length() - 1;
}
HValue* LookupValue(int id) const {
@@ -333,6 +336,14 @@ class HGraph: public ZoneObject {
osr_values_.set(values);
}
+ void MarkRecursive() {
+ is_recursive_ = true;
+ }
+
+ bool is_recursive() const {
+ return is_recursive_;
+ }
+
private:
void Postorder(HBasicBlock* block,
BitVector* visited,
@@ -357,6 +368,7 @@ class HGraph: public ZoneObject {
void InferTypes(ZoneList<HValue*>* worklist);
void InitializeInferredTypes(int from_inclusive, int to_inclusive);
void CheckForBackEdge(HBasicBlock* block, HBasicBlock* successor);
+ void EliminateRedundantBoundsChecks(HBasicBlock* bb, BoundsCheckTable* table);
Isolate* isolate_;
int next_block_id_;
@@ -376,11 +388,15 @@ class HGraph: public ZoneObject {
SetOncePointer<HBasicBlock> osr_loop_entry_;
SetOncePointer<ZoneList<HUnknownOSRValue*> > osr_values_;
+ Zone* zone_;
+
+ bool is_recursive_;
+
DISALLOW_COPY_AND_ASSIGN(HGraph);
};
-Zone* HBasicBlock::zone() { return graph_->zone(); }
+Zone* HBasicBlock::zone() const { return graph_->zone(); }
// Type of stack frame an environment might refer to.
@@ -391,7 +407,8 @@ class HEnvironment: public ZoneObject {
public:
HEnvironment(HEnvironment* outer,
Scope* scope,
- Handle<JSFunction> closure);
+ Handle<JSFunction> closure,
+ Zone* zone);
HEnvironment* DiscardInlined(bool drop_extra) {
HEnvironment* outer = outer_;
@@ -458,7 +475,7 @@ class HEnvironment: public ZoneObject {
void Push(HValue* value) {
ASSERT(value != NULL);
++push_count_;
- values_.Add(value);
+ values_.Add(value, zone());
}
HValue* Pop() {
@@ -515,13 +532,16 @@ class HEnvironment: public ZoneObject {
void PrintTo(StringStream* stream);
void PrintToStd();
+ Zone* zone() const { return zone_; }
+
private:
- explicit HEnvironment(const HEnvironment* other);
+ HEnvironment(const HEnvironment* other, Zone* zone);
HEnvironment(HEnvironment* outer,
Handle<JSFunction> closure,
FrameType frame_type,
- int arguments);
+ int arguments,
+ Zone* zone);
// Create an artificial stub environment (e.g. for argument adaptor or
// constructor stub).
@@ -559,6 +579,7 @@ class HEnvironment: public ZoneObject {
int pop_count_;
int push_count_;
int ast_id_;
+ Zone* zone_;
};
@@ -603,7 +624,7 @@ class AstContext {
HGraphBuilder* owner() const { return owner_; }
- inline Zone* zone();
+ inline Zone* zone() const;
// We want to be able to assert, in a context-specific way, that the stack
// height makes sense when the context is filled.
@@ -715,6 +736,16 @@ class FunctionState {
FunctionState* outer() { return outer_; }
+ HEnterInlined* entry() { return entry_; }
+ void set_entry(HEnterInlined* entry) { entry_ = entry; }
+
+ HArgumentsElements* arguments_elements() { return arguments_elements_; }
+ void set_arguments_elements(HArgumentsElements* arguments_elements) {
+ arguments_elements_ = arguments_elements;
+ }
+
+ bool arguments_pushed() { return arguments_elements() != NULL; }
+
private:
HGraphBuilder* owner_;
@@ -741,6 +772,12 @@ class FunctionState {
// return blocks. NULL in all other cases.
TestContext* test_context_;
+  // When inlining, the HEnterInlined instruction corresponding to the
+  // function entry.
+ HEnterInlined* entry_;
+
+ HArgumentsElements* arguments_elements_;
+
FunctionState* outer_;
};
@@ -801,7 +838,7 @@ class HGraphBuilder: public AstVisitor {
BreakAndContinueScope* next_;
};
- HGraphBuilder(CompilationInfo* info, TypeFeedbackOracle* oracle);
+ HGraphBuilder(CompilationInfo* info, TypeFeedbackOracle* oracle, Zone* zone);
HGraph* CreateGraph();
@@ -851,15 +888,11 @@ class HGraphBuilder: public AstVisitor {
static const int kMaxLoadPolymorphism = 4;
static const int kMaxStorePolymorphism = 4;
- static const int kMaxInlinedNodes = 196;
- static const int kMaxInlinedSize = 196;
- static const int kMaxSourceSize = 600;
-
// Even in the 'unlimited' case we have to have some limit in order not to
// overflow the stack.
- static const int kUnlimitedMaxInlinedNodes = 1000;
- static const int kUnlimitedMaxInlinedSize = 1000;
- static const int kUnlimitedMaxSourceSize = 600;
+ static const int kUnlimitedMaxInlinedSourceSize = 100000;
+ static const int kUnlimitedMaxInlinedNodes = 10000;
+ static const int kUnlimitedMaxInlinedNodesCumulative = 10000;
// Simple accessors.
void set_function_state(FunctionState* state) { function_state_ = state; }
@@ -896,11 +929,6 @@ class HGraphBuilder: public AstVisitor {
INLINE_RUNTIME_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_DECLARATION)
#undef INLINE_FUNCTION_GENERATOR_DECLARATION
- void HandleDeclaration(VariableProxy* proxy,
- VariableMode mode,
- FunctionLiteral* function,
- int* global_count);
-
void VisitDelete(UnaryOperation* expr);
void VisitVoid(UnaryOperation* expr);
void VisitTypeof(UnaryOperation* expr);
@@ -994,11 +1022,13 @@ class HGraphBuilder: public AstVisitor {
LookupResult* lookup,
bool is_store);
+ void EnsureArgumentsArePushedForAccess();
bool TryArgumentsAccess(Property* expr);
// Try to optimize fun.apply(receiver, arguments) pattern.
bool TryCallApply(Call* expr);
+ int InliningAstSize(Handle<JSFunction> target);
bool TryInline(CallKind call_kind,
Handle<JSFunction> target,
ZoneList<Expression*>* arguments,
@@ -1029,6 +1059,10 @@ class HGraphBuilder: public AstVisitor {
void HandlePropertyAssignment(Assignment* expr);
void HandleCompoundAssignment(Assignment* expr);
+ void HandlePolymorphicLoadNamedField(Property* expr,
+ HValue* object,
+ SmallMapList* types,
+ Handle<String> name);
void HandlePolymorphicStoreNamedField(Assignment* expr,
HValue* object,
HValue* value,
@@ -1076,6 +1110,7 @@ class HGraphBuilder: public AstVisitor {
HInstruction* BuildMonomorphicElementAccess(HValue* object,
HValue* key,
HValue* val,
+ HValue* dependency,
Handle<Map> map,
bool is_store);
HValue* HandlePolymorphicElementAccess(HValue* object,
@@ -1126,7 +1161,7 @@ class HGraphBuilder: public AstVisitor {
Handle<Map> receiver_map,
bool smi_and_map_check);
- Zone* zone() { return zone_; }
+ Zone* zone() const { return zone_; }
// The translation state of the currently-being-translated function.
FunctionState* function_state_;
@@ -1145,6 +1180,7 @@ class HGraphBuilder: public AstVisitor {
HBasicBlock* current_block_;
int inlined_count_;
+ ZoneList<Handle<Object> > globals_;
Zone* zone_;
@@ -1157,12 +1193,12 @@ class HGraphBuilder: public AstVisitor {
};
-Zone* AstContext::zone() { return owner_->zone(); }
+Zone* AstContext::zone() const { return owner_->zone(); }
class HValueMap: public ZoneObject {
public:
- HValueMap()
+ explicit HValueMap(Zone* zone)
: array_size_(0),
lists_size_(0),
count_(0),
@@ -1170,15 +1206,15 @@ class HValueMap: public ZoneObject {
array_(NULL),
lists_(NULL),
free_list_head_(kNil) {
- ResizeLists(kInitialSize);
- Resize(kInitialSize);
+ ResizeLists(kInitialSize, zone);
+ Resize(kInitialSize, zone);
}
void Kill(GVNFlagSet flags);
- void Add(HValue* value) {
+ void Add(HValue* value, Zone* zone) {
present_flags_.Add(value->gvn_flags());
- Insert(value);
+ Insert(value, zone);
}
HValue* Lookup(HValue* value) const;
@@ -1202,9 +1238,9 @@ class HValueMap: public ZoneObject {
HValueMap(Zone* zone, const HValueMap* other);
- void Resize(int new_size);
- void ResizeLists(int new_size);
- void Insert(HValue* value);
+ void Resize(int new_size, Zone* zone);
+ void ResizeLists(int new_size, Zone* zone);
+ void Insert(HValue* value, Zone* zone);
uint32_t Bound(uint32_t value) const { return value & (array_size_ - 1); }
int array_size_;
@@ -1219,6 +1255,31 @@ class HValueMap: public ZoneObject {
};
+class HSideEffectMap BASE_EMBEDDED {
+ public:
+ HSideEffectMap();
+ explicit HSideEffectMap(HSideEffectMap* other);
+ HSideEffectMap& operator= (const HSideEffectMap& other);
+
+ void Kill(GVNFlagSet flags);
+
+ void Store(GVNFlagSet flags, HInstruction* instr);
+
+ bool IsEmpty() const { return count_ == 0; }
+
+ inline HInstruction* operator[](int i) const {
+ ASSERT(0 <= i);
+ ASSERT(i < kNumberOfTrackedSideEffects);
+ return data_[i];
+ }
+ inline HInstruction* at(int i) const { return operator[](i); }
+
+ private:
+ int count_;
+ HInstruction* data_[kNumberOfTrackedSideEffects];
+};
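HSideEffectMap gives GVN a per-side-effect view of the most recent dominating instruction that produced each tracked effect. A hedged sketch of that shape; the element count and flag handling are simplified, and Instruction is an opaque stand-in for HInstruction:

#include <cstring>

struct Instruction;  // stand-in for HInstruction

static const int kTrackedSideEffectsSketch = 4;  // illustrative count only

class SideEffectMapSketch {
 public:
  SideEffectMapSketch() : count_(0) { std::memset(data_, 0, sizeof(data_)); }

  // Record 'instr' as the latest producer of side effect 'kind'.
  void Store(int kind, Instruction* instr) {
    if (data_[kind] == NULL) ++count_;
    data_[kind] = instr;
  }

  // Forget an entry clobbered by an instruction with conflicting effects.
  void Kill(int kind) {
    if (data_[kind] != NULL) {
      data_[kind] = NULL;
      --count_;
    }
  }

  bool IsEmpty() const { return count_ == 0; }
  Instruction* at(int kind) const { return data_[kind]; }

 private:
  int count_;
  Instruction* data_[kTrackedSideEffectsSketch];
};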
+
+
class HStatistics: public Malloced {
public:
void Initialize(CompilationInfo* info);
@@ -1332,7 +1393,7 @@ class HTracer: public Malloced {
WriteChars(filename, "", 0, false);
}
- void TraceLiveRange(LiveRange* range, const char* type);
+ void TraceLiveRange(LiveRange* range, const char* type, Zone* zone);
void Trace(const char* name, HGraph* graph, LChunk* chunk);
void FlushToFile();
diff --git a/deps/v8/src/ia32/assembler-ia32.h b/deps/v8/src/ia32/assembler-ia32.h
index 929b485ebf..4ead80b0ec 100644
--- a/deps/v8/src/ia32/assembler-ia32.h
+++ b/deps/v8/src/ia32/assembler-ia32.h
@@ -640,6 +640,9 @@ class Assembler : public AssemblerBase {
static const byte kJccShortPrefix = 0x70;
static const byte kJncShortOpcode = kJccShortPrefix | not_carry;
static const byte kJcShortOpcode = kJccShortPrefix | carry;
+ static const byte kJnzShortOpcode = kJccShortPrefix | not_zero;
+ static const byte kJzShortOpcode = kJccShortPrefix | zero;
+
// ---------------------------------------------------------------------------
// Code generation
diff --git a/deps/v8/src/ia32/builtins-ia32.cc b/deps/v8/src/ia32/builtins-ia32.cc
index a5d42cfbe4..be46ff216f 100644
--- a/deps/v8/src/ia32/builtins-ia32.cc
+++ b/deps/v8/src/ia32/builtins-ia32.cc
@@ -831,7 +831,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
// Copy all arguments from the array to the stack.
Label entry, loop;
- __ mov(eax, Operand(ebp, kIndexOffset));
+ __ mov(ecx, Operand(ebp, kIndexOffset));
__ jmp(&entry);
__ bind(&loop);
__ mov(edx, Operand(ebp, kArgumentsOffset)); // load arguments
@@ -848,16 +848,17 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
__ push(eax);
  // Update the index on the stack and in register ecx.
- __ mov(eax, Operand(ebp, kIndexOffset));
- __ add(eax, Immediate(1 << kSmiTagSize));
- __ mov(Operand(ebp, kIndexOffset), eax);
+ __ mov(ecx, Operand(ebp, kIndexOffset));
+ __ add(ecx, Immediate(1 << kSmiTagSize));
+ __ mov(Operand(ebp, kIndexOffset), ecx);
__ bind(&entry);
- __ cmp(eax, Operand(ebp, kLimitOffset));
+ __ cmp(ecx, Operand(ebp, kLimitOffset));
__ j(not_equal, &loop);
// Invoke the function.
Label call_proxy;
+ __ mov(eax, ecx);
ParameterCount actual(eax);
__ SmiUntag(eax);
__ mov(edi, Operand(ebp, kFunctionOffset));
@@ -899,7 +900,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
const int initial_capacity = JSArray::kPreallocatedArrayElements;
STATIC_ASSERT(initial_capacity >= 0);
- __ LoadInitialArrayMap(array_function, scratch2, scratch1);
+ __ LoadInitialArrayMap(array_function, scratch2, scratch1, false);
// Allocate the JSArray object together with space for a fixed array with the
// requested elements.
@@ -1002,7 +1003,8 @@ static void AllocateJSArray(MacroAssembler* masm,
ASSERT(!fill_with_hole || array_size.is(ecx)); // rep stos count
ASSERT(!fill_with_hole || !result.is(eax)); // result is never eax
- __ LoadInitialArrayMap(array_function, scratch, elements_array);
+ __ LoadInitialArrayMap(array_function, scratch,
+ elements_array, fill_with_hole);
// Allocate the JSArray object together with space for a FixedArray with the
// requested elements.
@@ -1273,11 +1275,11 @@ static void ArrayNativeCode(MacroAssembler* masm,
__ jmp(&prepare_generic_code_call);
__ bind(&not_double);
- // Transition FAST_SMI_ONLY_ELEMENTS to FAST_ELEMENTS.
+ // Transition FAST_SMI_ELEMENTS to FAST_ELEMENTS.
__ mov(ebx, Operand(esp, 0));
__ mov(edi, FieldOperand(ebx, HeapObject::kMapOffset));
__ LoadTransitionedArrayMapConditional(
- FAST_SMI_ONLY_ELEMENTS,
+ FAST_SMI_ELEMENTS,
FAST_ELEMENTS,
edi,
eax,
diff --git a/deps/v8/src/ia32/code-stubs-ia32.cc b/deps/v8/src/ia32/code-stubs-ia32.cc
index 4faa6a4b24..df04b289b4 100644
--- a/deps/v8/src/ia32/code-stubs-ia32.cc
+++ b/deps/v8/src/ia32/code-stubs-ia32.cc
@@ -1681,6 +1681,11 @@ void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) {
}
+// Input:
+// edx: left operand (tagged)
+// eax: right operand (tagged)
+// Output:
+// eax: result (tagged)
void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
Label call_runtime;
ASSERT(operands_type_ == BinaryOpIC::INT32);
@@ -1690,31 +1695,37 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
case Token::ADD:
case Token::SUB:
case Token::MUL:
- case Token::DIV: {
+ case Token::DIV:
+ case Token::MOD: {
Label not_floats;
Label not_int32;
if (CpuFeatures::IsSupported(SSE2)) {
CpuFeatures::Scope use_sse2(SSE2);
FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
FloatingPointHelper::CheckSSE2OperandsAreInt32(masm, &not_int32, ecx);
- switch (op_) {
- case Token::ADD: __ addsd(xmm0, xmm1); break;
- case Token::SUB: __ subsd(xmm0, xmm1); break;
- case Token::MUL: __ mulsd(xmm0, xmm1); break;
- case Token::DIV: __ divsd(xmm0, xmm1); break;
- default: UNREACHABLE();
- }
- // Check result type if it is currently Int32.
- if (result_type_ <= BinaryOpIC::INT32) {
- __ cvttsd2si(ecx, Operand(xmm0));
- __ cvtsi2sd(xmm2, ecx);
- __ ucomisd(xmm0, xmm2);
- __ j(not_zero, &not_int32);
- __ j(carry, &not_int32);
+ if (op_ == Token::MOD) {
+ GenerateRegisterArgsPush(masm);
+ __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
+ } else {
+ switch (op_) {
+ case Token::ADD: __ addsd(xmm0, xmm1); break;
+ case Token::SUB: __ subsd(xmm0, xmm1); break;
+ case Token::MUL: __ mulsd(xmm0, xmm1); break;
+ case Token::DIV: __ divsd(xmm0, xmm1); break;
+ default: UNREACHABLE();
+ }
+ // Check result type if it is currently Int32.
+ if (result_type_ <= BinaryOpIC::INT32) {
+ __ cvttsd2si(ecx, Operand(xmm0));
+ __ cvtsi2sd(xmm2, ecx);
+ __ ucomisd(xmm0, xmm2);
+ __ j(not_zero, &not_int32);
+ __ j(carry, &not_int32);
+ }
+ GenerateHeapResultAllocation(masm, &call_runtime);
+ __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
+ __ ret(0);
}
- GenerateHeapResultAllocation(masm, &call_runtime);
- __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
- __ ret(0);
} else { // SSE2 not available, use FPU.
FloatingPointHelper::CheckFloatOperands(masm, &not_floats, ebx);
FloatingPointHelper::LoadFloatOperands(
@@ -1722,20 +1733,28 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
ecx,
FloatingPointHelper::ARGS_IN_REGISTERS);
FloatingPointHelper::CheckFloatOperandsAreInt32(masm, &not_int32);
- switch (op_) {
- case Token::ADD: __ faddp(1); break;
- case Token::SUB: __ fsubp(1); break;
- case Token::MUL: __ fmulp(1); break;
- case Token::DIV: __ fdivp(1); break;
- default: UNREACHABLE();
+ if (op_ == Token::MOD) {
+ // The operands are now on the FPU stack, but we don't need them.
+ __ fstp(0);
+ __ fstp(0);
+ GenerateRegisterArgsPush(masm);
+ __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
+ } else {
+ switch (op_) {
+ case Token::ADD: __ faddp(1); break;
+ case Token::SUB: __ fsubp(1); break;
+ case Token::MUL: __ fmulp(1); break;
+ case Token::DIV: __ fdivp(1); break;
+ default: UNREACHABLE();
+ }
+ Label after_alloc_failure;
+ GenerateHeapResultAllocation(masm, &after_alloc_failure);
+ __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
+ __ ret(0);
+ __ bind(&after_alloc_failure);
+ __ fstp(0); // Pop FPU stack before calling runtime.
+ __ jmp(&call_runtime);
}
- Label after_alloc_failure;
- GenerateHeapResultAllocation(masm, &after_alloc_failure);
- __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
- __ ret(0);
- __ bind(&after_alloc_failure);
- __ ffree();
- __ jmp(&call_runtime);
}
__ bind(&not_floats);
@@ -1744,10 +1763,6 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
break;
}
- case Token::MOD: {
- // For MOD we go directly to runtime in the non-smi case.
- break;
- }
case Token::BIT_OR:
case Token::BIT_AND:
case Token::BIT_XOR:
@@ -1758,11 +1773,6 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
Label not_floats;
Label not_int32;
Label non_smi_result;
- /* {
- CpuFeatures::Scope use_sse2(SSE2);
- FloatingPointHelper::LoadSSE2Operands(masm, &not_floats);
- FloatingPointHelper::CheckSSE2OperandsAreInt32(masm, &not_int32, ecx);
- }*/
FloatingPointHelper::LoadUnknownsAsIntegers(masm,
use_sse3_,
&not_floats);
@@ -1833,8 +1843,8 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
default: UNREACHABLE(); break;
}
- // If an allocation fails, or SHR or MOD hit a hard case,
- // use the runtime system to get the correct result.
+ // If an allocation fails, or SHR hits a hard case, use the runtime system to
+ // get the correct result.
__ bind(&call_runtime);
switch (op_) {
@@ -1855,8 +1865,6 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
__ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION);
break;
case Token::MOD:
- GenerateRegisterArgsPush(masm);
- __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION);
break;
case Token::BIT_OR:
__ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION);
@@ -1957,7 +1965,7 @@ void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
__ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
__ ret(0);
__ bind(&after_alloc_failure);
- __ ffree();
+ __ fstp(0); // Pop FPU stack before calling runtime.
__ jmp(&call_runtime);
}
@@ -2161,8 +2169,8 @@ void BinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
__ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
__ ret(0);
__ bind(&after_alloc_failure);
- __ ffree();
- __ jmp(&call_runtime);
+ __ fstp(0); // Pop FPU stack before calling runtime.
+ __ jmp(&call_runtime);
}
__ bind(&not_floats);
break;
@@ -3814,20 +3822,24 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ IncrementCounter(counters->regexp_entry_native(), 1);
// Isolates: note we add an additional parameter here (isolate pointer).
- static const int kRegExpExecuteArguments = 8;
+ static const int kRegExpExecuteArguments = 9;
__ EnterApiExitFrame(kRegExpExecuteArguments);
- // Argument 8: Pass current isolate address.
- __ mov(Operand(esp, 7 * kPointerSize),
+ // Argument 9: Pass current isolate address.
+ __ mov(Operand(esp, 8 * kPointerSize),
Immediate(ExternalReference::isolate_address()));
- // Argument 7: Indicate that this is a direct call from JavaScript.
- __ mov(Operand(esp, 6 * kPointerSize), Immediate(1));
+ // Argument 8: Indicate that this is a direct call from JavaScript.
+ __ mov(Operand(esp, 7 * kPointerSize), Immediate(1));
- // Argument 6: Start (high end) of backtracking stack memory area.
+ // Argument 7: Start (high end) of backtracking stack memory area.
__ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address));
__ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size));
- __ mov(Operand(esp, 5 * kPointerSize), esi);
+ __ mov(Operand(esp, 6 * kPointerSize), esi);
+
+ // Argument 6: Set the number of capture registers to zero to force global
+ // regexps to behave as non-global. This does not affect non-global regexps.
+ __ mov(Operand(esp, 5 * kPointerSize), Immediate(0));
// Argument 5: static offsets vector buffer.
__ mov(Operand(esp, 4 * kPointerSize),
@@ -3890,7 +3902,9 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Check the result.
Label success;
- __ cmp(eax, NativeRegExpMacroAssembler::SUCCESS);
+ __ cmp(eax, 1);
+ // We expect exactly one result since we force the called regexp to behave
+ // as non-global.
__ j(equal, &success);
Label failure;
__ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
@@ -5006,11 +5020,9 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
__ j(not_equal, &not_outermost_js, Label::kNear);
__ mov(Operand::StaticVariable(js_entry_sp), ebp);
__ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
- Label cont;
- __ jmp(&cont, Label::kNear);
+ __ jmp(&invoke, Label::kNear);
__ bind(&not_outermost_js);
__ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
- __ bind(&cont);
// Jump to a faked try block that does the invoke, with a faked catch
// block that sets the pending exception.
@@ -6162,7 +6174,11 @@ void SubStringStub::Generate(MacroAssembler* masm) {
__ sub(ecx, edx);
__ cmp(ecx, FieldOperand(eax, String::kLengthOffset));
Label not_original_string;
- __ j(not_equal, &not_original_string, Label::kNear);
+ // Shorter than original string's length: an actual substring.
+ __ j(below, &not_original_string, Label::kNear);
+ // Longer than original string's length or negative: unsafe arguments.
+ __ j(above, &runtime);
+ // Return original string.
Counters* counters = masm->isolate()->counters();
__ IncrementCounter(counters->sub_string_native(), 1);
__ ret(3 * kPointerSize);
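The new below/above pair works because the comparison is unsigned: a negative requested length, reinterpreted as unsigned, becomes huge and is rejected by the same 'above' branch that catches oversized requests. A small sketch of that classification, with placeholder names rather than the stub's registers and labels:

#include <stdint.h>

enum SubStringPath { kReturnOriginal, kActualSubstring, kCallRuntime };

SubStringPath Classify(int32_t requested_length, uint32_t original_length) {
  // Negative lengths become very large unsigned values, so one comparison
  // rejects both "too long" and "negative".
  uint32_t len = static_cast<uint32_t>(requested_length);
  if (len < original_length) return kActualSubstring;  // j(below, ...)
  if (len > original_length) return kCallRuntime;      // j(above, &runtime)
  return kReturnOriginal;                              // equal: whole string
}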
@@ -7047,8 +7063,8 @@ static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
// KeyedStoreStubCompiler::GenerateStoreFastElement.
{ REG(edi), REG(ebx), REG(ecx), EMIT_REMEMBERED_SET},
{ REG(edx), REG(edi), REG(ebx), EMIT_REMEMBERED_SET},
- // ElementsTransitionGenerator::GenerateSmiOnlyToObject
- // and ElementsTransitionGenerator::GenerateSmiOnlyToDouble
+ // ElementsTransitionGenerator::GenerateMapChangeElementTransition
+ // and ElementsTransitionGenerator::GenerateSmiToDouble
// and ElementsTransitionGenerator::GenerateDoubleToObject
{ REG(edx), REG(ebx), REG(edi), EMIT_REMEMBERED_SET},
{ REG(edx), REG(ebx), REG(edi), OMIT_REMEMBERED_SET},
@@ -7320,9 +7336,9 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
__ CheckFastElements(edi, &double_elements);
- // FAST_SMI_ONLY_ELEMENTS or FAST_ELEMENTS
+ // Check for FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS elements
__ JumpIfSmi(eax, &smi_element);
- __ CheckFastSmiOnlyElements(edi, &fast_elements, Label::kNear);
+ __ CheckFastSmiElements(edi, &fast_elements, Label::kNear);
  // Storing into the array literal requires an elements transition. Call into
// the runtime.
@@ -7344,7 +7360,7 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
__ pop(edx);
__ jmp(&slow_elements);
- // Array literal has ElementsKind of FAST_ELEMENTS and value is an object.
+ // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
__ bind(&fast_elements);
__ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
__ lea(ecx, FieldOperand(ebx, ecx, times_half_pointer_size,
@@ -7357,15 +7373,15 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
OMIT_SMI_CHECK);
__ ret(0);
- // Array literal has ElementsKind of FAST_SMI_ONLY_ELEMENTS or
- // FAST_ELEMENTS, and value is Smi.
+ // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS,
+ // and value is Smi.
__ bind(&smi_element);
__ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
__ mov(FieldOperand(ebx, ecx, times_half_pointer_size,
FixedArrayBase::kHeaderSize), eax);
__ ret(0);
- // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
+ // Array literal has ElementsKind of FAST_*_DOUBLE_ELEMENTS.
__ bind(&double_elements);
__ push(edx);
diff --git a/deps/v8/src/ia32/codegen-ia32.cc b/deps/v8/src/ia32/codegen-ia32.cc
index ea61910322..eb6868729b 100644
--- a/deps/v8/src/ia32/codegen-ia32.cc
+++ b/deps/v8/src/ia32/codegen-ia32.cc
@@ -351,7 +351,7 @@ OS::MemCopyFunction CreateMemCopyFunction() {
#define __ ACCESS_MASM(masm)
-void ElementsTransitionGenerator::GenerateSmiOnlyToObject(
+void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax : value
@@ -372,7 +372,7 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToObject(
}
-void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
+void ElementsTransitionGenerator::GenerateSmiToDouble(
MacroAssembler* masm, Label* fail) {
// ----------- S t a t e -------------
// -- eax : value
@@ -397,9 +397,25 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
// Allocate new FixedDoubleArray.
// edx: receiver
// edi: length of source FixedArray (smi-tagged)
- __ lea(esi, Operand(edi, times_4, FixedDoubleArray::kHeaderSize));
+ __ lea(esi, Operand(edi,
+ times_4,
+ FixedDoubleArray::kHeaderSize + kPointerSize));
__ AllocateInNewSpace(esi, eax, ebx, no_reg, &gc_required, TAG_OBJECT);
+ Label aligned, aligned_done;
+ __ test(eax, Immediate(kDoubleAlignmentMask - kHeapObjectTag));
+ __ j(zero, &aligned, Label::kNear);
+ __ mov(FieldOperand(eax, 0),
+ Immediate(masm->isolate()->factory()->one_pointer_filler_map()));
+ __ add(eax, Immediate(kPointerSize));
+ __ jmp(&aligned_done);
+
+ __ bind(&aligned);
+ __ mov(Operand(eax, esi, times_1, -kPointerSize-1),
+ Immediate(masm->isolate()->factory()->one_pointer_filler_map()));
+
+ __ bind(&aligned_done);
+
// eax: destination FixedDoubleArray
// edi: number of elements
// edx: receiver
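The allocation above reserves one extra pointer-sized word and then places a one-pointer filler either before or after the array so the double payload ends up 8-byte aligned while the heap stays iterable. A simplified sketch of the address arithmetic, ignoring heap-object tagging and using illustrative constants:

#include <stdint.h>

const uintptr_t kPointerSizeSketch = 4;        // ia32 word size
const uintptr_t kDoubleAlignmentMaskSketch = 8 - 1;

// Given the raw (possibly unaligned) allocation address, return where the
// FixedDoubleArray actually starts; the leftover word is covered by a
// one-pointer filler map in the real code.
uintptr_t AlignForDoubles(uintptr_t raw_address) {
  if (raw_address & kDoubleAlignmentMaskSketch) {
    return raw_address + kPointerSizeSketch;  // filler first, array after it
  }
  return raw_address;  // already aligned; the filler goes at the very end
}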
diff --git a/deps/v8/src/ia32/debug-ia32.cc b/deps/v8/src/ia32/debug-ia32.cc
index d13fa759ca..d153e18ee9 100644
--- a/deps/v8/src/ia32/debug-ia32.cc
+++ b/deps/v8/src/ia32/debug-ia32.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -91,9 +91,11 @@ void BreakLocationIterator::ClearDebugBreakAtSlot() {
rinfo()->PatchCode(original_rinfo()->pc(), Assembler::kDebugBreakSlotLength);
}
+// All debug break stubs support padding for LiveEdit.
+const bool Debug::FramePaddingLayout::kIsSupported = true;
-#define __ ACCESS_MASM(masm)
+#define __ ACCESS_MASM(masm)
static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
RegList object_regs,
@@ -103,6 +105,13 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
{
FrameScope scope(masm, StackFrame::INTERNAL);
+ // Load padding words on stack.
+ for (int i = 0; i < Debug::FramePaddingLayout::kInitialSize; i++) {
+ __ push(Immediate(Smi::FromInt(
+ Debug::FramePaddingLayout::kPaddingValue)));
+ }
+ __ push(Immediate(Smi::FromInt(Debug::FramePaddingLayout::kInitialSize)));
+
// Store the registers containing live values on the expression stack to
    // make sure that these are correctly updated during GC. Non-object values
    // are stored as smis so that the GC leaves them untouched.
@@ -134,6 +143,10 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
CEntryStub ceb(1);
__ CallStub(&ceb);
+    // Automatically find a register that can be used after the register restore.
+    // We need one register for the padding skip instructions.
+ Register unused_reg = { -1 };
+
// Restore the register values containing object pointers from the
// expression stack.
for (int i = kNumJSCallerSaved; --i >= 0;) {
@@ -142,15 +155,29 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
if (FLAG_debug_code) {
__ Set(reg, Immediate(kDebugZapValue));
}
+ bool taken = reg.code() == esi.code();
if ((object_regs & (1 << r)) != 0) {
__ pop(reg);
+ taken = true;
}
if ((non_object_regs & (1 << r)) != 0) {
__ pop(reg);
__ SmiUntag(reg);
+ taken = true;
+ }
+ if (!taken) {
+ unused_reg = reg;
}
}
+ ASSERT(unused_reg.code() != -1);
+
+ // Read current padding counter and skip corresponding number of words.
+ __ pop(unused_reg);
+ // We divide stored value by 2 (untagging) and multiply it by word's size.
+ STATIC_ASSERT(kSmiTagSize == 1 && kSmiShiftSize == 0);
+ __ lea(esp, Operand(esp, unused_reg, times_half_pointer_size, 0));
+
// Get rid of the internal frame.
}
@@ -172,10 +199,10 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
void Debug::GenerateLoadICDebugBreak(MacroAssembler* masm) {
// Register state for IC load call (from ic-ia32.cc).
// ----------- S t a t e -------------
- // -- eax : receiver
// -- ecx : name
+ // -- edx : receiver
// -----------------------------------
- Generate_DebugBreakCallHelper(masm, eax.bit() | ecx.bit(), 0, false);
+ Generate_DebugBreakCallHelper(masm, ecx.bit() | edx.bit(), 0, false);
}
@@ -194,10 +221,10 @@ void Debug::GenerateStoreICDebugBreak(MacroAssembler* masm) {
void Debug::GenerateKeyedLoadICDebugBreak(MacroAssembler* masm) {
// Register state for keyed IC load call (from ic-ia32.cc).
// ----------- S t a t e -------------
+ // -- ecx : key
// -- edx : receiver
- // -- eax : key
// -----------------------------------
- Generate_DebugBreakCallHelper(masm, eax.bit() | edx.bit(), 0, false);
+ Generate_DebugBreakCallHelper(masm, ecx.bit() | edx.bit(), 0, false);
}
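
Note on the debug-ia32.cc hunks above: the debug-break stubs now reserve slack for LiveEdit by pushing a fixed number of padding words plus a counter before spilling live registers, and on the way out they pop the counter and skip whatever padding is still on the stack. A rough stand-alone sketch of that protocol, with the machine stack modelled as a std::vector; kInitialSize and kPaddingValue are assumed placeholders for the Debug::FramePaddingLayout constants used by the stub:

    #include <cstdint>
    #include <vector>

    static const int32_t kPaddingValue = 0;
    static const int32_t kInitialSize = 10;

    // Entry: reserve kInitialSize words of slack and a counter on top of them.
    static void PushLiveEditPadding(std::vector<int32_t>* stack) {
      for (int32_t i = 0; i < kInitialSize; i++) stack->push_back(kPaddingValue);
      stack->push_back(kInitialSize);
    }

    // Exit: pop the counter and drop that many words, matching the
    // lea(esp, ...) that skips the remaining padding in the stub.
    static void DropLiveEditPadding(std::vector<int32_t>* stack) {
      const int32_t remaining = stack->back();
      stack->pop_back();
      stack->resize(stack->size() - remaining);
    }
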
diff --git a/deps/v8/src/ia32/deoptimizer-ia32.cc b/deps/v8/src/ia32/deoptimizer-ia32.cc
index 6de2c81b9d..326207fbc6 100644
--- a/deps/v8/src/ia32/deoptimizer-ia32.cc
+++ b/deps/v8/src/ia32/deoptimizer-ia32.cc
@@ -239,13 +239,13 @@ void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code,
// ok:
if (FLAG_count_based_interrupts) {
- ASSERT_EQ(*(call_target_address - 3), kJnsInstruction);
- ASSERT_EQ(*(call_target_address - 2), kJnsOffset);
+ ASSERT_EQ(kJnsInstruction, *(call_target_address - 3));
+ ASSERT_EQ(kJnsOffset, *(call_target_address - 2));
} else {
- ASSERT_EQ(*(call_target_address - 3), kJaeInstruction);
- ASSERT_EQ(*(call_target_address - 2), kJaeOffset);
+ ASSERT_EQ(kJaeInstruction, *(call_target_address - 3));
+ ASSERT_EQ(kJaeOffset, *(call_target_address - 2));
}
- ASSERT_EQ(*(call_target_address - 1), kCallInstruction);
+ ASSERT_EQ(kCallInstruction, *(call_target_address - 1));
*(call_target_address - 3) = kNopByteOne;
*(call_target_address - 2) = kNopByteTwo;
Assembler::set_target_address_at(call_target_address,
@@ -266,9 +266,9 @@ void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code,
// Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to
// restore the conditional branch.
- ASSERT_EQ(*(call_target_address - 3), kNopByteOne);
- ASSERT_EQ(*(call_target_address - 2), kNopByteTwo);
- ASSERT_EQ(*(call_target_address - 1), kCallInstruction);
+ ASSERT_EQ(kNopByteOne, *(call_target_address - 3));
+ ASSERT_EQ(kNopByteTwo, *(call_target_address - 2));
+ ASSERT_EQ(kCallInstruction, *(call_target_address - 1));
if (FLAG_count_based_interrupts) {
*(call_target_address - 3) = kJnsInstruction;
*(call_target_address - 2) = kJnsOffset;
@@ -351,10 +351,12 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
PrintF("[on-stack replacement: begin 0x%08" V8PRIxPTR " ",
reinterpret_cast<intptr_t>(function_));
function_->PrintName();
- PrintF(" => node=%u, frame=%d->%d]\n",
+ PrintF(" => node=%u, frame=%d->%d, ebp:esp=0x%08x:0x%08x]\n",
ast_id,
input_frame_size,
- output_frame_size);
+ output_frame_size,
+ input_->GetRegister(ebp.code()),
+ input_->GetRegister(esp.code()));
}
// There's only one output frame in the OSR case.
@@ -404,7 +406,7 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
name = "function";
break;
}
- PrintF(" [esp + %d] <- 0x%08x ; [esp + %d] (fixed part - %s)\n",
+ PrintF(" [sp + %d] <- 0x%08x ; [sp + %d] (fixed part - %s)\n",
output_offset,
input_value,
input_offset,
@@ -415,6 +417,24 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
output_offset -= kPointerSize;
}
+ // All OSR stack frames are dynamically aligned to an 8-byte boundary.
+ int frame_pointer = input_->GetRegister(ebp.code());
+ if ((frame_pointer & kPointerSize) != 0) {
+ frame_pointer -= kPointerSize;
+ has_alignment_padding_ = 1;
+ }
+
+ int32_t alignment_state = (has_alignment_padding_ == 1) ?
+ kAlignmentPaddingPushed :
+ kNoAlignmentPadding;
+ if (FLAG_trace_osr) {
+ PrintF(" [sp + %d] <- 0x%08x ; (alignment state)\n",
+ output_offset,
+ alignment_state);
+ }
+ output_[0]->SetFrameSlot(output_offset, alignment_state);
+ output_offset -= kPointerSize;
+
// Translate the rest of the frame.
while (ok && input_offset >= 0) {
ok = DoOsrTranslateCommand(&iterator, &input_offset);
@@ -427,7 +447,7 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
output_[0]->SetPc(reinterpret_cast<uint32_t>(from_));
} else {
// Set up the frame pointer and the context pointer.
- output_[0]->SetRegister(ebp.code(), input_->GetRegister(ebp.code()));
+ output_[0]->SetRegister(ebp.code(), frame_pointer);
output_[0]->SetRegister(esi.code(), input_->GetRegister(esi.code()));
unsigned pc_offset = data->OsrPcOffset()->value();
@@ -688,24 +708,38 @@ void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
ASSERT(output_[frame_index] == NULL);
output_[frame_index] = output_frame;
+ // Compute the incoming parameter translation.
+ int parameter_count = function->shared()->formal_parameter_count() + 1;
+ unsigned output_offset = output_frame_size;
+ unsigned input_offset = input_frame_size;
+
+ unsigned alignment_state_offset =
+ input_offset - parameter_count * kPointerSize -
+ StandardFrameConstants::kFixedFrameSize -
+ kPointerSize;
+ ASSERT(JavaScriptFrameConstants::kDynamicAlignmentStateOffset ==
+ JavaScriptFrameConstants::kLocal0Offset);
+
// The top address for the bottommost output frame can be computed from
// the input frame pointer and the output frame's height. For all
// subsequent output frames, it can be computed from the previous one's
// top address and the current frame's size.
uint32_t top_address;
if (is_bottommost) {
+ int32_t alignment_state = input_->GetFrameSlot(alignment_state_offset);
+ has_alignment_padding_ =
+ (alignment_state == kAlignmentPaddingPushed) ? 1 : 0;
// 2 = context and function in the frame.
- top_address =
- input_->GetRegister(ebp.code()) - (2 * kPointerSize) - height_in_bytes;
+ // If the optimized frame had alignment padding, adjust the frame pointer
+ // to point to the new position of the old frame pointer after padding
+ // is removed. Subtract 2 * kPointerSize for the context and function slots.
+ top_address = input_->GetRegister(ebp.code()) - (2 * kPointerSize) -
+ height_in_bytes + has_alignment_padding_ * kPointerSize;
} else {
top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
}
output_frame->SetTop(top_address);
- // Compute the incoming parameter translation.
- int parameter_count = function->shared()->formal_parameter_count() + 1;
- unsigned output_offset = output_frame_size;
- unsigned input_offset = input_frame_size;
for (int i = 0; i < parameter_count; ++i) {
output_offset -= kPointerSize;
DoTranslateCommand(iterator, frame_index, output_offset);
@@ -747,13 +781,17 @@ void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
}
output_frame->SetFrameSlot(output_offset, value);
intptr_t fp_value = top_address + output_offset;
- ASSERT(!is_bottommost || input_->GetRegister(ebp.code()) == fp_value);
+ ASSERT(!is_bottommost ||
+ (input_->GetRegister(ebp.code()) + has_alignment_padding_ * kPointerSize) ==
+ fp_value);
output_frame->SetFp(fp_value);
if (is_topmost) output_frame->SetRegister(ebp.code(), fp_value);
if (FLAG_trace_deopt) {
PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
fp_value, output_offset, value);
}
+ ASSERT(!is_bottommost || !has_alignment_padding_ ||
+ (fp_value & kPointerSize) != 0);
// For the bottommost output frame the context can be gotten from the input
// frame. For all subsequent output frames it can be gotten from the function
@@ -948,6 +986,28 @@ void Deoptimizer::EntryGenerator::Generate() {
}
__ pop(eax);
+ if (type() != OSR) {
+ // If frame was dynamically aligned, pop padding.
+ Label no_padding;
+ __ cmp(Operand(eax, Deoptimizer::has_alignment_padding_offset()),
+ Immediate(0));
+ __ j(equal, &no_padding);
+ __ pop(ecx);
+ if (FLAG_debug_code) {
+ __ cmp(ecx, Immediate(kAlignmentZapValue));
+ __ Assert(equal, "alignment marker expected");
+ }
+ __ bind(&no_padding);
+ } else {
+ // If frame needs dynamic alignment push padding.
+ Label no_padding;
+ __ cmp(Operand(eax, Deoptimizer::has_alignment_padding_offset()),
+ Immediate(0));
+ __ j(equal, &no_padding);
+ __ push(Immediate(kAlignmentZapValue));
+ __ bind(&no_padding);
+ }
+
// Replace the current frame with the output frames.
Label outer_push_loop, inner_push_loop;
// Outer loop state: eax = current FrameDescription**, edx = one past the
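
Note on the deoptimizer hunks above: OSR and deopt are now aware of dynamically aligned frames. The incoming ebp is tested against the kPointerSize bit, an alignment-state word is written into the output frame, and the frame pointer is shifted down by one word when padding was present. A small sketch of just that decision, reusing the constants that the frames-ia32.h hunk below introduces; frame_pointer plays the role of the incoming ebp value:

    #include <cstdint>

    static const uint32_t kPointerSize = 4;
    static const int kNoAlignmentPadding = 0;
    static const int kAlignmentPaddingPushed = 2;

    struct OsrAlignment {
      uint32_t adjusted_fp;   // frame pointer after removing any padding word
      int alignment_state;    // value stored into the output frame slot
    };

    static OsrAlignment ComputeOsrAlignment(uint32_t frame_pointer) {
      OsrAlignment result = { frame_pointer, kNoAlignmentPadding };
      if ((frame_pointer & kPointerSize) != 0) {
        result.adjusted_fp = frame_pointer - kPointerSize;
        result.alignment_state = kAlignmentPaddingPushed;
      }
      return result;
    }
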
diff --git a/deps/v8/src/ia32/frames-ia32.h b/deps/v8/src/ia32/frames-ia32.h
index 9e51857bdb..18915e2e3c 100644
--- a/deps/v8/src/ia32/frames-ia32.h
+++ b/deps/v8/src/ia32/frames-ia32.h
@@ -53,6 +53,10 @@ typedef Object* JSCallerSavedBuffer[kNumJSCallerSaved];
// Number of registers for which space is reserved in safepoints.
const int kNumSafepointRegisters = 8;
+const int kNoAlignmentPadding = 0;
+const int kAlignmentPaddingPushed = 2;
+const int kAlignmentZapValue = 0x12345678; // Not heap object tagged.
+
// ----------------------------------------------------
@@ -119,6 +123,8 @@ class JavaScriptFrameConstants : public AllStatic {
// Caller SP-relative.
static const int kParam0Offset = -2 * kPointerSize;
static const int kReceiverOffset = -1 * kPointerSize;
+
+ static const int kDynamicAlignmentStateOffset = kLocal0Offset;
};
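
Note on the frames-ia32.h hunks above: the first stack local is now reserved for the dynamic-alignment state (kDynamicAlignmentStateOffset == kLocal0Offset), and kAlignmentZapValue marks the padding slot itself. A tiny illustrative reader for that slot; the slot offset is passed in as an assumed parameter rather than derived from kLocal0Offset:

    #include <cstdint>
    #include <cstring>

    static const int32_t kAlignmentPaddingPushed = 2;

    // Returns how many extra words an epilogue would have to drop (0 or 1)
    // according to the dynamic-alignment state stored in the frame.
    static int ExtraAlignmentWords(const uint8_t* ebp, int slot_offset) {
      int32_t state;
      std::memcpy(&state, ebp + slot_offset, sizeof(state));
      return state == kAlignmentPaddingPushed ? 1 : 0;
    }
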
diff --git a/deps/v8/src/ia32/full-codegen-ia32.cc b/deps/v8/src/ia32/full-codegen-ia32.cc
index cf16c5b6ea..5a513fd483 100644
--- a/deps/v8/src/ia32/full-codegen-ia32.cc
+++ b/deps/v8/src/ia32/full-codegen-ia32.cc
@@ -101,13 +101,6 @@ class JumpPatchSite BASE_EMBEDDED {
};
-// TODO(jkummerow): Obsolete as soon as x64 is updated. Remove.
-int FullCodeGenerator::self_optimization_header_size() {
- UNREACHABLE();
- return 13;
-}
-
-
// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them. The actual argument count matches the
@@ -269,11 +262,11 @@ void FullCodeGenerator::Generate() {
// For named function expressions, declare the function name as a
// constant.
if (scope()->is_function_scope() && scope()->function() != NULL) {
- VariableProxy* proxy = scope()->function();
- ASSERT(proxy->var()->mode() == CONST ||
- proxy->var()->mode() == CONST_HARMONY);
- ASSERT(proxy->var()->location() != Variable::UNALLOCATED);
- EmitDeclaration(proxy, proxy->var()->mode(), NULL);
+ VariableDeclaration* function = scope()->function();
+ ASSERT(function->proxy()->var()->mode() == CONST ||
+ function->proxy()->var()->mode() == CONST_HARMONY);
+ ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
+ VisitVariableDeclaration(function);
}
VisitDeclarations(scope()->declarations());
}
@@ -763,60 +756,51 @@ void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
}
-void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
- VariableMode mode,
- FunctionLiteral* function) {
+void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
+ // The variable in the declaration always resides in the current function
+ // context.
+ ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
+ if (FLAG_debug_code) {
+ // Check that we're not inside a with or catch context.
+ __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
+ __ cmp(ebx, isolate()->factory()->with_context_map());
+ __ Check(not_equal, "Declaration in with context.");
+ __ cmp(ebx, isolate()->factory()->catch_context_map());
+ __ Check(not_equal, "Declaration in catch context.");
+ }
+}
+
+
+void FullCodeGenerator::VisitVariableDeclaration(
+ VariableDeclaration* declaration) {
// If it was not possible to allocate the variable at compile time, we
// need to "declare" it at runtime to make sure it actually exists in the
// local context.
+ VariableProxy* proxy = declaration->proxy();
+ VariableMode mode = declaration->mode();
Variable* variable = proxy->var();
- bool binding_needs_init = (function == NULL) &&
- (mode == CONST || mode == CONST_HARMONY || mode == LET);
+ bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
switch (variable->location()) {
case Variable::UNALLOCATED:
- ++global_count_;
+ globals_->Add(variable->name(), zone());
+ globals_->Add(variable->binding_needs_init()
+ ? isolate()->factory()->the_hole_value()
+ : isolate()->factory()->undefined_value(), zone());
break;
case Variable::PARAMETER:
case Variable::LOCAL:
- if (function != NULL) {
- Comment cmnt(masm_, "[ Declaration");
- VisitForAccumulatorValue(function);
- __ mov(StackOperand(variable), result_register());
- } else if (binding_needs_init) {
- Comment cmnt(masm_, "[ Declaration");
+ if (hole_init) {
+ Comment cmnt(masm_, "[ VariableDeclaration");
__ mov(StackOperand(variable),
Immediate(isolate()->factory()->the_hole_value()));
}
break;
case Variable::CONTEXT:
- // The variable in the decl always resides in the current function
- // context.
- ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
- if (FLAG_debug_code) {
- // Check that we're not inside a with or catch context.
- __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
- __ cmp(ebx, isolate()->factory()->with_context_map());
- __ Check(not_equal, "Declaration in with context.");
- __ cmp(ebx, isolate()->factory()->catch_context_map());
- __ Check(not_equal, "Declaration in catch context.");
- }
- if (function != NULL) {
- Comment cmnt(masm_, "[ Declaration");
- VisitForAccumulatorValue(function);
- __ mov(ContextOperand(esi, variable->index()), result_register());
- // We know that we have written a function, which is not a smi.
- __ RecordWriteContextSlot(esi,
- Context::SlotOffset(variable->index()),
- result_register(),
- ecx,
- kDontSaveFPRegs,
- EMIT_REMEMBERED_SET,
- OMIT_SMI_CHECK);
- PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
- } else if (binding_needs_init) {
- Comment cmnt(masm_, "[ Declaration");
+ if (hole_init) {
+ Comment cmnt(masm_, "[ VariableDeclaration");
+ EmitDebugCheckDeclarationContext(variable);
__ mov(ContextOperand(esi, variable->index()),
Immediate(isolate()->factory()->the_hole_value()));
// No write barrier since the hole value is in old space.
@@ -825,14 +809,12 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
break;
case Variable::LOOKUP: {
- Comment cmnt(masm_, "[ Declaration");
+ Comment cmnt(masm_, "[ VariableDeclaration");
__ push(esi);
__ push(Immediate(variable->name()));
- // Declaration nodes are always introduced in one of four modes.
- ASSERT(mode == VAR ||
- mode == CONST ||
- mode == CONST_HARMONY ||
- mode == LET);
+ // VariableDeclaration nodes are always introduced in one of four modes.
+ ASSERT(mode == VAR || mode == LET ||
+ mode == CONST || mode == CONST_HARMONY);
PropertyAttributes attr = (mode == CONST || mode == CONST_HARMONY)
? READ_ONLY : NONE;
__ push(Immediate(Smi::FromInt(attr)));
@@ -840,9 +822,7 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
// Note: For variables we must not push an initial value (such as
// 'undefined') because we may have a (legal) redeclaration and we
// must not destroy the current value.
- if (function != NULL) {
- VisitForStackValue(function);
- } else if (binding_needs_init) {
+ if (hole_init) {
__ push(Immediate(isolate()->factory()->the_hole_value()));
} else {
__ push(Immediate(Smi::FromInt(0))); // Indicates no initial value.
@@ -854,6 +834,118 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
}
+void FullCodeGenerator::VisitFunctionDeclaration(
+ FunctionDeclaration* declaration) {
+ VariableProxy* proxy = declaration->proxy();
+ Variable* variable = proxy->var();
+ switch (variable->location()) {
+ case Variable::UNALLOCATED: {
+ globals_->Add(variable->name(), zone());
+ Handle<SharedFunctionInfo> function =
+ Compiler::BuildFunctionInfo(declaration->fun(), script());
+ // Check for stack-overflow exception.
+ if (function.is_null()) return SetStackOverflow();
+ globals_->Add(function, zone());
+ break;
+ }
+
+ case Variable::PARAMETER:
+ case Variable::LOCAL: {
+ Comment cmnt(masm_, "[ FunctionDeclaration");
+ VisitForAccumulatorValue(declaration->fun());
+ __ mov(StackOperand(variable), result_register());
+ break;
+ }
+
+ case Variable::CONTEXT: {
+ Comment cmnt(masm_, "[ FunctionDeclaration");
+ EmitDebugCheckDeclarationContext(variable);
+ VisitForAccumulatorValue(declaration->fun());
+ __ mov(ContextOperand(esi, variable->index()), result_register());
+ // We know that we have written a function, which is not a smi.
+ __ RecordWriteContextSlot(esi,
+ Context::SlotOffset(variable->index()),
+ result_register(),
+ ecx,
+ kDontSaveFPRegs,
+ EMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
+ break;
+ }
+
+ case Variable::LOOKUP: {
+ Comment cmnt(masm_, "[ FunctionDeclaration");
+ __ push(esi);
+ __ push(Immediate(variable->name()));
+ __ push(Immediate(Smi::FromInt(NONE)));
+ VisitForStackValue(declaration->fun());
+ __ CallRuntime(Runtime::kDeclareContextSlot, 4);
+ break;
+ }
+ }
+}
+
+
+void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
+ VariableProxy* proxy = declaration->proxy();
+ Variable* variable = proxy->var();
+ Handle<JSModule> instance = declaration->module()->interface()->Instance();
+ ASSERT(!instance.is_null());
+
+ switch (variable->location()) {
+ case Variable::UNALLOCATED: {
+ Comment cmnt(masm_, "[ ModuleDeclaration");
+ globals_->Add(variable->name(), zone());
+ globals_->Add(instance, zone());
+ Visit(declaration->module());
+ break;
+ }
+
+ case Variable::CONTEXT: {
+ Comment cmnt(masm_, "[ ModuleDeclaration");
+ EmitDebugCheckDeclarationContext(variable);
+ __ mov(ContextOperand(esi, variable->index()), Immediate(instance));
+ Visit(declaration->module());
+ break;
+ }
+
+ case Variable::PARAMETER:
+ case Variable::LOCAL:
+ case Variable::LOOKUP:
+ UNREACHABLE();
+ }
+}
+
+
+void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
+ VariableProxy* proxy = declaration->proxy();
+ Variable* variable = proxy->var();
+ switch (variable->location()) {
+ case Variable::UNALLOCATED:
+ // TODO(rossberg)
+ break;
+
+ case Variable::CONTEXT: {
+ Comment cmnt(masm_, "[ ImportDeclaration");
+ EmitDebugCheckDeclarationContext(variable);
+ // TODO(rossberg)
+ break;
+ }
+
+ case Variable::PARAMETER:
+ case Variable::LOCAL:
+ case Variable::LOOKUP:
+ UNREACHABLE();
+ }
+}
+
+
+void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
+ // TODO(rossberg)
+}
+
+
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
// Call the runtime to declare the globals.
__ push(esi); // The context is the first argument.
@@ -1194,7 +1286,7 @@ void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
// All extension objects were empty and it is safe to use a global
// load IC call.
- __ mov(eax, GlobalObjectOperand());
+ __ mov(edx, GlobalObjectOperand());
__ mov(ecx, var->name());
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
@@ -1278,7 +1370,7 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
Comment cmnt(masm_, "Global variable");
// Use inline caching. Variable name is passed in ecx and the global
// object in eax.
- __ mov(eax, GlobalObjectOperand());
+ __ mov(edx, GlobalObjectOperand());
__ mov(ecx, var->name());
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
@@ -1465,7 +1557,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
// Mark all computed expressions that are bound to a key that
// is shadowed by a later occurrence of the same key. For the
// marked expressions, no store code is emitted.
- expr->CalculateEmitStore();
+ expr->CalculateEmitStore(zone());
AccessorTable accessor_table(isolate()->zone());
for (int i = 0; i < expr->properties()->length(); i++) {
@@ -1557,7 +1649,8 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
ASSERT_EQ(2, constant_elements->length());
ElementsKind constant_elements_kind =
static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
- bool has_constant_fast_elements = constant_elements_kind == FAST_ELEMENTS;
+ bool has_constant_fast_elements =
+ IsFastObjectElementsKind(constant_elements_kind);
Handle<FixedArrayBase> constant_elements_values(
FixedArrayBase::cast(constant_elements->get(1)));
@@ -1568,7 +1661,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
Heap* heap = isolate()->heap();
if (has_constant_fast_elements &&
constant_elements_values->map() == heap->fixed_cow_array_map()) {
- // If the elements are already FAST_ELEMENTS, the boilerplate cannot
+ // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
// change, so it's possible to specialize the stub in advance.
__ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
FastCloneShallowArrayStub stub(
@@ -1580,10 +1673,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
__ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
} else {
- ASSERT(constant_elements_kind == FAST_ELEMENTS ||
- constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
+ ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
FLAG_smi_only_arrays);
- // If the elements are already FAST_ELEMENTS, the boilerplate cannot
+ // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
// change, so it's possible to specialize the stub in advance.
FastCloneShallowArrayStub::Mode mode = has_constant_fast_elements
? FastCloneShallowArrayStub::CLONE_ELEMENTS
@@ -1611,9 +1703,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
}
VisitForAccumulatorValue(subexpr);
- if (constant_elements_kind == FAST_ELEMENTS) {
- // Fast-case array literal with ElementsKind of FAST_ELEMENTS, they cannot
- // transition and don't need to call the runtime stub.
+ if (IsFastObjectElementsKind(constant_elements_kind)) {
+ // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS, they
+ // cannot transition and don't need to call the runtime stub.
int offset = FixedArray::kHeaderSize + (i * kPointerSize);
__ mov(ebx, Operand(esp, 0)); // Copy of array literal.
__ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
@@ -1672,9 +1764,9 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
break;
case NAMED_PROPERTY:
if (expr->is_compound()) {
- // We need the receiver both on the stack and in the accumulator.
- VisitForAccumulatorValue(property->obj());
- __ push(result_register());
+ // We need the receiver both on the stack and in edx.
+ VisitForStackValue(property->obj());
+ __ mov(edx, Operand(esp, 0));
} else {
VisitForStackValue(property->obj());
}
@@ -1682,9 +1774,9 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
case KEYED_PROPERTY: {
if (expr->is_compound()) {
VisitForStackValue(property->obj());
- VisitForAccumulatorValue(property->key());
- __ mov(edx, Operand(esp, 0));
- __ push(eax);
+ VisitForStackValue(property->key());
+ __ mov(edx, Operand(esp, kPointerSize)); // Object.
+ __ mov(ecx, Operand(esp, 0)); // Key.
} else {
VisitForStackValue(property->obj());
VisitForStackValue(property->key());
@@ -1927,7 +2019,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr) {
VisitForStackValue(prop->obj());
VisitForAccumulatorValue(prop->key());
__ mov(ecx, eax);
- __ pop(edx);
+ __ pop(edx); // Receiver.
__ pop(eax); // Restore value.
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize()
@@ -2033,6 +2125,9 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
// Assignment to a property, using a named store IC.
+ // eax : value
+ // esp[0] : receiver
+
Property* prop = expr->target()->AsProperty();
ASSERT(prop != NULL);
ASSERT(prop->key()->AsLiteral() != NULL);
@@ -2075,6 +2170,9 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
// Assignment to a property, using a keyed store IC.
+ // eax : value
+ // esp[0] : key
+ // esp[kPointerSize] : receiver
// If the assignment starts a block of assignments to the same object,
// change to slow case to avoid the quadratic behavior of repeatedly
@@ -2087,7 +2185,7 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
__ pop(result_register());
}
- __ pop(ecx);
+ __ pop(ecx); // Key.
if (expr->ends_initialization_block()) {
__ mov(edx, Operand(esp, 0)); // Leave receiver on the stack for later.
} else {
@@ -2120,12 +2218,14 @@ void FullCodeGenerator::VisitProperty(Property* expr) {
if (key->IsPropertyName()) {
VisitForAccumulatorValue(expr->obj());
+ __ mov(edx, result_register());
EmitNamedPropertyLoad(expr);
context()->Plug(eax);
} else {
VisitForStackValue(expr->obj());
VisitForAccumulatorValue(expr->key());
- __ pop(edx);
+ __ pop(edx); // Object.
+ __ mov(ecx, result_register()); // Key.
EmitKeyedPropertyLoad(expr);
context()->Plug(eax);
}
@@ -3924,15 +4024,16 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ push(Immediate(Smi::FromInt(0)));
}
if (assign_type == NAMED_PROPERTY) {
- // Put the object both on the stack and in the accumulator.
+ // Put the object both on the stack and in edx.
VisitForAccumulatorValue(prop->obj());
__ push(eax);
+ __ mov(edx, eax);
EmitNamedPropertyLoad(prop);
} else {
VisitForStackValue(prop->obj());
- VisitForAccumulatorValue(prop->key());
- __ mov(edx, Operand(esp, 0));
- __ push(eax);
+ VisitForStackValue(prop->key());
+ __ mov(edx, Operand(esp, kPointerSize)); // Object.
+ __ mov(ecx, Operand(esp, 0)); // Key.
EmitKeyedPropertyLoad(prop);
}
}
@@ -4079,7 +4180,7 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
if (proxy != NULL && proxy->var()->IsUnallocated()) {
Comment cmnt(masm_, "Global variable");
- __ mov(eax, GlobalObjectOperand());
+ __ mov(edx, GlobalObjectOperand());
__ mov(ecx, Immediate(proxy->name()));
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
// Use a regular load, not a contextual load, to avoid a reference
@@ -4344,7 +4445,8 @@ void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
Scope* declaration_scope = scope()->DeclarationScope();
- if (declaration_scope->is_global_scope()) {
+ if (declaration_scope->is_global_scope() ||
+ declaration_scope->is_module_scope()) {
// Contexts nested in the global context have a canonical empty function
// as their closure, not the anonymous closure containing the global
// code. Pass a smi sentinel and let the runtime look up the empty
@@ -4374,14 +4476,49 @@ void FullCodeGenerator::EnterFinallyBlock() {
STATIC_ASSERT(kSmiTag == 0);
__ SmiTag(edx);
__ push(edx);
+
// Store result register while executing finally block.
__ push(result_register());
+
+ // Store pending message while executing finally block.
+ ExternalReference pending_message_obj =
+ ExternalReference::address_of_pending_message_obj(isolate());
+ __ mov(edx, Operand::StaticVariable(pending_message_obj));
+ __ push(edx);
+
+ ExternalReference has_pending_message =
+ ExternalReference::address_of_has_pending_message(isolate());
+ __ mov(edx, Operand::StaticVariable(has_pending_message));
+ __ push(edx);
+
+ ExternalReference pending_message_script =
+ ExternalReference::address_of_pending_message_script(isolate());
+ __ mov(edx, Operand::StaticVariable(pending_message_script));
+ __ push(edx);
}
void FullCodeGenerator::ExitFinallyBlock() {
ASSERT(!result_register().is(edx));
+ // Restore pending message from stack.
+ __ pop(edx);
+ ExternalReference pending_message_script =
+ ExternalReference::address_of_pending_message_script(isolate());
+ __ mov(Operand::StaticVariable(pending_message_script), edx);
+
+ __ pop(edx);
+ ExternalReference has_pending_message =
+ ExternalReference::address_of_has_pending_message(isolate());
+ __ mov(Operand::StaticVariable(has_pending_message), edx);
+
+ __ pop(edx);
+ ExternalReference pending_message_obj =
+ ExternalReference::address_of_pending_message_obj(isolate());
+ __ mov(Operand::StaticVariable(pending_message_obj), edx);
+
+ // Restore result register from stack.
__ pop(result_register());
+
// Uncook return address.
__ pop(edx);
__ SmiUntag(edx);
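
Note on the full-codegen-ia32.cc hunks above: EnterFinallyBlock and ExitFinallyBlock now also spill and restore the isolate's pending-message state (message object, has-message flag, message script) around a finally body. A stand-alone sketch of the push/pop order, with the stack modelled as a std::vector and a plain struct standing in for the three external references (pending_message_obj, has_pending_message, pending_message_script):

    #include <cstdint>
    #include <vector>

    struct PendingMessage {
      intptr_t object;
      intptr_t has_message;
      intptr_t script;
    };

    // EnterFinallyBlock pushes object, then flag, then script...
    static void SavePendingMessage(std::vector<intptr_t>* stack,
                                   const PendingMessage& pm) {
      stack->push_back(pm.object);
      stack->push_back(pm.has_message);
      stack->push_back(pm.script);
    }

    // ...and ExitFinallyBlock pops them in the reverse order.
    static void RestorePendingMessage(std::vector<intptr_t>* stack,
                                      PendingMessage* pm) {
      pm->script = stack->back();      stack->pop_back();
      pm->has_message = stack->back(); stack->pop_back();
      pm->object = stack->back();      stack->pop_back();
    }
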
diff --git a/deps/v8/src/ia32/ic-ia32.cc b/deps/v8/src/ia32/ic-ia32.cc
index eac2739520..a091ff1aa6 100644
--- a/deps/v8/src/ia32/ic-ia32.cc
+++ b/deps/v8/src/ia32/ic-ia32.cc
@@ -218,13 +218,13 @@ static void GenerateDictionaryStore(MacroAssembler* masm,
void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
// ----------- S t a t e -------------
- // -- eax : receiver
// -- ecx : name
+ // -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
Label miss;
- StubCompiler::GenerateLoadArrayLength(masm, eax, edx, &miss);
+ StubCompiler::GenerateLoadArrayLength(masm, edx, eax, &miss);
__ bind(&miss);
StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
@@ -233,13 +233,13 @@ void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
void LoadIC::GenerateStringLength(MacroAssembler* masm,
bool support_wrappers) {
// ----------- S t a t e -------------
- // -- eax : receiver
// -- ecx : name
+ // -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
Label miss;
- StubCompiler::GenerateLoadStringLength(masm, eax, edx, ebx, &miss,
+ StubCompiler::GenerateLoadStringLength(masm, edx, eax, ebx, &miss,
support_wrappers);
__ bind(&miss);
StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
@@ -248,13 +248,13 @@ void LoadIC::GenerateStringLength(MacroAssembler* masm,
void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
// ----------- S t a t e -------------
- // -- eax : receiver
// -- ecx : name
+ // -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
Label miss;
- StubCompiler::GenerateLoadFunctionPrototype(masm, eax, edx, ebx, &miss);
+ StubCompiler::GenerateLoadFunctionPrototype(masm, edx, eax, ebx, &miss);
__ bind(&miss);
StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
@@ -443,7 +443,7 @@ static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
// ----------- S t a t e -------------
- // -- eax : key
+ // -- ecx : key
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
@@ -451,39 +451,34 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
Label probe_dictionary, check_number_dictionary;
// Check that the key is a smi.
- __ JumpIfNotSmi(eax, &check_string);
+ __ JumpIfNotSmi(ecx, &check_string);
__ bind(&index_smi);
// Now the key is known to be a smi. This place is also jumped to from
// where a numeric string is converted to a smi.
GenerateKeyedLoadReceiverCheck(
- masm, edx, ecx, Map::kHasIndexedInterceptor, &slow);
+ masm, edx, eax, Map::kHasIndexedInterceptor, &slow);
// Check the receiver's map to see if it has fast elements.
- __ CheckFastElements(ecx, &check_number_dictionary);
-
- GenerateFastArrayLoad(masm,
- edx,
- eax,
- ecx,
- eax,
- NULL,
- &slow);
+ __ CheckFastElements(eax, &check_number_dictionary);
+
+ GenerateFastArrayLoad(masm, edx, ecx, eax, eax, NULL, &slow);
Isolate* isolate = masm->isolate();
Counters* counters = isolate->counters();
__ IncrementCounter(counters->keyed_load_generic_smi(), 1);
__ ret(0);
+
__ bind(&check_number_dictionary);
- __ mov(ebx, eax);
+ __ mov(ebx, ecx);
__ SmiUntag(ebx);
- __ mov(ecx, FieldOperand(edx, JSObject::kElementsOffset));
+ __ mov(eax, FieldOperand(edx, JSObject::kElementsOffset));
// Check whether the elements is a number dictionary.
// edx: receiver
// ebx: untagged index
- // eax: key
- // ecx: elements
- __ CheckMap(ecx,
+ // ecx: key
+ // eax: elements
+ __ CheckMap(eax,
isolate->factory()->hash_table_map(),
&slow,
DONT_DO_SMI_CHECK);
@@ -491,13 +486,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
// Push receiver on the stack to free up a register for the dictionary
// probing.
__ push(edx);
- __ LoadFromNumberDictionary(&slow_pop_receiver,
- ecx,
- eax,
- ebx,
- edx,
- edi,
- eax);
+ __ LoadFromNumberDictionary(&slow_pop_receiver, eax, ecx, ebx, edx, edi, eax);
// Pop receiver before returning.
__ pop(edx);
__ ret(0);
@@ -509,15 +498,15 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
__ bind(&slow);
// Slow case: jump to runtime.
// edx: receiver
- // eax: key
+ // ecx: key
__ IncrementCounter(counters->keyed_load_generic_slow(), 1);
GenerateRuntimeGetProperty(masm);
__ bind(&check_string);
- GenerateKeyStringCheck(masm, eax, ecx, ebx, &index_string, &slow);
+ GenerateKeyStringCheck(masm, ecx, eax, ebx, &index_string, &slow);
GenerateKeyedLoadReceiverCheck(
- masm, edx, ecx, Map::kHasNamedInterceptor, &slow);
+ masm, edx, eax, Map::kHasNamedInterceptor, &slow);
// If the receiver is a fast-case object, check the keyed lookup
// cache. Otherwise probe the dictionary.
@@ -526,15 +515,18 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
Immediate(isolate->factory()->hash_table_map()));
__ j(equal, &probe_dictionary);
- // Load the map of the receiver, compute the keyed lookup cache hash
+ // The receiver's map is still in eax, compute the keyed lookup cache hash
// based on 32 bits of the map pointer and the string hash.
- __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
- __ mov(ecx, ebx);
- __ shr(ecx, KeyedLookupCache::kMapHashShift);
- __ mov(edi, FieldOperand(eax, String::kHashFieldOffset));
+ if (FLAG_debug_code) {
+ __ cmp(eax, FieldOperand(edx, HeapObject::kMapOffset));
+ __ Check(equal, "Map is no longer in eax.");
+ }
+ __ mov(ebx, eax); // Keep the map around for later.
+ __ shr(eax, KeyedLookupCache::kMapHashShift);
+ __ mov(edi, FieldOperand(ecx, String::kHashFieldOffset));
__ shr(edi, String::kHashShift);
- __ xor_(ecx, edi);
- __ and_(ecx, KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
+ __ xor_(eax, edi);
+ __ and_(eax, KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
// Load the key (consisting of map and symbol) from the cache and
// check for match.
@@ -546,7 +538,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
for (int i = 0; i < kEntriesPerBucket - 1; i++) {
Label try_next_entry;
- __ mov(edi, ecx);
+ __ mov(edi, eax);
__ shl(edi, kPointerSizeLog2 + 1);
if (i != 0) {
__ add(edi, Immediate(kPointerSize * i * 2));
@@ -554,25 +546,25 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
__ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys));
__ j(not_equal, &try_next_entry);
__ add(edi, Immediate(kPointerSize));
- __ cmp(eax, Operand::StaticArray(edi, times_1, cache_keys));
+ __ cmp(ecx, Operand::StaticArray(edi, times_1, cache_keys));
__ j(equal, &hit_on_nth_entry[i]);
__ bind(&try_next_entry);
}
- __ lea(edi, Operand(ecx, 1));
+ __ lea(edi, Operand(eax, 1));
__ shl(edi, kPointerSizeLog2 + 1);
__ add(edi, Immediate(kPointerSize * (kEntriesPerBucket - 1) * 2));
__ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys));
__ j(not_equal, &slow);
__ add(edi, Immediate(kPointerSize));
- __ cmp(eax, Operand::StaticArray(edi, times_1, cache_keys));
+ __ cmp(ecx, Operand::StaticArray(edi, times_1, cache_keys));
__ j(not_equal, &slow);
// Get field offset.
// edx : receiver
// ebx : receiver's map
- // eax : key
- // ecx : lookup cache index
+ // ecx : key
+ // eax : lookup cache index
ExternalReference cache_field_offsets =
ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());
@@ -580,12 +572,12 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
__ bind(&hit_on_nth_entry[i]);
if (i != 0) {
- __ add(ecx, Immediate(i));
+ __ add(eax, Immediate(i));
}
__ mov(edi,
- Operand::StaticArray(ecx, times_pointer_size, cache_field_offsets));
- __ movzx_b(ecx, FieldOperand(ebx, Map::kInObjectPropertiesOffset));
- __ sub(edi, ecx);
+ Operand::StaticArray(eax, times_pointer_size, cache_field_offsets));
+ __ movzx_b(eax, FieldOperand(ebx, Map::kInObjectPropertiesOffset));
+ __ sub(edi, eax);
__ j(above_equal, &property_array_property);
if (i != 0) {
__ jmp(&load_in_object_property);
@@ -594,9 +586,9 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
// Load in-object property.
__ bind(&load_in_object_property);
- __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceSizeOffset));
- __ add(ecx, edi);
- __ mov(eax, FieldOperand(edx, ecx, times_pointer_size, 0));
+ __ movzx_b(eax, FieldOperand(ebx, Map::kInstanceSizeOffset));
+ __ add(eax, edi);
+ __ mov(eax, FieldOperand(edx, eax, times_pointer_size, 0));
__ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
__ ret(0);
@@ -612,16 +604,16 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
// exists.
__ bind(&probe_dictionary);
- __ mov(ecx, FieldOperand(edx, JSObject::kMapOffset));
- __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
- GenerateGlobalInstanceTypeCheck(masm, ecx, &slow);
+ __ mov(eax, FieldOperand(edx, JSObject::kMapOffset));
+ __ movzx_b(eax, FieldOperand(eax, Map::kInstanceTypeOffset));
+ GenerateGlobalInstanceTypeCheck(masm, eax, &slow);
- GenerateDictionaryLoad(masm, &slow, ebx, eax, ecx, edi, eax);
+ GenerateDictionaryLoad(masm, &slow, ebx, ecx, eax, edi, eax);
__ IncrementCounter(counters->keyed_load_generic_symbol(), 1);
__ ret(0);
__ bind(&index_string);
- __ IndexFromHash(ebx, eax);
+ __ IndexFromHash(ebx, ecx);
// Now jump to the place where smi keys are handled.
__ jmp(&index_smi);
}
@@ -629,15 +621,15 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
// ----------- S t a t e -------------
- // -- eax : key (index)
+ // -- ecx : key (index)
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
Label miss;
Register receiver = edx;
- Register index = eax;
- Register scratch = ecx;
+ Register index = ecx;
+ Register scratch = ebx;
Register result = eax;
StringCharAtGenerator char_at_generator(receiver,
@@ -661,7 +653,7 @@ void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
// ----------- S t a t e -------------
- // -- eax : key
+ // -- ecx : key
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
@@ -671,24 +663,24 @@ void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
__ JumpIfSmi(edx, &slow);
// Check that the key is an array index, that is Uint32.
- __ test(eax, Immediate(kSmiTagMask | kSmiSignMask));
+ __ test(ecx, Immediate(kSmiTagMask | kSmiSignMask));
__ j(not_zero, &slow);
// Get the map of the receiver.
- __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
+ __ mov(eax, FieldOperand(edx, HeapObject::kMapOffset));
// Check that it has indexed interceptor and access checks
// are not enabled for this object.
- __ movzx_b(ecx, FieldOperand(ecx, Map::kBitFieldOffset));
- __ and_(ecx, Immediate(kSlowCaseBitFieldMask));
- __ cmp(ecx, Immediate(1 << Map::kHasIndexedInterceptor));
+ __ movzx_b(eax, FieldOperand(eax, Map::kBitFieldOffset));
+ __ and_(eax, Immediate(kSlowCaseBitFieldMask));
+ __ cmp(eax, Immediate(1 << Map::kHasIndexedInterceptor));
__ j(not_zero, &slow);
// Everything is fine, call runtime.
- __ pop(ecx);
+ __ pop(eax);
__ push(edx); // receiver
- __ push(eax); // key
- __ push(ecx); // return address
+ __ push(ecx); // key
+ __ push(eax); // return address
// Perform tail call to the entry.
ExternalReference ref =
@@ -703,20 +695,20 @@ void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) {
// ----------- S t a t e -------------
- // -- eax : key
+ // -- ecx : key
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
Label slow, notin;
Factory* factory = masm->isolate()->factory();
Operand mapped_location =
- GenerateMappedArgumentsLookup(masm, edx, eax, ebx, ecx, &notin, &slow);
+ GenerateMappedArgumentsLookup(masm, edx, ecx, ebx, eax, &notin, &slow);
__ mov(eax, mapped_location);
__ Ret();
__ bind(&notin);
// The unmapped lookup expects that the parameter map is in ebx.
Operand unmapped_location =
- GenerateUnmappedArgumentsLookup(masm, eax, ebx, ecx, &slow);
+ GenerateUnmappedArgumentsLookup(masm, ecx, ebx, eax, &slow);
__ cmp(unmapped_location, factory->the_hole_value());
__ j(equal, &slow);
__ mov(eax, unmapped_location);
@@ -897,25 +889,25 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
&non_double_value,
DONT_DO_SMI_CHECK);
- // Value is a double. Transition FAST_SMI_ONLY_ELEMENTS ->
- // FAST_DOUBLE_ELEMENTS and complete the store.
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ // Value is a double. Transition FAST_SMI_ELEMENTS -> FAST_DOUBLE_ELEMENTS
+ // and complete the store.
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
FAST_DOUBLE_ELEMENTS,
ebx,
edi,
&slow);
- ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &slow);
+ ElementsTransitionGenerator::GenerateSmiToDouble(masm, &slow);
__ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
__ jmp(&fast_double_without_map_check);
__ bind(&non_double_value);
- // Value is not a double, FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
FAST_ELEMENTS,
ebx,
edi,
&slow);
- ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm);
+ ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm);
__ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
__ jmp(&finish_object_store);
@@ -1308,15 +1300,15 @@ void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
// ----------- S t a t e -------------
- // -- eax : receiver
// -- ecx : name
+ // -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
// Probe the stub cache.
Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC, MONOMORPHIC);
- Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, eax, ecx, ebx,
- edx);
+ Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, edx, ecx, ebx,
+ eax);
// Cache miss: Jump to runtime.
GenerateMiss(masm);
@@ -1325,17 +1317,17 @@ void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
void LoadIC::GenerateNormal(MacroAssembler* masm) {
// ----------- S t a t e -------------
- // -- eax : receiver
// -- ecx : name
+ // -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
Label miss;
- GenerateStringDictionaryReceiverCheck(masm, eax, edx, ebx, &miss);
+ GenerateStringDictionaryReceiverCheck(masm, edx, eax, ebx, &miss);
- // edx: elements
+ // eax: elements
// Search the dictionary placing the result in eax.
- GenerateDictionaryLoad(masm, &miss, edx, ecx, edi, ebx, eax);
+ GenerateDictionaryLoad(masm, &miss, eax, ecx, edi, ebx, eax);
__ ret(0);
// Cache miss: Jump to runtime.
@@ -1346,15 +1338,15 @@ void LoadIC::GenerateNormal(MacroAssembler* masm) {
void LoadIC::GenerateMiss(MacroAssembler* masm) {
// ----------- S t a t e -------------
- // -- eax : receiver
// -- ecx : name
+ // -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
__ IncrementCounter(masm->isolate()->counters()->load_miss(), 1);
__ pop(ebx);
- __ push(eax); // receiver
+ __ push(edx); // receiver
__ push(ecx); // name
__ push(ebx); // return address
@@ -1367,7 +1359,7 @@ void LoadIC::GenerateMiss(MacroAssembler* masm) {
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
// ----------- S t a t e -------------
- // -- eax : key
+ // -- ecx : key
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
@@ -1376,7 +1368,7 @@ void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
__ pop(ebx);
__ push(edx); // receiver
- __ push(eax); // name
+ __ push(ecx); // name
__ push(ebx); // return address
// Perform tail call to the entry.
@@ -1390,14 +1382,14 @@ void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
// ----------- S t a t e -------------
- // -- eax : key
+ // -- ecx : key
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
__ pop(ebx);
__ push(edx); // receiver
- __ push(eax); // name
+ __ push(ecx); // name
__ push(ebx); // return address
// Perform tail call to the entry.
@@ -1630,7 +1622,7 @@ void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
// Must return the modified receiver in eax.
if (!FLAG_trace_elements_transitions) {
Label fail;
- ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &fail);
+ ElementsTransitionGenerator::GenerateSmiToDouble(masm, &fail);
__ mov(eax, edx);
__ Ret();
__ bind(&fail);
@@ -1735,12 +1727,12 @@ void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
// Activate inlined smi code.
if (previous_state == UNINITIALIZED) {
- PatchInlinedSmiCode(address());
+ PatchInlinedSmiCode(address(), ENABLE_INLINED_SMI_CHECK);
}
}
-void PatchInlinedSmiCode(Address address) {
+void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
// The address of the instruction following the call.
Address test_instruction_address =
address + Assembler::kCallTargetAddressOffset;
@@ -1761,14 +1753,18 @@ void PatchInlinedSmiCode(Address address) {
address, test_instruction_address, delta);
}
- // Patch with a short conditional jump. There must be a
- // short jump-if-carry/not-carry at this position.
+ // Patch with a short conditional jump. Enabling means switching from a short
+ // jump-if-carry/not-carry to jump-if-zero/not-zero, whereas disabling is the
+ // reverse operation of that.
Address jmp_address = test_instruction_address - delta;
- ASSERT(*jmp_address == Assembler::kJncShortOpcode ||
- *jmp_address == Assembler::kJcShortOpcode);
- Condition cc = *jmp_address == Assembler::kJncShortOpcode
- ? not_zero
- : zero;
+ ASSERT((check == ENABLE_INLINED_SMI_CHECK)
+ ? (*jmp_address == Assembler::kJncShortOpcode ||
+ *jmp_address == Assembler::kJcShortOpcode)
+ : (*jmp_address == Assembler::kJnzShortOpcode ||
+ *jmp_address == Assembler::kJzShortOpcode));
+ Condition cc = (check == ENABLE_INLINED_SMI_CHECK)
+ ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero)
+ : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry);
*jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
}
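
Note on the ic-ia32.cc hunks above: besides moving the LoadIC/KeyedLoadIC calling convention to receiver-in-edx and key/name-in-ecx, PatchInlinedSmiCode is extended so the inlined smi check can be disabled again as well as enabled. Enabling rewrites a short jump-on-carry into the matching jump-on-zero, and disabling performs the reverse rewrite. A sketch of that opcode selection; the byte values are believed to be the standard x86 short-jcc encodings and stand in for the Assembler::kJ* constants referenced by the real code:

    #include <cstdint>

    enum InlinedSmiCheck { ENABLE_INLINED_SMI_CHECK, DISABLE_INLINED_SMI_CHECK };

    static const uint8_t kJcShort  = 0x72;  // jump if carry
    static const uint8_t kJncShort = 0x73;  // jump if not carry
    static const uint8_t kJzShort  = 0x74;  // jump if zero
    static const uint8_t kJnzShort = 0x75;  // jump if not zero

    // Enabling turns a carry-based jump into the matching zero-based jump and
    // disabling performs the reverse rewrite, preserving the branch sense.
    static uint8_t PatchSmiCheckJump(uint8_t old_opcode, InlinedSmiCheck check) {
      if (check == ENABLE_INLINED_SMI_CHECK) {
        return old_opcode == kJncShort ? kJnzShort : kJzShort;
      }
      return old_opcode == kJnzShort ? kJncShort : kJcShort;
    }
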
diff --git a/deps/v8/src/ia32/lithium-codegen-ia32.cc b/deps/v8/src/ia32/lithium-codegen-ia32.cc
index 8fb4c79196..7fd64ca9c3 100644
--- a/deps/v8/src/ia32/lithium-codegen-ia32.cc
+++ b/deps/v8/src/ia32/lithium-codegen-ia32.cc
@@ -79,6 +79,10 @@ bool LCodeGen::GenerateCode() {
// the frame (that is done in GeneratePrologue).
FrameScope frame_scope(masm_, StackFrame::MANUAL);
+ dynamic_frame_alignment_ = (chunk()->num_double_slots() > 2 &&
+ !chunk()->graph()->is_recursive()) ||
+ info()->osr_ast_id() != AstNode::kNoNumber;
+
return GeneratePrologue() &&
GenerateBody() &&
GenerateDeferredCode() &&
@@ -153,14 +157,52 @@ bool LCodeGen::GeneratePrologue() {
__ bind(&ok);
}
+
+ if (dynamic_frame_alignment_) {
+ // Move state of dynamic frame alignment into edx.
+ __ mov(edx, Immediate(kNoAlignmentPadding));
+
+ Label do_not_pad, align_loop;
+ STATIC_ASSERT(kDoubleSize == 2 * kPointerSize);
+ // Align esp + 4 to a multiple of 2 * kPointerSize.
+ __ test(esp, Immediate(kPointerSize));
+ __ j(not_zero, &do_not_pad, Label::kNear);
+ __ push(Immediate(0));
+ __ mov(ebx, esp);
+ __ mov(edx, Immediate(kAlignmentPaddingPushed));
+ // Copy arguments, receiver, and return address.
+ __ mov(ecx, Immediate(scope()->num_parameters() + 2));
+
+ __ bind(&align_loop);
+ __ mov(eax, Operand(ebx, 1 * kPointerSize));
+ __ mov(Operand(ebx, 0), eax);
+ __ add(Operand(ebx), Immediate(kPointerSize));
+ __ dec(ecx);
+ __ j(not_zero, &align_loop, Label::kNear);
+ __ mov(Operand(ebx, 0), Immediate(kAlignmentZapValue));
+ __ bind(&do_not_pad);
+ }
+
__ push(ebp); // Caller's frame pointer.
__ mov(ebp, esp);
__ push(esi); // Callee's context.
__ push(edi); // Callee's JS function.
+ if (dynamic_frame_alignment_ && FLAG_debug_code) {
+ __ test(esp, Immediate(kPointerSize));
+ __ Assert(zero, "frame is expected to be aligned");
+ }
+
// Reserve space for the stack slots needed by the code.
int slots = GetStackSlotCount();
- if (slots > 0) {
+ ASSERT_GE(slots, 1);
+ if (slots == 1) {
+ if (dynamic_frame_alignment_) {
+ __ push(edx);
+ } else {
+ __ push(Immediate(kNoAlignmentPadding));
+ }
+ } else {
if (FLAG_debug_code) {
__ mov(Operand(eax), Immediate(slots));
Label loop;
@@ -170,7 +212,7 @@ bool LCodeGen::GeneratePrologue() {
__ j(not_zero, &loop);
} else {
__ sub(Operand(esp), Immediate(slots * kPointerSize));
-#ifdef _MSC_VER
+ #ifdef _MSC_VER
// On windows, you may not access the stack more than one page below
// the most recently mapped page. To make the allocated area randomly
// accessible, we write to each page in turn (the value is irrelevant).
@@ -180,7 +222,18 @@ bool LCodeGen::GeneratePrologue() {
offset -= kPageSize) {
__ mov(Operand(esp, offset), eax);
}
-#endif
+ #endif
+ }
+
+ // Store dynamic frame alignment state in the first local.
+ if (dynamic_frame_alignment_) {
+ __ mov(Operand(ebp,
+ JavaScriptFrameConstants::kDynamicAlignmentStateOffset),
+ edx);
+ } else {
+ __ mov(Operand(ebp,
+ JavaScriptFrameConstants::kDynamicAlignmentStateOffset),
+ Immediate(kNoAlignmentPadding));
}
}
@@ -536,14 +589,15 @@ void LCodeGen::RegisterEnvironmentForDeoptimization(
++jsframe_count;
}
}
- Translation translation(&translations_, frame_count, jsframe_count);
+ Translation translation(&translations_, frame_count, jsframe_count,
+ zone());
WriteTranslation(environment, &translation);
int deoptimization_index = deoptimizations_.length();
int pc_offset = masm()->pc_offset();
environment->Register(deoptimization_index,
translation.index(),
(mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
- deoptimizations_.Add(environment);
+ deoptimizations_.Add(environment, zone());
}
}
@@ -565,19 +619,22 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
__ push(eax);
__ push(ebx);
__ mov(ebx, shared);
- __ mov(eax, FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset));
+ __ mov(eax,
+ FieldOperand(ebx, SharedFunctionInfo::kStressDeoptCounterOffset));
__ sub(Operand(eax), Immediate(Smi::FromInt(1)));
__ j(not_zero, &no_deopt, Label::kNear);
if (FLAG_trap_on_deopt) __ int3();
__ mov(eax, Immediate(Smi::FromInt(FLAG_deopt_every_n_times)));
- __ mov(FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset), eax);
+ __ mov(FieldOperand(ebx, SharedFunctionInfo::kStressDeoptCounterOffset),
+ eax);
__ pop(ebx);
__ pop(eax);
__ popfd();
__ jmp(entry, RelocInfo::RUNTIME_ENTRY);
__ bind(&no_deopt);
- __ mov(FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset), eax);
+ __ mov(FieldOperand(ebx, SharedFunctionInfo::kStressDeoptCounterOffset),
+ eax);
__ pop(ebx);
__ pop(eax);
__ popfd();
@@ -638,7 +695,7 @@ int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
for (int i = 0; i < deoptimization_literals_.length(); ++i) {
if (deoptimization_literals_[i].is_identical_to(literal)) return i;
}
- deoptimization_literals_.Add(literal);
+ deoptimization_literals_.Add(literal, zone());
return result;
}
@@ -683,9 +740,9 @@ void LCodeGen::RecordSafepoint(
for (int i = 0; i < operands->length(); i++) {
LOperand* pointer = operands->at(i);
if (pointer->IsStackSlot()) {
- safepoint.DefinePointerSlot(pointer->index());
+ safepoint.DefinePointerSlot(pointer->index(), zone());
} else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
- safepoint.DefinePointerRegister(ToRegister(pointer));
+ safepoint.DefinePointerRegister(ToRegister(pointer), zone());
}
}
}
@@ -698,7 +755,7 @@ void LCodeGen::RecordSafepoint(LPointerMap* pointers,
void LCodeGen::RecordSafepoint(Safepoint::DeoptMode mode) {
- LPointerMap empty_pointers(RelocInfo::kNoPosition);
+ LPointerMap empty_pointers(RelocInfo::kNoPosition, zone());
RecordSafepoint(&empty_pointers, mode);
}
@@ -1984,7 +2041,7 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
};
DeferredInstanceOfKnownGlobal* deferred;
- deferred = new DeferredInstanceOfKnownGlobal(this, instr);
+ deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
Label done, false_result;
Register object = ToRegister(instr->InputAt(1));
@@ -2059,8 +2116,9 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
RelocInfo::CODE_TARGET,
instr,
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
- ASSERT(instr->HasDeoptimizationEnvironment());
- LEnvironment* env = instr->deoptimization_environment();
+ // Get the deoptimization index of the LLazyBailout-environment that
+ // corresponds to this instruction.
+ LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
// Put the result value into the eax slot and restore all registers.
@@ -2096,8 +2154,25 @@ void LCodeGen::DoReturn(LReturn* instr) {
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
__ CallRuntime(Runtime::kTraceExit, 1);
}
+ if (dynamic_frame_alignment_) {
+ // Fetch the state of the dynamic frame alignment.
+ __ mov(edx, Operand(ebp,
+ JavaScriptFrameConstants::kDynamicAlignmentStateOffset));
+ }
__ mov(esp, ebp);
__ pop(ebp);
+ if (dynamic_frame_alignment_) {
+ Label no_padding;
+ __ cmp(edx, Immediate(kNoAlignmentPadding));
+ __ j(equal, &no_padding);
+ if (FLAG_debug_code) {
+ __ cmp(Operand(esp, (GetParameterCount() + 2) * kPointerSize),
+ Immediate(kAlignmentZapValue));
+ __ Assert(equal, "expected alignment marker");
+ }
+ __ Ret((GetParameterCount() + 2) * kPointerSize, ecx);
+ __ bind(&no_padding);
+ }
__ Ret((GetParameterCount() + 1) * kPointerSize, ecx);
}
@@ -2114,7 +2189,7 @@ void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
ASSERT(ToRegister(instr->context()).is(esi));
- ASSERT(ToRegister(instr->global_object()).is(eax));
+ ASSERT(ToRegister(instr->global_object()).is(edx));
ASSERT(ToRegister(instr->result()).is(eax));
__ mov(ecx, instr->name());
@@ -2227,12 +2302,12 @@ void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
Register object,
Handle<Map> type,
- Handle<String> name) {
+ Handle<String> name,
+ LEnvironment* env) {
LookupResult lookup(isolate());
type->LookupInDescriptors(NULL, *name, &lookup);
- ASSERT(lookup.IsFound() &&
- (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
- if (lookup.type() == FIELD) {
+ ASSERT(lookup.IsFound() || lookup.IsCacheable());
+ if (lookup.IsFound() && lookup.type() == FIELD) {
int index = lookup.GetLocalFieldIndexFromMap(*type);
int offset = index * kPointerSize;
if (index < 0) {
@@ -2244,9 +2319,23 @@ void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
__ mov(result, FieldOperand(object, JSObject::kPropertiesOffset));
__ mov(result, FieldOperand(result, offset + FixedArray::kHeaderSize));
}
- } else {
+ } else if (lookup.IsFound() && lookup.type() == CONSTANT_FUNCTION) {
Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
__ LoadHeapObject(result, function);
+ } else {
+ // Negative lookup.
+ // Check prototypes.
+ HeapObject* current = HeapObject::cast((*type)->prototype());
+ Heap* heap = type->GetHeap();
+ while (current != heap->null_value()) {
+ Handle<HeapObject> link(current);
+ __ LoadHeapObject(result, link);
+ __ cmp(FieldOperand(result, HeapObject::kMapOffset),
+ Handle<Map>(JSObject::cast(current)->map()));
+ DeoptimizeIf(not_equal, env);
+ current = HeapObject::cast(current->map()->prototype());
+ }
+ __ mov(result, factory()->undefined_value());
}
}
@@ -2268,51 +2357,72 @@ void LCodeGen::EmitPushTaggedOperand(LOperand* operand) {
}
+// Check for cases where EmitLoadFieldOrConstantFunction needs to walk the
+// prototype chain, which causes unbounded code generation.
+static bool CompactEmit(
+ SmallMapList* list, Handle<String> name, int i, Isolate* isolate) {
+ LookupResult lookup(isolate);
+ Handle<Map> map = list->at(i);
+ map->LookupInDescriptors(NULL, *name, &lookup);
+ return lookup.IsFound() &&
+ (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION);
+}
+
+
void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
Register object = ToRegister(instr->object());
Register result = ToRegister(instr->result());
int map_count = instr->hydrogen()->types()->length();
+ bool need_generic = instr->hydrogen()->need_generic();
+
+ if (map_count == 0 && !need_generic) {
+ DeoptimizeIf(no_condition, instr->environment());
+ return;
+ }
Handle<String> name = instr->hydrogen()->name();
- if (map_count == 0) {
- ASSERT(instr->hydrogen()->need_generic());
- __ mov(ecx, name);
- Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
- } else {
- Label done;
- for (int i = 0; i < map_count - 1; ++i) {
- Handle<Map> map = instr->hydrogen()->types()->at(i);
- Label next;
- __ cmp(FieldOperand(object, HeapObject::kMapOffset), map);
- __ j(not_equal, &next, Label::kNear);
- EmitLoadFieldOrConstantFunction(result, object, map, name);
- __ jmp(&done, Label::kNear);
- __ bind(&next);
+ Label done;
+ bool all_are_compact = true;
+ for (int i = 0; i < map_count; ++i) {
+ if (!CompactEmit(instr->hydrogen()->types(), name, i, isolate())) {
+ all_are_compact = false;
+ break;
}
- Handle<Map> map = instr->hydrogen()->types()->last();
- __ cmp(FieldOperand(object, HeapObject::kMapOffset), map);
- if (instr->hydrogen()->need_generic()) {
- Label generic;
- __ j(not_equal, &generic, Label::kNear);
- EmitLoadFieldOrConstantFunction(result, object, map, name);
- __ jmp(&done, Label::kNear);
- __ bind(&generic);
- __ mov(ecx, name);
- Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
- } else {
+ }
+ for (int i = 0; i < map_count; ++i) {
+ bool last = (i == map_count - 1);
+ Handle<Map> map = instr->hydrogen()->types()->at(i);
+ Label check_passed;
+ __ CompareMap(object, map, &check_passed, ALLOW_ELEMENT_TRANSITION_MAPS);
+ if (last && !need_generic) {
DeoptimizeIf(not_equal, instr->environment());
- EmitLoadFieldOrConstantFunction(result, object, map, name);
+ __ bind(&check_passed);
+ EmitLoadFieldOrConstantFunction(
+ result, object, map, name, instr->environment());
+ } else {
+ Label next;
+ bool compact = all_are_compact ? true :
+ CompactEmit(instr->hydrogen()->types(), name, i, isolate());
+ __ j(not_equal, &next, compact ? Label::kNear : Label::kFar);
+ __ bind(&check_passed);
+ EmitLoadFieldOrConstantFunction(
+ result, object, map, name, instr->environment());
+ __ jmp(&done, all_are_compact ? Label::kNear : Label::kFar);
+ __ bind(&next);
}
- __ bind(&done);
}
+ if (need_generic) {
+ __ mov(ecx, name);
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);
+ }
+ __ bind(&done);
}
void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
ASSERT(ToRegister(instr->context()).is(esi));
- ASSERT(ToRegister(instr->object()).is(eax));
+ ASSERT(ToRegister(instr->object()).is(edx));
ASSERT(ToRegister(instr->result()).is(eax));
__ mov(ecx, instr->name());
@@ -2381,8 +2491,10 @@ void LCodeGen::DoLoadElements(LLoadElements* instr) {
__ movzx_b(temp, FieldOperand(temp, Map::kBitField2Offset));
__ and_(temp, Map::kElementsKindMask);
__ shr(temp, Map::kElementsKindShift);
- __ cmp(temp, FAST_ELEMENTS);
- __ j(equal, &ok, Label::kNear);
+ __ cmp(temp, GetInitialFastElementsKind());
+ __ j(less, &fail, Label::kNear);
+ __ cmp(temp, TERMINAL_FAST_ELEMENTS_KIND);
+ __ j(less_equal, &ok, Label::kNear);
__ cmp(temp, FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND);
__ j(less, &fail, Label::kNear);
__ cmp(temp, LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
@@ -2425,14 +2537,21 @@ void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
// Load the result.
__ mov(result,
- BuildFastArrayOperand(instr->elements(), instr->key(),
+ BuildFastArrayOperand(instr->elements(),
+ instr->key(),
FAST_ELEMENTS,
- FixedArray::kHeaderSize - kHeapObjectTag));
+ FixedArray::kHeaderSize - kHeapObjectTag,
+ instr->additional_index()));
// Check for the hole value.
if (instr->hydrogen()->RequiresHoleCheck()) {
- __ cmp(result, factory()->the_hole_value());
- DeoptimizeIf(equal, instr->environment());
+ if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) {
+ __ test(result, Immediate(kSmiTagMask));
+ DeoptimizeIf(not_equal, instr->environment());
+ } else {
+ __ cmp(result, factory()->the_hole_value());
+ DeoptimizeIf(equal, instr->environment());
+ }
}
}
@@ -2441,18 +2560,24 @@ void LCodeGen::DoLoadKeyedFastDoubleElement(
LLoadKeyedFastDoubleElement* instr) {
XMMRegister result = ToDoubleRegister(instr->result());
- int offset = FixedDoubleArray::kHeaderSize - kHeapObjectTag +
- sizeof(kHoleNanLower32);
- Operand hole_check_operand = BuildFastArrayOperand(
- instr->elements(), instr->key(),
- FAST_DOUBLE_ELEMENTS,
- offset);
- __ cmp(hole_check_operand, Immediate(kHoleNanUpper32));
- DeoptimizeIf(equal, instr->environment());
+ if (instr->hydrogen()->RequiresHoleCheck()) {
+ int offset = FixedDoubleArray::kHeaderSize - kHeapObjectTag +
+ sizeof(kHoleNanLower32);
+ Operand hole_check_operand = BuildFastArrayOperand(
+ instr->elements(), instr->key(),
+ FAST_DOUBLE_ELEMENTS,
+ offset,
+ instr->additional_index());
+ __ cmp(hole_check_operand, Immediate(kHoleNanUpper32));
+ DeoptimizeIf(equal, instr->environment());
+ }
Operand double_load_operand = BuildFastArrayOperand(
- instr->elements(), instr->key(), FAST_DOUBLE_ELEMENTS,
- FixedDoubleArray::kHeaderSize - kHeapObjectTag);
+ instr->elements(),
+ instr->key(),
+ FAST_DOUBLE_ELEMENTS,
+ FixedDoubleArray::kHeaderSize - kHeapObjectTag,
+ instr->additional_index());
__ movdbl(result, double_load_operand);
}
@@ -2461,7 +2586,8 @@ Operand LCodeGen::BuildFastArrayOperand(
LOperand* elements_pointer,
LOperand* key,
ElementsKind elements_kind,
- uint32_t offset) {
+ uint32_t offset,
+ uint32_t additional_index) {
Register elements_pointer_reg = ToRegister(elements_pointer);
int shift_size = ElementsKindToShiftSize(elements_kind);
if (key->IsConstantOperand()) {
@@ -2470,10 +2596,14 @@ Operand LCodeGen::BuildFastArrayOperand(
Abort("array index constant value too big");
}
return Operand(elements_pointer_reg,
- constant_value * (1 << shift_size) + offset);
+ ((constant_value + additional_index) << shift_size)
+ + offset);
} else {
ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size);
- return Operand(elements_pointer_reg, ToRegister(key), scale_factor, offset);
+ return Operand(elements_pointer_reg,
+ ToRegister(key),
+ scale_factor,
+ offset + (additional_index << shift_size));
}
}
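
An aside on the new additional_index parameter: for a constant key it folds entirely into the displacement, while for a register key the key keeps its scale factor and only additional_index is added to the fixed offset. A minimal sketch of the resulting displacements, assuming FAST_ELEMENTS on ia32 with a shift size of 2 and a header offset of FixedArray::kHeaderSize - kHeapObjectTag = 7 (these constants are assumptions for illustration, not taken from this patch):

#include <cstdio>

int main() {
  const unsigned shift_size = 2;  // log2(kPointerSize) for FAST_ELEMENTS (assumed)
  const unsigned offset = 8 - 1;  // FixedArray::kHeaderSize - kHeapObjectTag (assumed values)

  // Constant key: everything folds into the displacement, matching
  // ((constant_value + additional_index) << shift_size) + offset.
  unsigned constant_key = 3;
  unsigned additional_index = 2;
  unsigned disp = ((constant_key + additional_index) << shift_size) + offset;
  std::printf("[elements + %u]\n", disp);  // [elements + 27]

  // Register key: the key keeps its scale factor and only additional_index
  // folds into the displacement, matching offset + (additional_index << shift_size).
  unsigned disp_reg_key = offset + (additional_index << shift_size);
  std::printf("[elements + key*4 + %u]\n", disp_reg_key);  // [elements + key*4 + 15]
  return 0;
}
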
@@ -2482,7 +2612,10 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
LLoadKeyedSpecializedArrayElement* instr) {
ElementsKind elements_kind = instr->elements_kind();
Operand operand(BuildFastArrayOperand(instr->external_pointer(),
- instr->key(), elements_kind, 0));
+ instr->key(),
+ elements_kind,
+ 0,
+ instr->additional_index()));
if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
XMMRegister result(ToDoubleRegister(instr->result()));
__ movss(result, operand);
@@ -2518,9 +2651,12 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
break;
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -2533,7 +2669,7 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
ASSERT(ToRegister(instr->context()).is(esi));
ASSERT(ToRegister(instr->object()).is(edx));
- ASSERT(ToRegister(instr->key()).is(eax));
+ ASSERT(ToRegister(instr->key()).is(ecx));
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
CallCode(ic, RelocInfo::CODE_TARGET, instr);
@@ -2543,25 +2679,29 @@ void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
Register result = ToRegister(instr->result());
- // Check for arguments adapter frame.
- Label done, adapted;
- __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
- __ mov(result, Operand(result, StandardFrameConstants::kContextOffset));
- __ cmp(Operand(result),
- Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
- __ j(equal, &adapted, Label::kNear);
-
- // No arguments adaptor frame.
- __ mov(result, Operand(ebp));
- __ jmp(&done, Label::kNear);
+ if (instr->hydrogen()->from_inlined()) {
+ __ lea(result, Operand(esp, -2 * kPointerSize));
+ } else {
+ // Check for arguments adapter frame.
+ Label done, adapted;
+ __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
+ __ mov(result, Operand(result, StandardFrameConstants::kContextOffset));
+ __ cmp(Operand(result),
+ Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+ __ j(equal, &adapted, Label::kNear);
+
+ // No arguments adaptor frame.
+ __ mov(result, Operand(ebp));
+ __ jmp(&done, Label::kNear);
- // Arguments adaptor frame present.
- __ bind(&adapted);
- __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
+ // Arguments adaptor frame present.
+ __ bind(&adapted);
+ __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
- // Result is the frame pointer for the frame if not adapted and for the real
- // frame below the adaptor frame if adapted.
- __ bind(&done);
+ // Result is the frame pointer for the frame if not adapted and for the real
+ // frame below the adaptor frame if adapted.
+ __ bind(&done);
+ }
}
@@ -2666,7 +2806,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
// Invoke the function.
__ bind(&invoke);
- ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
+ ASSERT(instr->HasPointerMap());
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
SafepointGenerator safepoint_generator(
@@ -2683,6 +2823,11 @@ void LCodeGen::DoPushArgument(LPushArgument* instr) {
}
+void LCodeGen::DoDrop(LDrop* instr) {
+ __ Drop(instr->count());
+}
+
+
void LCodeGen::DoThisFunction(LThisFunction* instr) {
Register result = ToRegister(instr->result());
__ LoadHeapObject(result, instr->hydrogen()->closure());
@@ -2729,7 +2874,8 @@ void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
int arity,
LInstruction* instr,
- CallKind call_kind) {
+ CallKind call_kind,
+ EDIState edi_state) {
bool can_invoke_directly = !function->NeedsArgumentsAdaption() ||
function->shared()->formal_parameter_count() == arity;
@@ -2737,7 +2883,9 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
RecordPosition(pointers->position());
if (can_invoke_directly) {
- __ LoadHeapObject(edi, function);
+ if (edi_state == EDI_UNINITIALIZED) {
+ __ LoadHeapObject(edi, function);
+ }
// Change context if needed.
bool change_context =
@@ -2780,7 +2928,8 @@ void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
CallKnownFunction(instr->function(),
instr->arity(),
instr,
- CALL_AS_METHOD);
+ CALL_AS_METHOD,
+ EDI_UNINITIALIZED);
}
@@ -2877,7 +3026,7 @@ void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
EmitIntegerMathAbs(instr);
} else { // Tagged case.
DeferredMathAbsTaggedHeapNumber* deferred =
- new DeferredMathAbsTaggedHeapNumber(this, instr);
+ new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr);
Register input_reg = ToRegister(instr->value());
// Smi check.
__ JumpIfNotSmi(input_reg, deferred->entry());
@@ -2911,11 +3060,13 @@ void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
__ cmp(output_reg, 0x80000000u);
DeoptimizeIf(equal, instr->environment());
} else {
+ Label negative_sign;
Label done;
- // Deoptimize on negative numbers.
+ // Deoptimize on unordered.
__ xorps(xmm_scratch, xmm_scratch); // Zero the register.
__ ucomisd(input_reg, xmm_scratch);
- DeoptimizeIf(below, instr->environment());
+ DeoptimizeIf(parity_even, instr->environment());
+ __ j(below, &negative_sign, Label::kNear);
if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
// Check for negative zero.
@@ -2931,10 +3082,21 @@ void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
// Use truncating instruction (OK because input is positive).
__ cvttsd2si(output_reg, Operand(input_reg));
-
// Overflow is signalled with minint.
__ cmp(output_reg, 0x80000000u);
DeoptimizeIf(equal, instr->environment());
+ __ jmp(&done, Label::kNear);
+
+ // A non-zero negative number reaches here.
+ __ bind(&negative_sign);
+ // Truncate, then compare against the input and compensate if needed.
+ __ cvttsd2si(output_reg, Operand(input_reg));
+ __ cvtsi2sd(xmm_scratch, output_reg);
+ __ ucomisd(input_reg, xmm_scratch);
+ __ j(equal, &done, Label::kNear);
+ __ sub(output_reg, Immediate(1));
+ DeoptimizeIf(overflow, instr->environment());
+
__ bind(&done);
}
}
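
The negative_sign path above implements floor by truncating toward zero and then subtracting one when the truncation lost a fractional part. A scalar sketch of the same idea, not the generated ia32 code, and ignoring the overflow deoptimization:

#include <cstdio>

// Truncate toward zero (what cvttsd2si does), then compensate for negative
// non-integral inputs, mirroring the negative_sign branch above.
static int FloorViaTruncate(double x) {
  int truncated = static_cast<int>(x);
  if (x < 0 && static_cast<double>(truncated) != x) truncated -= 1;
  return truncated;
}

int main() {
  std::printf("%d\n", FloorViaTruncate(-2.5));  // -3
  std::printf("%d\n", FloorViaTruncate(-2.0));  // -2
  std::printf("%d\n", FloorViaTruncate(2.5));   // 2
  return 0;
}
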
@@ -3066,7 +3228,7 @@ void LCodeGen::DoRandom(LRandom* instr) {
LRandom* instr_;
};
- DeferredDoRandom* deferred = new DeferredDoRandom(this, instr);
+ DeferredDoRandom* deferred = new(zone()) DeferredDoRandom(this, instr);
// Having marked this instruction as a call we can use any
// registers.
@@ -3226,13 +3388,21 @@ void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
ASSERT(ToRegister(instr->context()).is(esi));
ASSERT(ToRegister(instr->function()).is(edi));
ASSERT(instr->HasPointerMap());
- ASSERT(instr->HasDeoptimizationEnvironment());
- LPointerMap* pointers = instr->pointer_map();
- RecordPosition(pointers->position());
- SafepointGenerator generator(
- this, pointers, Safepoint::kLazyDeopt);
- ParameterCount count(instr->arity());
- __ InvokeFunction(edi, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
+
+ if (instr->known_function().is_null()) {
+ LPointerMap* pointers = instr->pointer_map();
+ RecordPosition(pointers->position());
+ SafepointGenerator generator(
+ this, pointers, Safepoint::kLazyDeopt);
+ ParameterCount count(instr->arity());
+ __ InvokeFunction(edi, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
+ } else {
+ CallKnownFunction(instr->known_function(),
+ instr->arity(),
+ instr,
+ CALL_AS_METHOD,
+ EDI_CONTAINS_TARGET);
+ }
}
@@ -3287,7 +3457,11 @@ void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
ASSERT(ToRegister(instr->result()).is(eax));
- CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION);
+ CallKnownFunction(instr->target(),
+ instr->arity(),
+ instr,
+ CALL_AS_FUNCTION,
+ EDI_UNINITIALIZED);
}
@@ -3313,7 +3487,22 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
int offset = instr->offset();
if (!instr->transition().is_null()) {
- __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
+ if (!instr->hydrogen()->NeedsWriteBarrierForMap()) {
+ __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
+ } else {
+ Register temp = ToRegister(instr->TempAt(0));
+ Register temp_map = ToRegister(instr->TempAt(1));
+ __ mov(temp_map, instr->transition());
+ __ mov(FieldOperand(object, HeapObject::kMapOffset), temp_map);
+ // Update the write barrier for the map field.
+ __ RecordWriteField(object,
+ HeapObject::kMapOffset,
+ temp_map,
+ temp,
+ kSaveFPRegs,
+ OMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ }
}
// Do the store.
@@ -3381,7 +3570,10 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
LStoreKeyedSpecializedArrayElement* instr) {
ElementsKind elements_kind = instr->elements_kind();
Operand operand(BuildFastArrayOperand(instr->external_pointer(),
- instr->key(), elements_kind, 0));
+ instr->key(),
+ elements_kind,
+ 0,
+ instr->additional_index()));
if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
__ cvtsd2ss(xmm0, ToDoubleRegister(instr->value()));
__ movss(operand, xmm0);
@@ -3405,9 +3597,12 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
break;
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -3422,31 +3617,21 @@ void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
Register elements = ToRegister(instr->object());
Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
- // Do the store.
- if (instr->key()->IsConstantOperand()) {
- ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
- LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
- int offset =
- ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
- __ mov(FieldOperand(elements, offset), value);
- } else {
- __ mov(FieldOperand(elements,
- key,
- times_pointer_size,
- FixedArray::kHeaderSize),
- value);
- }
+ Operand operand = BuildFastArrayOperand(
+ instr->object(),
+ instr->key(),
+ FAST_ELEMENTS,
+ FixedArray::kHeaderSize - kHeapObjectTag,
+ instr->additional_index());
+ __ mov(operand, value);
if (instr->hydrogen()->NeedsWriteBarrier()) {
+ ASSERT(!instr->key()->IsConstantOperand());
HType type = instr->hydrogen()->value()->type();
SmiCheck check_needed =
type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
// Compute address of modified element and store it into key register.
- __ lea(key,
- FieldOperand(elements,
- key,
- times_pointer_size,
- FixedArray::kHeaderSize));
+ __ lea(key, operand);
__ RecordWrite(elements,
key,
value,
@@ -3460,19 +3645,25 @@ void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
void LCodeGen::DoStoreKeyedFastDoubleElement(
LStoreKeyedFastDoubleElement* instr) {
XMMRegister value = ToDoubleRegister(instr->value());
- Label have_value;
- __ ucomisd(value, value);
- __ j(parity_odd, &have_value); // NaN.
+ if (instr->NeedsCanonicalization()) {
+ Label have_value;
- ExternalReference canonical_nan_reference =
- ExternalReference::address_of_canonical_non_hole_nan();
- __ movdbl(value, Operand::StaticVariable(canonical_nan_reference));
- __ bind(&have_value);
+ __ ucomisd(value, value);
+ __ j(parity_odd, &have_value); // NaN.
+
+ ExternalReference canonical_nan_reference =
+ ExternalReference::address_of_canonical_non_hole_nan();
+ __ movdbl(value, Operand::StaticVariable(canonical_nan_reference));
+ __ bind(&have_value);
+ }
Operand double_store_operand = BuildFastArrayOperand(
- instr->elements(), instr->key(), FAST_DOUBLE_ELEMENTS,
- FixedDoubleArray::kHeaderSize - kHeapObjectTag);
+ instr->elements(),
+ instr->key(),
+ FAST_DOUBLE_ELEMENTS,
+ FixedDoubleArray::kHeaderSize - kHeapObjectTag,
+ instr->additional_index());
__ movdbl(double_store_operand, value);
}
@@ -3500,25 +3691,34 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
ElementsKind to_kind = to_map->elements_kind();
Label not_applicable;
+ bool is_simple_map_transition =
+ IsSimpleMapChangeTransition(from_kind, to_kind);
+ Label::Distance branch_distance =
+ is_simple_map_transition ? Label::kNear : Label::kFar;
__ cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map);
- __ j(not_equal, &not_applicable);
- __ mov(new_map_reg, to_map);
- if (from_kind == FAST_SMI_ONLY_ELEMENTS && to_kind == FAST_ELEMENTS) {
+ __ j(not_equal, &not_applicable, branch_distance);
+ if (is_simple_map_transition) {
Register object_reg = ToRegister(instr->object());
- __ mov(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg);
+ Handle<Map> map = instr->hydrogen()->transitioned_map();
+ __ mov(FieldOperand(object_reg, HeapObject::kMapOffset),
+ Immediate(map));
// Write barrier.
ASSERT_NE(instr->temp_reg(), NULL);
- __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg,
- ToRegister(instr->temp_reg()), kDontSaveFPRegs);
- } else if (from_kind == FAST_SMI_ONLY_ELEMENTS &&
- to_kind == FAST_DOUBLE_ELEMENTS) {
+ __ RecordWriteForMap(object_reg, to_map, new_map_reg,
+ ToRegister(instr->temp_reg()),
+ kDontSaveFPRegs);
+ } else if (IsFastSmiElementsKind(from_kind) &&
+ IsFastDoubleElementsKind(to_kind)) {
+ __ mov(new_map_reg, to_map);
Register fixed_object_reg = ToRegister(instr->temp_reg());
ASSERT(fixed_object_reg.is(edx));
ASSERT(new_map_reg.is(ebx));
__ mov(fixed_object_reg, object_reg);
CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(),
RelocInfo::CODE_TARGET, instr);
- } else if (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS) {
+ } else if (IsFastDoubleElementsKind(from_kind) &&
+ IsFastObjectElementsKind(to_kind)) {
+ __ mov(new_map_reg, to_map);
Register fixed_object_reg = ToRegister(instr->temp_reg());
ASSERT(fixed_object_reg.is(edx));
ASSERT(new_map_reg.is(ebx));
@@ -3544,7 +3744,7 @@ void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
};
DeferredStringCharCodeAt* deferred =
- new DeferredStringCharCodeAt(this, instr);
+ new(zone()) DeferredStringCharCodeAt(this, instr);
StringCharLoadGenerator::Generate(masm(),
factory(),
@@ -3600,7 +3800,7 @@ void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
};
DeferredStringCharFromCode* deferred =
- new DeferredStringCharFromCode(this, instr);
+ new(zone()) DeferredStringCharFromCode(this, instr);
ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
Register char_code = ToRegister(instr->char_code());
@@ -3675,7 +3875,7 @@ void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
ASSERT(input->IsRegister() && input->Equals(instr->result()));
Register reg = ToRegister(input);
- DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
+ DeferredNumberTagI* deferred = new(zone()) DeferredNumberTagI(this, instr);
__ SmiTag(reg);
__ j(overflow, deferred->entry());
__ bind(deferred->exit());
@@ -3743,7 +3943,7 @@ void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
Register reg = ToRegister(instr->result());
Register tmp = ToRegister(instr->TempAt(0));
- DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
+ DeferredNumberTagD* deferred = new(zone()) DeferredNumberTagD(this, instr);
if (FLAG_inline_new) {
__ AllocateHeapNumber(reg, tmp, no_reg, deferred->entry());
} else {
@@ -3789,6 +3989,10 @@ void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
if (instr->needs_check()) {
__ test(ToRegister(input), Immediate(kSmiTagMask));
DeoptimizeIf(not_zero, instr->environment());
+ } else {
+ if (FLAG_debug_code) {
+ __ AbortIfNotSmi(ToRegister(input));
+ }
}
__ SmiUntag(ToRegister(input));
}
@@ -3943,7 +4147,7 @@ void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
Register input_reg = ToRegister(input);
- DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);
+ DeferredTaggedToI* deferred = new(zone()) DeferredTaggedToI(this, instr);
// Smi check.
__ JumpIfNotSmi(input_reg, deferred->entry());
@@ -4181,12 +4385,21 @@ void LCodeGen::DoCheckMapCommon(Register reg,
}
-void LCodeGen::DoCheckMap(LCheckMap* instr) {
+void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
LOperand* input = instr->InputAt(0);
ASSERT(input->IsRegister());
Register reg = ToRegister(input);
- Handle<Map> map = instr->hydrogen()->map();
- DoCheckMapCommon(reg, map, instr->hydrogen()->mode(), instr->environment());
+
+ Label success;
+ SmallMapList* map_set = instr->hydrogen()->map_set();
+ for (int i = 0; i < map_set->length() - 1; i++) {
+ Handle<Map> map = map_set->at(i);
+ __ CompareMap(reg, map, &success, REQUIRE_EXACT_MAP);
+ __ j(equal, &success);
+ }
+ Handle<Map> map = map_set->last();
+ DoCheckMapCommon(reg, map, REQUIRE_EXACT_MAP, instr->environment());
+ __ bind(&success);
}
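
Conceptually, the new DoCheckMaps accepts the object as soon as any map in the set matches and only deoptimizes when the final comparison also fails. A small sketch of that control flow, in plain C++ rather than the emitted ia32:

#include <cstddef>
#include <cstdio>
#include <vector>

struct Map {};  // stand-in for v8::internal::Map

// True when the object's map is in the set; returning false corresponds to
// DeoptimizeIf(not_equal) after the last comparison. Assumes a non-empty set,
// as LCheckMaps does.
static bool CheckMaps(const Map* object_map,
                      const std::vector<const Map*>& map_set) {
  for (std::size_t i = 0; i + 1 < map_set.size(); ++i) {
    if (object_map == map_set[i]) return true;  // __ j(equal, &success)
  }
  return object_map == map_set.back();  // exact-map check on the last entry
}

int main() {
  Map a, b, c;
  std::vector<const Map*> set;
  set.push_back(&a);
  set.push_back(&b);
  std::printf("%d %d\n", CheckMaps(&b, set), CheckMaps(&c, set));  // 1 0
  return 0;
}
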
@@ -4275,7 +4488,8 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
LAllocateObject* instr_;
};
- DeferredAllocateObject* deferred = new DeferredAllocateObject(this, instr);
+ DeferredAllocateObject* deferred =
+ new(zone()) DeferredAllocateObject(this, instr);
Register result = ToRegister(instr->result());
Register scratch = ToRegister(instr->TempAt(0));
@@ -4297,6 +4511,14 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
deferred->entry(),
TAG_OBJECT);
+ __ bind(deferred->exit());
+ if (FLAG_debug_code) {
+ Label is_in_new_space;
+ __ JumpIfInNewSpace(result, scratch, &is_in_new_space);
+ __ Abort("Allocated object is not in new-space");
+ __ bind(&is_in_new_space);
+ }
+
// Load the initial map.
Register map = scratch;
__ LoadHeapObject(scratch, constructor);
@@ -4331,14 +4553,14 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
__ mov(FieldOperand(result, property_offset), scratch);
}
}
-
- __ bind(deferred->exit());
}
void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
Register result = ToRegister(instr->result());
Handle<JSFunction> constructor = instr->hydrogen()->constructor();
+ Handle<Map> initial_map(constructor->initial_map());
+ int instance_size = initial_map->instance_size();
// TODO(3095996): Get rid of this. For now, we need to make the
// result register contain a valid pointer because it is already
@@ -4346,8 +4568,9 @@ void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
__ Set(result, Immediate(0));
PushSafepointRegistersScope scope(this);
- __ PushHeapObject(constructor);
- CallRuntimeFromDeferred(Runtime::kNewObject, 1, instr, instr->context());
+ __ push(Immediate(Smi::FromInt(instance_size)));
+ CallRuntimeFromDeferred(
+ Runtime::kAllocateInNewSpace, 1, instr, instr->context());
__ StoreToSafepointRegisterSlot(result, eax);
}
@@ -4360,8 +4583,9 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
// Deopt if the array literal boilerplate ElementsKind is of a type different
// than the expected one. The check isn't necessary if the boilerplate has
- // already been converted to FAST_ELEMENTS.
- if (boilerplate_elements_kind != FAST_ELEMENTS) {
+ // already been converted to TERMINAL_FAST_ELEMENTS_KIND.
+ if (CanTransitionToMoreGeneralFastElementsKind(
+ boilerplate_elements_kind, true)) {
__ LoadHeapObject(eax, instr->hydrogen()->boilerplate_object());
__ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
// Load the map's "bit field 2". We only need the first byte,
@@ -4415,6 +4639,13 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
__ LoadHeapObject(ecx, object);
__ cmp(source, ecx);
__ Assert(equal, "Unexpected object literal boilerplate");
+ __ mov(ecx, FieldOperand(source, HeapObject::kMapOffset));
+ __ cmp(ecx, Handle<Map>(object->map()));
+ __ Assert(equal, "Unexpected boilerplate map");
+ __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
+ __ and_(ecx, Map::kElementsKindMask);
+ __ cmp(ecx, object->GetElementsKind() << Map::kElementsKindShift);
+ __ Assert(equal, "Unexpected boilerplate elements kind");
}
// Only elements backing stores for non-COW arrays need to be copied.
@@ -4484,9 +4715,10 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
__ mov(FieldOperand(result, total_offset + 4), Immediate(value_high));
}
} else if (elements->IsFixedArray()) {
+ Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
for (int i = 0; i < elements_length; i++) {
int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i);
- Handle<Object> value = JSObject::GetElement(object, i);
+ Handle<Object> value(fast_elements->get(i));
if (value->IsJSObject()) {
Handle<JSObject> value_object = Handle<JSObject>::cast(value);
__ lea(ecx, Operand(result, *offset));
@@ -4510,6 +4742,24 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
ASSERT(ToRegister(instr->context()).is(esi));
int size = instr->hydrogen()->total_size();
+ ElementsKind boilerplate_elements_kind =
+ instr->hydrogen()->boilerplate()->GetElementsKind();
+
+ // Deopt if the literal boilerplate ElementsKind is of a type different than
+ // the expected one. The check isn't necessary if the boilerplate has
+ // already been converted to TERMINAL_FAST_ELEMENTS_KIND.
+ if (CanTransitionToMoreGeneralFastElementsKind(
+ boilerplate_elements_kind, true)) {
+ __ LoadHeapObject(ebx, instr->hydrogen()->boilerplate());
+ __ mov(ecx, FieldOperand(ebx, HeapObject::kMapOffset));
+ // Load the map's "bit field 2". We only need the first byte,
+ // but the following masking takes care of that anyway.
+ __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
+ // Retrieve elements_kind from bit field 2.
+ __ and_(ecx, Map::kElementsKindMask);
+ __ cmp(ecx, boilerplate_elements_kind << Map::kElementsKindShift);
+ DeoptimizeIf(not_equal, instr->environment());
+ }
// Allocate all objects that are part of the literal in one big
// allocation. This avoids multiple limit checks.
@@ -4794,7 +5044,7 @@ void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
LOperand* key = instr->key();
__ push(ToOperand(obj));
EmitPushTaggedOperand(key);
- ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
+ ASSERT(instr->HasPointerMap());
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
// Create safepoint generator that will also ensure enough space in the
@@ -4854,7 +5104,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
ASSERT(instr->hydrogen()->is_backwards_branch());
// Perform stack overflow check if this goto needs it before jumping.
DeferredStackCheck* deferred_stack_check =
- new DeferredStackCheck(this, instr);
+ new(zone()) DeferredStackCheck(this, instr);
ExternalReference stack_limit =
ExternalReference::address_of_stack_limit(isolate());
__ cmp(esp, Operand::StaticVariable(stack_limit));
@@ -4892,7 +5142,7 @@ void LCodeGen::DoIn(LIn* instr) {
LOperand* key = instr->key();
EmitPushTaggedOperand(key);
EmitPushTaggedOperand(obj);
- ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
+ ASSERT(instr->HasPointerMap());
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
SafepointGenerator safepoint_generator(
diff --git a/deps/v8/src/ia32/lithium-codegen-ia32.h b/deps/v8/src/ia32/lithium-codegen-ia32.h
index 52befc6974..b241aafb96 100644
--- a/deps/v8/src/ia32/lithium-codegen-ia32.h
+++ b/deps/v8/src/ia32/lithium-codegen-ia32.h
@@ -46,21 +46,26 @@ class SafepointGenerator;
class LCodeGen BASE_EMBEDDED {
public:
- LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
+ LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info,
+ Zone* zone)
: chunk_(chunk),
masm_(assembler),
info_(info),
current_block_(-1),
current_instruction_(-1),
instructions_(chunk->instructions()),
- deoptimizations_(4),
- deoptimization_literals_(8),
+ deoptimizations_(4, zone),
+ deoptimization_literals_(8, zone),
inlined_function_count_(0),
scope_(info->scope()),
status_(UNUSED),
- deferred_(8),
+ translations_(zone),
+ deferred_(8, zone),
+ dynamic_frame_alignment_(false),
osr_pc_offset_(-1),
last_lazy_deopt_pc_(0),
+ safepoints_(zone),
+ zone_(zone),
resolver_(this),
expected_safepoint_kind_(Safepoint::kSimple) {
PopulateDeoptimizationLiteralsWithInlinedFunctions();
@@ -72,6 +77,7 @@ class LCodeGen BASE_EMBEDDED {
Isolate* isolate() const { return info_->isolate(); }
Factory* factory() const { return isolate()->factory(); }
Heap* heap() const { return isolate()->heap(); }
+ Zone* zone() const { return zone_; }
// Support for converting LOperands to assembler types.
Operand ToOperand(LOperand* op) const;
@@ -164,7 +170,7 @@ class LCodeGen BASE_EMBEDDED {
void Abort(const char* format, ...);
void Comment(const char* format, ...);
- void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code); }
+ void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
// Code generation passes. Returns true if code generation should
// continue.
@@ -206,12 +212,18 @@ class LCodeGen BASE_EMBEDDED {
LInstruction* instr,
LOperand* context);
+ enum EDIState {
+ EDI_UNINITIALIZED,
+ EDI_CONTAINS_TARGET
+ };
+
// Generate a direct call to a known function. Expects the function
// to be in edi.
void CallKnownFunction(Handle<JSFunction> function,
int arity,
LInstruction* instr,
- CallKind call_kind);
+ CallKind call_kind,
+ EDIState edi_state);
void RecordSafepointWithLazyDeopt(LInstruction* instr,
SafepointMode safepoint_mode);
@@ -236,7 +248,8 @@ class LCodeGen BASE_EMBEDDED {
Operand BuildFastArrayOperand(LOperand* elements_pointer,
LOperand* key,
ElementsKind elements_kind,
- uint32_t offset);
+ uint32_t offset,
+ uint32_t additional_index = 0);
// Specific math operations - used from DoUnaryMathOperation.
void EmitIntegerMathAbs(LUnaryMathOperation* instr);
@@ -301,7 +314,8 @@ class LCodeGen BASE_EMBEDDED {
void EmitLoadFieldOrConstantFunction(Register result,
Register object,
Handle<Map> type,
- Handle<String> name);
+ Handle<String> name,
+ LEnvironment* env);
// Emits optimized code to deep-copy the contents of statically known
// object graphs (e.g. object literal boilerplate).
@@ -330,6 +344,7 @@ class LCodeGen BASE_EMBEDDED {
Status status_;
TranslationBuffer translations_;
ZoneList<LDeferredCode*> deferred_;
+ bool dynamic_frame_alignment_;
int osr_pc_offset_;
int last_lazy_deopt_pc_;
@@ -337,6 +352,8 @@ class LCodeGen BASE_EMBEDDED {
// itself is emitted at the end of the generated code.
SafepointTableBuilder safepoints_;
+ Zone* zone_;
+
// Compiler from a set of parallel moves to a sequential list of moves.
LGapResolver resolver_;
diff --git a/deps/v8/src/ia32/lithium-gap-resolver-ia32.cc b/deps/v8/src/ia32/lithium-gap-resolver-ia32.cc
index 510d9f1dc6..6428916fef 100644
--- a/deps/v8/src/ia32/lithium-gap-resolver-ia32.cc
+++ b/deps/v8/src/ia32/lithium-gap-resolver-ia32.cc
@@ -37,7 +37,7 @@ namespace internal {
LGapResolver::LGapResolver(LCodeGen* owner)
: cgen_(owner),
- moves_(32),
+ moves_(32, owner->zone()),
source_uses_(),
destination_uses_(),
spilled_register_(-1) {}
@@ -157,7 +157,7 @@ void LGapResolver::AddMove(LMoveOperands move) {
LOperand* destination = move.destination();
if (destination->IsRegister()) ++destination_uses_[destination->index()];
- moves_.Add(move);
+ moves_.Add(move, cgen_->zone());
}
diff --git a/deps/v8/src/ia32/lithium-ia32.cc b/deps/v8/src/ia32/lithium-ia32.cc
index 186b346c70..60366f7447 100644
--- a/deps/v8/src/ia32/lithium-ia32.cc
+++ b/deps/v8/src/ia32/lithium-ia32.cc
@@ -368,7 +368,11 @@ void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
int LChunk::GetNextSpillIndex(bool is_double) {
// Skip a slot if this is a double-width slot.
- if (is_double) spill_slot_count_++;
+ if (is_double) {
+ spill_slot_count_++;
+ spill_slot_count_ |= 1;
+ num_double_slots_++;
+ }
return spill_slot_count_++;
}
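
A quick simulation of the index sequence produced by the updated allocator: the added |= 1 step makes every double-width slot start at an odd index, presumably so that, together with the dynamic frame alignment introduced elsewhere in this patch, double spill slots stay 8-byte aligned. This sketch covers the arithmetic only:

#include <cstdio>

// Mirrors the updated LChunk::GetNextSpillIndex above (num_double_slots_ omitted).
struct SpillAllocator {
  SpillAllocator() : spill_slot_count_(0) {}
  int GetNextSpillIndex(bool is_double) {
    if (is_double) {
      spill_slot_count_++;
      spill_slot_count_ |= 1;
    }
    return spill_slot_count_++;
  }
  int spill_slot_count_;
};

int main() {
  SpillAllocator chunk;
  int a = chunk.GetNextSpillIndex(false);  // 0
  int b = chunk.GetNextSpillIndex(true);   // 3 (odd)
  int c = chunk.GetNextSpillIndex(true);   // 5 (odd)
  int d = chunk.GetNextSpillIndex(false);  // 6
  std::printf("%d %d %d %d\n", a, b, c, d);
  return 0;
}
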
@@ -376,9 +380,9 @@ int LChunk::GetNextSpillIndex(bool is_double) {
LOperand* LChunk::GetNextSpillSlot(bool is_double) {
int index = GetNextSpillIndex(is_double);
if (is_double) {
- return LDoubleStackSlot::Create(index);
+ return LDoubleStackSlot::Create(index, zone());
} else {
- return LStackSlot::Create(index);
+ return LStackSlot::Create(index, zone());
}
}
@@ -474,23 +478,23 @@ void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
LInstructionGap* gap = new(graph_->zone()) LInstructionGap(block);
int index = -1;
if (instr->IsControl()) {
- instructions_.Add(gap);
+ instructions_.Add(gap, zone());
index = instructions_.length();
- instructions_.Add(instr);
+ instructions_.Add(instr, zone());
} else {
index = instructions_.length();
- instructions_.Add(instr);
- instructions_.Add(gap);
+ instructions_.Add(instr, zone());
+ instructions_.Add(gap, zone());
}
if (instr->HasPointerMap()) {
- pointer_maps_.Add(instr->pointer_map());
+ pointer_maps_.Add(instr->pointer_map(), zone());
instr->pointer_map()->set_lithium_position(index);
}
}
LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
- return LConstantOperand::Create(constant->id());
+ return LConstantOperand::Create(constant->id(), zone());
}
@@ -529,7 +533,8 @@ int LChunk::NearestGapPos(int index) const {
void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
- GetGapAt(index)->GetOrCreateParallelMove(LGap::START)->AddMove(from, to);
+ GetGapAt(index)->GetOrCreateParallelMove(
+ LGap::START, zone())->AddMove(from, to, zone());
}
@@ -549,6 +554,12 @@ LChunk* LChunkBuilder::Build() {
chunk_ = new(zone()) LChunk(info(), graph());
HPhase phase("L_Building chunk", chunk_);
status_ = BUILDING;
+
+ // Reserve the first spill slot for the state of dynamic alignment.
+ int alignment_state_index = chunk_->GetNextSpillIndex(false);
+ ASSERT_EQ(alignment_state_index, 0);
+ USE(alignment_state_index);
+
const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
for (int i = 0; i < blocks->length(); i++) {
HBasicBlock* next = NULL;
@@ -729,22 +740,6 @@ LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
}
-LInstruction* LChunkBuilder::SetInstructionPendingDeoptimizationEnvironment(
- LInstruction* instr, int ast_id) {
- ASSERT(instruction_pending_deoptimization_environment_ == NULL);
- ASSERT(pending_deoptimization_ast_id_ == AstNode::kNoNumber);
- instruction_pending_deoptimization_environment_ = instr;
- pending_deoptimization_ast_id_ = ast_id;
- return instr;
-}
-
-
-void LChunkBuilder::ClearInstructionPendingDeoptimizationEnvironment() {
- instruction_pending_deoptimization_environment_ = NULL;
- pending_deoptimization_ast_id_ = AstNode::kNoNumber;
-}
-
-
LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
HInstruction* hinstr,
CanDeoptimize can_deoptimize) {
@@ -757,8 +752,10 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
if (hinstr->HasObservableSideEffects()) {
ASSERT(hinstr->next()->IsSimulate());
HSimulate* sim = HSimulate::cast(hinstr->next());
- instr = SetInstructionPendingDeoptimizationEnvironment(
- instr, sim->ast_id());
+ ASSERT(instruction_pending_deoptimization_environment_ == NULL);
+ ASSERT(pending_deoptimization_ast_id_ == AstNode::kNoNumber);
+ instruction_pending_deoptimization_environment_ = instr;
+ pending_deoptimization_ast_id_ = sim->ast_id();
}
// If instruction does not have side-effects lazy deoptimization
@@ -776,15 +773,9 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
}
-LInstruction* LChunkBuilder::MarkAsSaveDoubles(LInstruction* instr) {
- instr->MarkAsSaveDoubles();
- return instr;
-}
-
-
LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
ASSERT(!instr->HasPointerMap());
- instr->set_pointer_map(new(zone()) LPointerMap(position_));
+ instr->set_pointer_map(new(zone()) LPointerMap(position_, zone()));
return instr;
}
@@ -1011,7 +1002,8 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
hydrogen_env->parameter_count(),
argument_count_,
value_count,
- outer);
+ outer,
+ zone());
int argument_index = *argument_index_accumulator;
for (int i = 0; i < value_count; ++i) {
if (hydrogen_env->is_special_index(i)) continue;
@@ -1330,6 +1322,7 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
ASSERT(instr->value()->representation().IsInteger32());
ASSERT(instr->representation().IsInteger32());
+ if (instr->HasNoUses()) return NULL;
LOperand* input = UseRegisterAtStart(instr->value());
LBitNotI* result = new(zone()) LBitNotI(input);
return DefineSameAsFirst(result);
@@ -1354,6 +1347,12 @@ LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
}
+LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
+ UNIMPLEMENTED();
+ return NULL;
+}
+
+
LInstruction* LChunkBuilder::DoMod(HMod* instr) {
if (instr->representation().IsInteger32()) {
ASSERT(instr->left()->representation().IsInteger32());
@@ -1557,7 +1556,7 @@ LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
LOperand* temp = TempRegister();
- return new LIsStringAndBranch(UseRegister(instr->value()), temp);
+ return new(zone()) LIsStringAndBranch(UseRegister(instr->value()), temp);
}
@@ -1583,7 +1582,7 @@ LInstruction* LChunkBuilder::DoStringCompareAndBranch(
LOperand* left = UseFixed(instr->left(), edx);
LOperand* right = UseFixed(instr->right(), eax);
- LStringCompareAndBranch* result = new
+ LStringCompareAndBranch* result = new(zone())
LStringCompareAndBranch(context, left, right);
return MarkAsCall(result, instr);
@@ -1708,8 +1707,9 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
} else {
ASSERT(to.IsInteger32());
LOperand* value = UseRegister(instr->value());
- bool needs_check = !instr->value()->type().IsSmi();
- if (needs_check) {
+ if (instr->value()->type().IsSmi()) {
+ return DefineSameAsFirst(new(zone()) LSmiUntag(value, false));
+ } else {
bool truncating = instr->CanTruncateToInt32();
LOperand* xmm_temp =
(truncating && CpuFeatures::IsSupported(SSE3))
@@ -1717,8 +1717,6 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
: FixedTemp(xmm1);
LTaggedToI* res = new(zone()) LTaggedToI(value, xmm_temp);
return AssignEnvironment(DefineSameAsFirst(res));
- } else {
- return DefineSameAsFirst(new(zone()) LSmiUntag(value, needs_check));
}
}
} else if (from.IsDouble()) {
@@ -1800,9 +1798,9 @@ LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
}
-LInstruction* LChunkBuilder::DoCheckMap(HCheckMap* instr) {
+LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- LCheckMap* result = new(zone()) LCheckMap(value);
+ LCheckMaps* result = new(zone()) LCheckMaps(value);
return AssignEnvironment(result);
}
@@ -1862,7 +1860,7 @@ LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
LOperand* context = UseFixed(instr->context(), esi);
- LOperand* global_object = UseFixed(instr->global_object(), eax);
+ LOperand* global_object = UseFixed(instr->global_object(), edx);
LLoadGlobalGeneric* result =
new(zone()) LLoadGlobalGeneric(context, global_object);
return MarkAsCall(DefineFixed(result, eax), instr);
@@ -1922,7 +1920,7 @@ LInstruction* LChunkBuilder::DoLoadNamedFieldPolymorphic(
ASSERT(instr->representation().IsTagged());
if (instr->need_generic()) {
LOperand* context = UseFixed(instr->context(), esi);
- LOperand* obj = UseFixed(instr->object(), eax);
+ LOperand* obj = UseFixed(instr->object(), edx);
LLoadNamedFieldPolymorphic* result =
new(zone()) LLoadNamedFieldPolymorphic(context, obj);
return MarkAsCall(DefineFixed(result, eax), instr);
@@ -1938,7 +1936,7 @@ LInstruction* LChunkBuilder::DoLoadNamedFieldPolymorphic(
LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
LOperand* context = UseFixed(instr->context(), esi);
- LOperand* object = UseFixed(instr->object(), eax);
+ LOperand* object = UseFixed(instr->object(), edx);
LLoadNamedGeneric* result = new(zone()) LLoadNamedGeneric(context, object);
return MarkAsCall(DefineFixed(result, eax), instr);
}
@@ -2003,8 +2001,7 @@ LInstruction* LChunkBuilder::DoLoadKeyedSpecializedArrayElement(
LOperand* external_pointer = UseRegister(instr->external_pointer());
LOperand* key = UseRegisterOrConstant(instr->key());
LLoadKeyedSpecializedArrayElement* result =
- new(zone()) LLoadKeyedSpecializedArrayElement(external_pointer,
- key);
+ new(zone()) LLoadKeyedSpecializedArrayElement(external_pointer, key);
LInstruction* load_instr = DefineAsRegister(result);
// An unsigned int array load might overflow and cause a deopt, make sure it
// has an environment.
@@ -2017,7 +2014,7 @@ LInstruction* LChunkBuilder::DoLoadKeyedSpecializedArrayElement(
LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
LOperand* context = UseFixed(instr->context(), esi);
LOperand* object = UseFixed(instr->object(), edx);
- LOperand* key = UseFixed(instr->key(), eax);
+ LOperand* key = UseFixed(instr->key(), ecx);
LLoadKeyedGeneric* result =
new(zone()) LLoadKeyedGeneric(context, object, key);
@@ -2106,8 +2103,9 @@ LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
LInstruction* LChunkBuilder::DoTransitionElementsKind(
HTransitionElementsKind* instr) {
- if (instr->original_map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS &&
- instr->transitioned_map()->elements_kind() == FAST_ELEMENTS) {
+ ElementsKind from_kind = instr->original_map()->elements_kind();
+ ElementsKind to_kind = instr->transitioned_map()->elements_kind();
+ if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
LOperand* object = UseRegister(instr->object());
LOperand* new_map_reg = TempRegister();
LOperand* temp_reg = TempRegister();
@@ -2129,6 +2127,8 @@ LInstruction* LChunkBuilder::DoTransitionElementsKind(
LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
bool needs_write_barrier = instr->NeedsWriteBarrier();
+ bool needs_write_barrier_for_map = !instr->transition().is_null() &&
+ instr->NeedsWriteBarrierForMap();
LOperand* obj;
if (needs_write_barrier) {
@@ -2136,7 +2136,9 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
? UseRegister(instr->object())
: UseTempRegister(instr->object());
} else {
- obj = UseRegisterAtStart(instr->object());
+ obj = needs_write_barrier_for_map
+ ? UseRegister(instr->object())
+ : UseRegisterAtStart(instr->object());
}
LOperand* val = needs_write_barrier
@@ -2145,11 +2147,13 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
// We only need a scratch register if we have a write barrier or we
// have a store into the properties array (not in-object-property).
- LOperand* temp = (!instr->is_in_object() || needs_write_barrier)
- ? TempRegister()
- : NULL;
+ LOperand* temp = (!instr->is_in_object() || needs_write_barrier ||
+ needs_write_barrier_for_map) ? TempRegister() : NULL;
+
+ // We need a temporary register for the write barrier of the map field.
+ LOperand* temp_map = needs_write_barrier_for_map ? TempRegister() : NULL;
- return new(zone()) LStoreNamedField(obj, val, temp);
+ return new(zone()) LStoreNamedField(obj, val, temp, temp_map);
}
@@ -2348,9 +2352,12 @@ LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
ASSERT(pending_deoptimization_ast_id_ == instr->ast_id());
LLazyBailout* lazy_bailout = new(zone()) LLazyBailout;
LInstruction* result = AssignEnvironment(lazy_bailout);
+ // Store the lazy deopt environment with the instruction if needed. Right
+ // now it is only used for LInstanceOfKnownGlobal.
instruction_pending_deoptimization_environment_->
- set_deoptimization_environment(result->environment());
- ClearInstructionPendingDeoptimizationEnvironment();
+ SetDeferredLazyDeoptimizationEnvironment(result->environment());
+ instruction_pending_deoptimization_environment_ = NULL;
+ pending_deoptimization_ast_id_ = AstNode::kNoNumber;
return result;
}
@@ -2380,8 +2387,8 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
undefined,
instr->call_kind(),
instr->is_construct());
- if (instr->arguments() != NULL) {
- inner->Bind(instr->arguments(), graph()->GetArgumentsObject());
+ if (instr->arguments_var() != NULL) {
+ inner->Bind(instr->arguments_var(), graph()->GetArgumentsObject());
}
current_block_->UpdateEnvironment(inner);
chunk_->AddInlinedClosure(instr->closure());
@@ -2390,10 +2397,20 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
+ LInstruction* pop = NULL;
+
+ HEnvironment* env = current_block_->last_environment();
+
+ if (instr->arguments_pushed()) {
+ int argument_count = env->arguments_environment()->parameter_count();
+ pop = new(zone()) LDrop(argument_count);
+ argument_count_ -= argument_count;
+ }
+
HEnvironment* outer = current_block_->last_environment()->
DiscardInlined(false);
current_block_->UpdateEnvironment(outer);
- return NULL;
+ return pop;
}
diff --git a/deps/v8/src/ia32/lithium-ia32.h b/deps/v8/src/ia32/lithium-ia32.h
index 4ecce96d0f..cd20631805 100644
--- a/deps/v8/src/ia32/lithium-ia32.h
+++ b/deps/v8/src/ia32/lithium-ia32.h
@@ -65,7 +65,7 @@ class LCodeGen;
V(CallStub) \
V(CheckFunction) \
V(CheckInstanceType) \
- V(CheckMap) \
+ V(CheckMaps) \
V(CheckNonSmi) \
V(CheckPrototypeMaps) \
V(CheckSmi) \
@@ -174,7 +174,8 @@ class LCodeGen;
V(CheckMapValue) \
V(LoadFieldByIndex) \
V(DateField) \
- V(WrapReceiver)
+ V(WrapReceiver) \
+ V(Drop)
#define DECLARE_CONCRETE_INSTRUCTION(type, mnemonic) \
@@ -198,8 +199,7 @@ class LInstruction: public ZoneObject {
LInstruction()
: environment_(NULL),
hydrogen_value_(NULL),
- is_call_(false),
- is_save_doubles_(false) { }
+ is_call_(false) { }
virtual ~LInstruction() { }
virtual void CompileToNative(LCodeGen* generator) = 0;
@@ -242,22 +242,12 @@ class LInstruction: public ZoneObject {
void set_hydrogen_value(HValue* value) { hydrogen_value_ = value; }
HValue* hydrogen_value() const { return hydrogen_value_; }
- void set_deoptimization_environment(LEnvironment* env) {
- deoptimization_environment_.set(env);
- }
- LEnvironment* deoptimization_environment() const {
- return deoptimization_environment_.get();
- }
- bool HasDeoptimizationEnvironment() const {
- return deoptimization_environment_.is_set();
- }
+ virtual void SetDeferredLazyDeoptimizationEnvironment(LEnvironment* env) { }
void MarkAsCall() { is_call_ = true; }
- void MarkAsSaveDoubles() { is_save_doubles_ = true; }
// Interface to the register allocator and iterators.
bool IsMarkedAsCall() const { return is_call_; }
- bool IsMarkedAsSaveDoubles() const { return is_save_doubles_; }
virtual bool HasResult() const = 0;
virtual LOperand* result() = 0;
@@ -278,9 +268,7 @@ class LInstruction: public ZoneObject {
LEnvironment* environment_;
SetOncePointer<LPointerMap> pointer_map_;
HValue* hydrogen_value_;
- SetOncePointer<LEnvironment> deoptimization_environment_;
bool is_call_;
- bool is_save_doubles_;
};
@@ -339,8 +327,10 @@ class LGap: public LTemplateInstruction<0, 0, 0> {
LAST_INNER_POSITION = AFTER
};
- LParallelMove* GetOrCreateParallelMove(InnerPosition pos) {
- if (parallel_moves_[pos] == NULL) parallel_moves_[pos] = new LParallelMove;
+ LParallelMove* GetOrCreateParallelMove(InnerPosition pos, Zone* zone) {
+ if (parallel_moves_[pos] == NULL) {
+ parallel_moves_[pos] = new(zone) LParallelMove(zone);
+ }
return parallel_moves_[pos];
}
@@ -525,9 +515,8 @@ class LArgumentsLength: public LTemplateInstruction<1, 1, 0> {
class LArgumentsElements: public LTemplateInstruction<1, 0, 0> {
public:
- LArgumentsElements() { }
-
DECLARE_CONCRETE_INSTRUCTION(ArgumentsElements, "arguments-elements")
+ DECLARE_HYDROGEN_ACCESSOR(ArgumentsElements)
};
@@ -844,6 +833,15 @@ class LInstanceOfKnownGlobal: public LTemplateInstruction<1, 2, 1> {
DECLARE_HYDROGEN_ACCESSOR(InstanceOfKnownGlobal)
Handle<JSFunction> function() const { return hydrogen()->function(); }
+ LEnvironment* GetDeferredLazyDeoptimizationEnvironment() {
+ return lazy_deopt_env_;
+ }
+ virtual void SetDeferredLazyDeoptimizationEnvironment(LEnvironment* env) {
+ lazy_deopt_env_ = env;
+ }
+
+ private:
+ LEnvironment* lazy_deopt_env_;
};
@@ -1242,13 +1240,13 @@ class LLoadKeyedFastElement: public LTemplateInstruction<1, 2, 0> {
LOperand* elements() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
+ uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
class LLoadKeyedFastDoubleElement: public LTemplateInstruction<1, 2, 0> {
public:
- LLoadKeyedFastDoubleElement(LOperand* elements,
- LOperand* key) {
+ LLoadKeyedFastDoubleElement(LOperand* elements, LOperand* key) {
inputs_[0] = elements;
inputs_[1] = key;
}
@@ -1259,13 +1257,13 @@ class LLoadKeyedFastDoubleElement: public LTemplateInstruction<1, 2, 0> {
LOperand* elements() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
+ uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
class LLoadKeyedSpecializedArrayElement: public LTemplateInstruction<1, 2, 0> {
public:
- LLoadKeyedSpecializedArrayElement(LOperand* external_pointer,
- LOperand* key) {
+ LLoadKeyedSpecializedArrayElement(LOperand* external_pointer, LOperand* key) {
inputs_[0] = external_pointer;
inputs_[1] = key;
}
@@ -1279,6 +1277,7 @@ class LLoadKeyedSpecializedArrayElement: public LTemplateInstruction<1, 2, 0> {
ElementsKind elements_kind() const {
return hydrogen()->elements_kind();
}
+ uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
@@ -1401,6 +1400,19 @@ class LPushArgument: public LTemplateInstruction<0, 1, 0> {
};
+class LDrop: public LTemplateInstruction<0, 0, 0> {
+ public:
+ explicit LDrop(int count) : count_(count) { }
+
+ int count() const { return count_; }
+
+ DECLARE_CONCRETE_INSTRUCTION(Drop, "drop")
+
+ private:
+ int count_;
+};
+
+
class LThisFunction: public LTemplateInstruction<1, 0, 0> {
public:
DECLARE_CONCRETE_INSTRUCTION(ThisFunction, "this-function")
@@ -1489,6 +1501,7 @@ class LInvokeFunction: public LTemplateInstruction<1, 2, 0> {
virtual void PrintDataTo(StringStream* stream);
int arity() const { return hydrogen()->argument_count() - 1; }
+ Handle<JSFunction> known_function() { return hydrogen()->known_function(); }
};
@@ -1704,12 +1717,16 @@ class LSmiUntag: public LTemplateInstruction<1, 1, 0> {
};
-class LStoreNamedField: public LTemplateInstruction<0, 2, 1> {
+class LStoreNamedField: public LTemplateInstruction<0, 2, 2> {
public:
- LStoreNamedField(LOperand* obj, LOperand* val, LOperand* temp) {
+ LStoreNamedField(LOperand* obj,
+ LOperand* val,
+ LOperand* temp,
+ LOperand* temp_map) {
inputs_[0] = obj;
inputs_[1] = val;
temps_[0] = temp;
+ temps_[1] = temp_map;
}
DECLARE_CONCRETE_INSTRUCTION(StoreNamedField, "store-named-field")
@@ -1765,6 +1782,7 @@ class LStoreKeyedFastElement: public LTemplateInstruction<0, 3, 0> {
LOperand* object() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
LOperand* value() { return inputs_[2]; }
+ uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
@@ -1787,6 +1805,9 @@ class LStoreKeyedFastDoubleElement: public LTemplateInstruction<0, 3, 0> {
LOperand* elements() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
LOperand* value() { return inputs_[2]; }
+ uint32_t additional_index() const { return hydrogen()->index_offset(); }
+
+ bool NeedsCanonicalization() { return hydrogen()->NeedsCanonicalization(); }
};
@@ -1810,6 +1831,7 @@ class LStoreKeyedSpecializedArrayElement: public LTemplateInstruction<0, 3, 0> {
ElementsKind elements_kind() const {
return hydrogen()->elements_kind();
}
+ uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
@@ -1949,14 +1971,14 @@ class LCheckInstanceType: public LTemplateInstruction<0, 1, 1> {
};
-class LCheckMap: public LTemplateInstruction<0, 1, 0> {
+class LCheckMaps: public LTemplateInstruction<0, 1, 0> {
public:
- explicit LCheckMap(LOperand* value) {
+ explicit LCheckMaps(LOperand* value) {
inputs_[0] = value;
}
- DECLARE_CONCRETE_INSTRUCTION(CheckMap, "check-map")
- DECLARE_HYDROGEN_ACCESSOR(CheckMap)
+ DECLARE_CONCRETE_INSTRUCTION(CheckMaps, "check-maps")
+ DECLARE_HYDROGEN_ACCESSOR(CheckMaps)
};
@@ -2285,11 +2307,12 @@ class LChunk: public ZoneObject {
public:
LChunk(CompilationInfo* info, HGraph* graph)
: spill_slot_count_(0),
+ num_double_slots_(0),
info_(info),
graph_(graph),
- instructions_(32),
- pointer_maps_(8),
- inlined_closures_(1) { }
+ instructions_(32, graph->zone()),
+ pointer_maps_(8, graph->zone()),
+ inlined_closures_(1, graph->zone()) { }
void AddInstruction(LInstruction* instruction, HBasicBlock* block);
LConstantOperand* DefineConstantOperand(HConstant* constant);
@@ -2302,6 +2325,7 @@ class LChunk: public ZoneObject {
int ParameterAt(int index);
int GetParameterStackSlot(int index) const;
int spill_slot_count() const { return spill_slot_count_; }
+ int num_double_slots() const { return num_double_slots_; }
CompilationInfo* info() const { return info_; }
HGraph* graph() const { return graph_; }
const ZoneList<LInstruction*>* instructions() const { return &instructions_; }
@@ -2334,11 +2358,14 @@ class LChunk: public ZoneObject {
}
void AddInlinedClosure(Handle<JSFunction> closure) {
- inlined_closures_.Add(closure);
+ inlined_closures_.Add(closure, zone());
}
+ Zone* zone() const { return graph_->zone(); }
+
private:
int spill_slot_count_;
+ int num_double_slots_;
CompilationInfo* info_;
HGraph* const graph_;
ZoneList<LInstruction*> instructions_;
@@ -2353,7 +2380,7 @@ class LChunkBuilder BASE_EMBEDDED {
: chunk_(NULL),
info_(info),
graph_(graph),
- zone_(graph->isolate()->zone()),
+ zone_(graph->zone()),
status_(UNUSED),
current_instruction_(NULL),
current_block_(NULL),
@@ -2383,7 +2410,7 @@ class LChunkBuilder BASE_EMBEDDED {
LChunk* chunk() const { return chunk_; }
CompilationInfo* info() const { return info_; }
HGraph* graph() const { return graph_; }
- Zone* zone() { return zone_; }
+ Zone* zone() const { return zone_; }
bool is_unused() const { return status_ == UNUSED; }
bool is_building() const { return status_ == BUILDING; }
@@ -2471,11 +2498,6 @@ class LChunkBuilder BASE_EMBEDDED {
LInstruction* instr,
HInstruction* hinstr,
CanDeoptimize can_deoptimize = CANNOT_DEOPTIMIZE_EAGERLY);
- LInstruction* MarkAsSaveDoubles(LInstruction* instr);
-
- LInstruction* SetInstructionPendingDeoptimizationEnvironment(
- LInstruction* instr, int ast_id);
- void ClearInstructionPendingDeoptimizationEnvironment();
LEnvironment* CreateEnvironment(HEnvironment* hydrogen_env,
int* argument_index_accumulator);
diff --git a/deps/v8/src/ia32/macro-assembler-ia32.cc b/deps/v8/src/ia32/macro-assembler-ia32.cc
index 60e38a6c13..2012a5ad9d 100644
--- a/deps/v8/src/ia32/macro-assembler-ia32.cc
+++ b/deps/v8/src/ia32/macro-assembler-ia32.cc
@@ -237,6 +237,68 @@ void MacroAssembler::RecordWriteField(
}
+void MacroAssembler::RecordWriteForMap(
+ Register object,
+ Handle<Map> map,
+ Register scratch1,
+ Register scratch2,
+ SaveFPRegsMode save_fp) {
+ Label done;
+
+ Register address = scratch1;
+ Register value = scratch2;
+ if (emit_debug_code()) {
+ Label ok;
+ lea(address, FieldOperand(object, HeapObject::kMapOffset));
+ test_b(address, (1 << kPointerSizeLog2) - 1);
+ j(zero, &ok, Label::kNear);
+ int3();
+ bind(&ok);
+ }
+
+ ASSERT(!object.is(value));
+ ASSERT(!object.is(address));
+ ASSERT(!value.is(address));
+ if (emit_debug_code()) {
+ AbortIfSmi(object);
+ }
+
+ if (!FLAG_incremental_marking) {
+ return;
+ }
+
+ // A single check of the map's page's interesting flag suffices, since it is
+ // only set during incremental collection, and then it's also guaranteed that
+ // the from object's page's interesting flag is also set. This optimization
+ // relies on the fact that maps can never be in new space.
+ ASSERT(!isolate()->heap()->InNewSpace(*map));
+ CheckPageFlagForMap(map,
+ MemoryChunk::kPointersToHereAreInterestingMask,
+ zero,
+ &done,
+ Label::kNear);
+
+ // Delay the initialization of |address| and |value| for the stub until it's
+ // known that they will be needed. Up until this point their values are not
+ // needed since they are embedded in the operands of instructions that need
+ // them.
+ lea(address, FieldOperand(object, HeapObject::kMapOffset));
+ mov(value, Immediate(map));
+ RecordWriteStub stub(object, value, address, OMIT_REMEMBERED_SET, save_fp);
+ CallStub(&stub);
+
+ bind(&done);
+
+ // Clobber clobbered input registers when running with the debug-code flag
+ // turned on to provoke errors.
+ if (emit_debug_code()) {
+ mov(value, Immediate(BitCast<int32_t>(kZapValue)));
+ mov(scratch1, Immediate(BitCast<int32_t>(kZapValue)));
+ mov(scratch2, Immediate(BitCast<int32_t>(kZapValue)));
+ }
+}
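
A minimal standalone sketch of the decision RecordWriteForMap makes above: because maps never live in new space and the destination page's interesting flag is only set during incremental marking, the fast path reduces to one masked flag test. PageSketch, the bit value, and the helper name below are illustrative assumptions, not V8 definitions.

#include <cassert>
#include <cstdint>

struct PageSketch { uint32_t flags; };
// Assumed bit position; stands in for MemoryChunk::kPointersToHereAreInterestingMask.
constexpr uint32_t kPointersToHereAreInterestingBit = 1u << 3;

// Take the slow write-barrier path only when incremental marking has flagged
// the page of the object whose map field is being written.
bool MapWriteNeedsBarrier(const PageSketch& page) {
  return (page.flags & kPointersToHereAreInterestingBit) != 0;
}

int main() {
  PageSketch idle{0};
  PageSketch marking{kPointersToHereAreInterestingBit};
  assert(!MapWriteNeedsBarrier(idle));     // barrier skipped, as in the fast path
  assert(MapWriteNeedsBarrier(marking));   // falls through to RecordWriteStub
  return 0;
}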
+
+
void MacroAssembler::RecordWrite(Register object,
Register address,
Register value,
@@ -382,10 +444,12 @@ void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
void MacroAssembler::CheckFastElements(Register map,
Label* fail,
Label::Distance distance) {
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
- STATIC_ASSERT(FAST_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
+ STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_ELEMENTS == 2);
+ STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
cmpb(FieldOperand(map, Map::kBitField2Offset),
- Map::kMaximumBitField2FastElementValue);
+ Map::kMaximumBitField2FastHoleyElementValue);
j(above, fail, distance);
}
@@ -393,23 +457,26 @@ void MacroAssembler::CheckFastElements(Register map,
void MacroAssembler::CheckFastObjectElements(Register map,
Label* fail,
Label::Distance distance) {
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
- STATIC_ASSERT(FAST_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
+ STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_ELEMENTS == 2);
+ STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
cmpb(FieldOperand(map, Map::kBitField2Offset),
- Map::kMaximumBitField2FastSmiOnlyElementValue);
+ Map::kMaximumBitField2FastHoleySmiElementValue);
j(below_equal, fail, distance);
cmpb(FieldOperand(map, Map::kBitField2Offset),
- Map::kMaximumBitField2FastElementValue);
+ Map::kMaximumBitField2FastHoleyElementValue);
j(above, fail, distance);
}
-void MacroAssembler::CheckFastSmiOnlyElements(Register map,
- Label* fail,
- Label::Distance distance) {
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
+void MacroAssembler::CheckFastSmiElements(Register map,
+ Label* fail,
+ Label::Distance distance) {
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
+ STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
cmpb(FieldOperand(map, Map::kBitField2Offset),
- Map::kMaximumBitField2FastSmiOnlyElementValue);
+ Map::kMaximumBitField2FastHoleySmiElementValue);
j(above, fail, distance);
}
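
The STATIC_ASSERTs above encode the new contiguous ordering of the fast elements kinds, which is what lets a single unsigned compare against a maximum bit-field value classify a map. A rough sketch of that invariant; the enum values mirror the asserts, while the helper names and the FAST_DOUBLE_ELEMENTS value are assumptions.

#include <cassert>

enum ElementsKindSketch {
  FAST_SMI_ELEMENTS = 0,
  FAST_HOLEY_SMI_ELEMENTS = 1,
  FAST_ELEMENTS = 2,
  FAST_HOLEY_ELEMENTS = 3,
  FAST_DOUBLE_ELEMENTS = 4  // first kind outside the ranges checked above (assumed)
};

// One "<= maximum" comparison covers a contiguous prefix of kinds, which is
// exactly what the generated cmpb/j(above) pairs rely on.
bool IsFastSmiOrObjectKind(int kind) { return kind <= FAST_HOLEY_ELEMENTS; }
bool IsFastSmiKind(int kind) { return kind <= FAST_HOLEY_SMI_ELEMENTS; }

int main() {
  assert(IsFastSmiKind(FAST_HOLEY_SMI_ELEMENTS));
  assert(!IsFastSmiKind(FAST_ELEMENTS));
  assert(IsFastSmiOrObjectKind(FAST_HOLEY_ELEMENTS));
  assert(!IsFastSmiOrObjectKind(FAST_DOUBLE_ELEMENTS));
  return 0;
}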
@@ -493,24 +560,18 @@ void MacroAssembler::CompareMap(Register obj,
CompareMapMode mode) {
cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
if (mode == ALLOW_ELEMENT_TRANSITION_MAPS) {
- Map* transitioned_fast_element_map(
- map->LookupElementsTransitionMap(FAST_ELEMENTS, NULL));
- ASSERT(transitioned_fast_element_map == NULL ||
- map->elements_kind() != FAST_ELEMENTS);
- if (transitioned_fast_element_map != NULL) {
- j(equal, early_success, Label::kNear);
- cmp(FieldOperand(obj, HeapObject::kMapOffset),
- Handle<Map>(transitioned_fast_element_map));
- }
-
- Map* transitioned_double_map(
- map->LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, NULL));
- ASSERT(transitioned_double_map == NULL ||
- map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
- if (transitioned_double_map != NULL) {
- j(equal, early_success, Label::kNear);
- cmp(FieldOperand(obj, HeapObject::kMapOffset),
- Handle<Map>(transitioned_double_map));
+ ElementsKind kind = map->elements_kind();
+ if (IsFastElementsKind(kind)) {
+ bool packed = IsFastPackedElementsKind(kind);
+ Map* current_map = *map;
+ while (CanTransitionToMoreGeneralFastElementsKind(kind, packed)) {
+ kind = GetNextMoreGeneralFastElementsKind(kind, packed);
+ current_map = current_map->LookupElementsTransitionMap(kind);
+ if (!current_map) break;
+ j(equal, early_success, Label::kNear);
+ cmp(FieldOperand(obj, HeapObject::kMapOffset),
+ Handle<Map>(current_map));
+ }
}
}
}
@@ -2161,27 +2222,38 @@ void MacroAssembler::LoadTransitionedArrayMapConditional(
mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
// Check that the function's map is the same as the expected cached map.
- int expected_index =
- Context::GetContextMapIndexFromElementsKind(expected_kind);
- cmp(map_in_out, Operand(scratch, Context::SlotOffset(expected_index)));
+ mov(scratch, Operand(scratch,
+ Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
+
+ size_t offset = expected_kind * kPointerSize +
+ FixedArrayBase::kHeaderSize;
+ cmp(map_in_out, FieldOperand(scratch, offset));
j(not_equal, no_map_match);
// Use the transitioned cached map.
- int trans_index =
- Context::GetContextMapIndexFromElementsKind(transitioned_kind);
- mov(map_in_out, Operand(scratch, Context::SlotOffset(trans_index)));
+ offset = transitioned_kind * kPointerSize +
+ FixedArrayBase::kHeaderSize;
+ mov(map_in_out, FieldOperand(scratch, offset));
}
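
The replacement above stops indexing the context by a per-kind slot and instead indexes a single JS_ARRAY_MAPS fixed array by elements kind. A small sketch of the offset arithmetic, assuming ia32 constants (kPointerSize of 4 and an 8-byte FixedArray header):

#include <cassert>
#include <cstddef>

constexpr size_t kPointerSizeSketch = 4;       // ia32 pointer size
constexpr size_t kFixedArrayHeaderSketch = 8;  // assumed FixedArrayBase::kHeaderSize

size_t ArrayMapSlotOffset(int elements_kind) {
  return static_cast<size_t>(elements_kind) * kPointerSizeSketch +
         kFixedArrayHeaderSketch;
}

int main() {
  // e.g. FAST_ELEMENTS == 2 lives 8 + 2 * 4 = 16 bytes into the maps array.
  assert(ArrayMapSlotOffset(2) == 16);
  assert(ArrayMapSlotOffset(0) == kFixedArrayHeaderSketch);
  return 0;
}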
void MacroAssembler::LoadInitialArrayMap(
- Register function_in, Register scratch, Register map_out) {
+ Register function_in, Register scratch,
+ Register map_out, bool can_have_holes) {
ASSERT(!function_in.is(map_out));
Label done;
mov(map_out, FieldOperand(function_in,
JSFunction::kPrototypeOrInitialMapOffset));
if (!FLAG_smi_only_arrays) {
- LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
- FAST_ELEMENTS,
+ ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
+ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+ kind,
+ map_out,
+ scratch,
+ &done);
+ } else if (can_have_holes) {
+ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+ FAST_HOLEY_SMI_ELEMENTS,
map_out,
scratch,
&done);
@@ -2566,7 +2638,7 @@ bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
CodePatcher::CodePatcher(byte* address, int size)
: address_(address),
size_(size),
- masm_(Isolate::Current(), address, size + Assembler::kGap) {
+ masm_(NULL, address, size + Assembler::kGap) {
// Create a new macro assembler pointing to the address of the code to patch.
// The size is adjusted with kGap in order for the assembler to generate size
// bytes of instructions without failing with buffer size constraints.
@@ -2608,6 +2680,28 @@ void MacroAssembler::CheckPageFlag(
}
+void MacroAssembler::CheckPageFlagForMap(
+ Handle<Map> map,
+ int mask,
+ Condition cc,
+ Label* condition_met,
+ Label::Distance condition_met_distance) {
+ ASSERT(cc == zero || cc == not_zero);
+ Page* page = Page::FromAddress(map->address());
+ ExternalReference reference(ExternalReference::page_flags(page));
+ // The inlined static address check of the page's flags relies
+ // on maps never being compacted.
+ ASSERT(!isolate()->heap()->mark_compact_collector()->
+ IsOnEvacuationCandidate(*map));
+ if (mask < (1 << kBitsPerByte)) {
+ test_b(Operand::StaticVariable(reference), static_cast<uint8_t>(mask));
+ } else {
+ test(Operand::StaticVariable(reference), Immediate(mask));
+ }
+ j(cc, condition_met, condition_met_distance);
+}
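
CheckPageFlagForMap above can test a static address because map pages are never evacuation candidates; the remaining choice is only the operand width. A sketch of that byte-versus-dword decision, with illustrative flag values:

#include <cassert>
#include <cstdint>

bool PageFlagSet(uint32_t flags, uint32_t mask) {
  if (mask < (1u << 8)) {
    // test_b analogue: only the low byte of the flags word is examined.
    return (static_cast<uint8_t>(flags) & static_cast<uint8_t>(mask)) != 0;
  }
  return (flags & mask) != 0;  // full-width test
}

int main() {
  assert(PageFlagSet(0x05, 0x04));
  assert(!PageFlagSet(0x05, 0x02));
  assert(PageFlagSet(0x100, 0x100));
  return 0;
}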
+
+
void MacroAssembler::JumpIfBlack(Register object,
Register scratch0,
Register scratch1,
diff --git a/deps/v8/src/ia32/macro-assembler-ia32.h b/deps/v8/src/ia32/macro-assembler-ia32.h
index 66d1ce7d38..5c7a6d6d26 100644
--- a/deps/v8/src/ia32/macro-assembler-ia32.h
+++ b/deps/v8/src/ia32/macro-assembler-ia32.h
@@ -90,6 +90,13 @@ class MacroAssembler: public Assembler {
Label* condition_met,
Label::Distance condition_met_distance = Label::kFar);
+ void CheckPageFlagForMap(
+ Handle<Map> map,
+ int mask,
+ Condition cc,
+ Label* condition_met,
+ Label::Distance condition_met_distance = Label::kFar);
+
// Check if object is in new space. Jumps if the object is not in new space.
// The register scratch can be object itself, but scratch will be clobbered.
void JumpIfNotInNewSpace(Register object,
@@ -194,6 +201,16 @@ class MacroAssembler: public Assembler {
RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
SmiCheck smi_check = INLINE_SMI_CHECK);
+ // For the page containing |object|, mark the region covering the object's map
+ // dirty. |object| is the object being stored into, |map| is the Map object
+ // that was stored.
+ void RecordWriteForMap(
+ Register object,
+ Handle<Map> map,
+ Register scratch1,
+ Register scratch2,
+ SaveFPRegsMode save_fp);
+
#ifdef ENABLE_DEBUGGER_SUPPORT
// ---------------------------------------------------------------------------
// Debugger Support
@@ -235,7 +252,8 @@ class MacroAssembler: public Assembler {
// Load the initial map for new Arrays from a JSFunction.
void LoadInitialArrayMap(Register function_in,
Register scratch,
- Register map_out);
+ Register map_out,
+ bool can_have_holes);
// Load the global function with the given index.
void LoadGlobalFunction(int index, Register function);
@@ -357,9 +375,9 @@ class MacroAssembler: public Assembler {
// Check if a map for a JSObject indicates that the object has fast smi only
// elements. Jump to the specified label if it does not.
- void CheckFastSmiOnlyElements(Register map,
- Label* fail,
- Label::Distance distance = Label::kFar);
+ void CheckFastSmiElements(Register map,
+ Label* fail,
+ Label::Distance distance = Label::kFar);
// Check to see if maybe_number can be stored as a double in
// FastDoubleElements. If it can, store it at the index specified by key in
diff --git a/deps/v8/src/ia32/regexp-macro-assembler-ia32.cc b/deps/v8/src/ia32/regexp-macro-assembler-ia32.cc
index 04d6b62c80..07782cc809 100644
--- a/deps/v8/src/ia32/regexp-macro-assembler-ia32.cc
+++ b/deps/v8/src/ia32/regexp-macro-assembler-ia32.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -42,28 +42,30 @@ namespace internal {
#ifndef V8_INTERPRETED_REGEXP
/*
* This assembler uses the following register assignment convention
- * - edx : current character. Must be loaded using LoadCurrentCharacter
- * before using any of the dispatch methods.
- * - edi : current position in input, as negative offset from end of string.
+ * - edx : Current character. Must be loaded using LoadCurrentCharacter
+ * before using any of the dispatch methods. Temporarily stores the
+ * index of capture start after a matching pass for a global regexp.
+ * - edi : Current position in input, as negative offset from end of string.
* Please notice that this is the byte offset, not the character offset!
* - esi : end of input (points to byte after last character in input).
- * - ebp : frame pointer. Used to access arguments, local variables and
+ * - ebp : Frame pointer. Used to access arguments, local variables and
* RegExp registers.
- * - esp : points to tip of C stack.
- * - ecx : points to tip of backtrack stack
+ * - esp : Points to tip of C stack.
+ * - ecx : Points to tip of backtrack stack
*
* The registers eax and ebx are free to use for computations.
*
* Each call to a public method should retain this convention.
* The stack will have the following structure:
- * - Isolate* isolate (Address of the current isolate)
+ * - Isolate* isolate (address of the current isolate)
* - direct_call (if 1, direct call from JavaScript code, if 0
* call through the runtime system)
- * - stack_area_base (High end of the memory area to use as
+ * - stack_area_base (high end of the memory area to use as
* backtracking stack)
+ * - capture array size (may fit multiple sets of matches)
* - int* capture_array (int[num_saved_registers_], for output).
- * - end of input (Address of end of string)
- * - start of input (Address of first character in string)
+ * - end of input (address of end of string)
+ * - start of input (address of first character in string)
* - start index (character index of start)
* - String* input_string (location of a handle containing the string)
* --- frame alignment (if applicable) ---
@@ -72,9 +74,10 @@ namespace internal {
* - backup of caller esi
* - backup of caller edi
* - backup of caller ebx
+ * - success counter (only for global regexps to count matches).
* - Offset of location before start of input (effectively character
* position -1). Used to initialize capture registers to a non-position.
- * - register 0 ebp[-4] (Only positions must be stored in the first
+ * - register 0 ebp[-4] (only positions must be stored in the first
* - register 1 ebp[-8] num_saved_registers_ registers)
* - ...
*
@@ -98,8 +101,10 @@ namespace internal {
RegExpMacroAssemblerIA32::RegExpMacroAssemblerIA32(
Mode mode,
- int registers_to_save)
- : masm_(new MacroAssembler(Isolate::Current(), NULL, kRegExpCodeSize)),
+ int registers_to_save,
+ Zone* zone)
+ : NativeRegExpMacroAssembler(zone),
+ masm_(new MacroAssembler(Isolate::Current(), NULL, kRegExpCodeSize)),
mode_(mode),
num_registers_(registers_to_save),
num_saved_registers_(registers_to_save),
@@ -482,15 +487,6 @@ void RegExpMacroAssemblerIA32::CheckNotBackReference(
}
-void RegExpMacroAssemblerIA32::CheckNotRegistersEqual(int reg1,
- int reg2,
- Label* on_not_equal) {
- __ mov(eax, register_location(reg1));
- __ cmp(eax, register_location(reg2));
- BranchOrBacktrack(not_equal, on_not_equal);
-}
-
-
void RegExpMacroAssemblerIA32::CheckNotCharacter(uint32_t c,
Label* on_not_equal) {
__ cmp(current_character(), c);
@@ -501,9 +497,13 @@ void RegExpMacroAssemblerIA32::CheckNotCharacter(uint32_t c,
void RegExpMacroAssemblerIA32::CheckCharacterAfterAnd(uint32_t c,
uint32_t mask,
Label* on_equal) {
- __ mov(eax, current_character());
- __ and_(eax, mask);
- __ cmp(eax, c);
+ if (c == 0) {
+ __ test(current_character(), Immediate(mask));
+ } else {
+ __ mov(eax, mask);
+ __ and_(eax, current_character());
+ __ cmp(eax, c);
+ }
BranchOrBacktrack(equal, on_equal);
}
@@ -511,9 +511,13 @@ void RegExpMacroAssemblerIA32::CheckCharacterAfterAnd(uint32_t c,
void RegExpMacroAssemblerIA32::CheckNotCharacterAfterAnd(uint32_t c,
uint32_t mask,
Label* on_not_equal) {
- __ mov(eax, current_character());
- __ and_(eax, mask);
- __ cmp(eax, c);
+ if (c == 0) {
+ __ test(current_character(), Immediate(mask));
+ } else {
+ __ mov(eax, mask);
+ __ and_(eax, current_character());
+ __ cmp(eax, c);
+ }
BranchOrBacktrack(not_equal, on_not_equal);
}
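
Both rewrites above apply the same peephole: when the expected value c is zero, "(current character & mask) == c" is just a test for no masked bits, so a single test instruction replaces the mov/and_/cmp sequence. Sketch of the equivalence:

#include <cassert>
#include <cstdint>

bool MatchesAfterAnd(uint32_t ch, uint32_t mask, uint32_t c) {
  return (ch & mask) == c;   // general form: and + cmp
}

bool MatchesAfterAndZero(uint32_t ch, uint32_t mask) {
  return (ch & mask) == 0;   // c == 0 special case: one zero test suffices
}

int main() {
  for (uint32_t ch = 0; ch < 256; ++ch) {
    assert(MatchesAfterAnd(ch, 0xDFu, 0) == MatchesAfterAndZero(ch, 0xDFu));
  }
  return 0;
}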
@@ -525,12 +529,51 @@ void RegExpMacroAssemblerIA32::CheckNotCharacterAfterMinusAnd(
Label* on_not_equal) {
ASSERT(minus < String::kMaxUtf16CodeUnit);
__ lea(eax, Operand(current_character(), -minus));
- __ and_(eax, mask);
- __ cmp(eax, c);
+ if (c == 0) {
+ __ test(eax, Immediate(mask));
+ } else {
+ __ and_(eax, mask);
+ __ cmp(eax, c);
+ }
BranchOrBacktrack(not_equal, on_not_equal);
}
+void RegExpMacroAssemblerIA32::CheckCharacterInRange(
+ uc16 from,
+ uc16 to,
+ Label* on_in_range) {
+ __ lea(eax, Operand(current_character(), -from));
+ __ cmp(eax, to - from);
+ BranchOrBacktrack(below_equal, on_in_range);
+}
+
+
+void RegExpMacroAssemblerIA32::CheckCharacterNotInRange(
+ uc16 from,
+ uc16 to,
+ Label* on_not_in_range) {
+ __ lea(eax, Operand(current_character(), -from));
+ __ cmp(eax, to - from);
+ BranchOrBacktrack(above, on_not_in_range);
+}
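
CheckCharacterInRange and CheckCharacterNotInRange above use the usual unsigned-subtraction trick: after biasing by `from`, a single unsigned compare against `to - from` covers both bounds. A compact sketch:

#include <cassert>
#include <cstdint>

bool InRange(uint16_t c, uint16_t from, uint16_t to) {
  // Values below `from` wrap to large unsigned numbers, so one <= test
  // simultaneously checks c >= from and c <= to.
  return static_cast<uint16_t>(c - from) <= static_cast<uint16_t>(to - from);
}

int main() {
  assert(InRange('m', 'a', 'z'));
  assert(!InRange('A', 'a', 'z'));   // below the range: wraps past to - from
  assert(!InRange('{', 'a', 'z'));   // just above the range
  return 0;
}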
+
+
+void RegExpMacroAssemblerIA32::CheckBitInTable(
+ Handle<ByteArray> table,
+ Label* on_bit_set) {
+ __ mov(eax, Immediate(table));
+ Register index = current_character();
+ if (mode_ != ASCII || kTableMask != String::kMaxAsciiCharCode) {
+ __ mov(ebx, kTableSize - 1);
+ __ and_(ebx, current_character());
+ index = ebx;
+ }
+ __ cmpb(FieldOperand(eax, index, times_1, ByteArray::kHeaderSize), 0);
+ BranchOrBacktrack(not_equal, on_bit_set);
+}
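
CheckBitInTable reduces a character-class test to one byte load: the current character, masked to the table size, indexes a ByteArray whose non-zero entries mark class members. A sketch with an assumed 128-entry table mirroring kTableSize/kTableMask:

#include <cassert>
#include <cstdint>

constexpr int kTableSizeSketch = 128;  // assumed, mirrors kTableSize

bool BitInTable(const uint8_t (&table)[kTableSizeSketch], uint16_t ch) {
  return table[ch & (kTableSizeSketch - 1)] != 0;  // cmpb against zero
}

int main() {
  uint8_t digits[kTableSizeSketch] = {0};
  for (char c = '0'; c <= '9'; ++c) digits[static_cast<uint8_t>(c)] = 1;
  assert(BitInTable(digits, '7'));
  assert(!BitInTable(digits, 'x'));
  return 0;
}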
+
+
bool RegExpMacroAssemblerIA32::CheckSpecialCharacterClass(uc16 type,
Label* on_no_match) {
// Range checks (c in min..max) are generally implemented by an unsigned
@@ -659,13 +702,16 @@ bool RegExpMacroAssemblerIA32::CheckSpecialCharacterClass(uc16 type,
void RegExpMacroAssemblerIA32::Fail() {
- ASSERT(FAILURE == 0); // Return value for failure is zero.
- __ Set(eax, Immediate(0));
+ STATIC_ASSERT(FAILURE == 0); // Return value for failure is zero.
+ if (!global()) {
+ __ Set(eax, Immediate(FAILURE));
+ }
__ jmp(&exit_label_);
}
Handle<HeapObject> RegExpMacroAssemblerIA32::GetCode(Handle<String> source) {
+ Label return_eax;
// Finalize code - write the entry point code now we know how many
// registers we need.
@@ -684,6 +730,7 @@ Handle<HeapObject> RegExpMacroAssemblerIA32::GetCode(Handle<String> source) {
__ push(esi);
__ push(edi);
__ push(ebx); // Callee-save on MacOS.
+ __ push(Immediate(0)); // Number of successful matches in a global regexp.
__ push(Immediate(0)); // Make room for "input start - 1" constant.
// Check if we have space on the stack for registers.
@@ -703,13 +750,13 @@ Handle<HeapObject> RegExpMacroAssemblerIA32::GetCode(Handle<String> source) {
// Exit with OutOfMemory exception. There is not enough space on the stack
// for our working registers.
__ mov(eax, EXCEPTION);
- __ jmp(&exit_label_);
+ __ jmp(&return_eax);
__ bind(&stack_limit_hit);
CallCheckStackGuardState(ebx);
__ or_(eax, eax);
// If returned value is non-zero, we exit with the returned value as result.
- __ j(not_zero, &exit_label_);
+ __ j(not_zero, &return_eax);
__ bind(&stack_ok);
// Load start index for later use.
@@ -736,19 +783,8 @@ Handle<HeapObject> RegExpMacroAssemblerIA32::GetCode(Handle<String> source) {
// position registers.
__ mov(Operand(ebp, kInputStartMinusOne), eax);
- if (num_saved_registers_ > 0) { // Always is, if generated from a regexp.
- // Fill saved registers with initial value = start offset - 1
- // Fill in stack push order, to avoid accessing across an unwritten
- // page (a problem on Windows).
- __ mov(ecx, kRegisterZero);
- Label init_loop;
- __ bind(&init_loop);
- __ mov(Operand(ebp, ecx, times_1, +0), eax);
- __ sub(ecx, Immediate(kPointerSize));
- __ cmp(ecx, kRegisterZero - num_saved_registers_ * kPointerSize);
- __ j(greater, &init_loop);
- }
- // Ensure that we have written to each stack page, in order. Skipping a page
+#ifdef WIN32
+ // Ensure that we write to each stack page, in order. Skipping a page
// on Windows can cause segmentation faults. Assuming page size is 4k.
const int kPageSize = 4096;
const int kRegistersPerPage = kPageSize / kPointerSize;
@@ -757,20 +793,45 @@ Handle<HeapObject> RegExpMacroAssemblerIA32::GetCode(Handle<String> source) {
i += kRegistersPerPage) {
__ mov(register_location(i), eax); // One write every page.
}
+#endif // WIN32
+
+ Label load_char_start_regexp, start_regexp;
+ // Load newline if index is at start, previous character otherwise.
+ __ cmp(Operand(ebp, kStartIndex), Immediate(0));
+ __ j(not_equal, &load_char_start_regexp, Label::kNear);
+ __ mov(current_character(), '\n');
+ __ jmp(&start_regexp, Label::kNear);
+
+ // Global regexp restarts matching here.
+ __ bind(&load_char_start_regexp);
+ // Load previous char as initial value of current character register.
+ LoadCurrentCharacterUnchecked(-1, 1);
+ __ bind(&start_regexp);
+ // Initialize on-stack registers.
+ if (num_saved_registers_ > 0) { // Always is, if generated from a regexp.
+ // Fill saved registers with initial value = start offset - 1
+ // Fill in stack push order, to avoid accessing across an unwritten
+ // page (a problem on Windows).
+ if (num_saved_registers_ > 8) {
+ __ mov(ecx, kRegisterZero);
+ Label init_loop;
+ __ bind(&init_loop);
+ __ mov(Operand(ebp, ecx, times_1, 0), eax);
+ __ sub(ecx, Immediate(kPointerSize));
+ __ cmp(ecx, kRegisterZero - num_saved_registers_ * kPointerSize);
+ __ j(greater, &init_loop);
+ } else { // Unroll the loop.
+ for (int i = 0; i < num_saved_registers_; i++) {
+ __ mov(register_location(i), eax);
+ }
+ }
+ }
// Initialize backtrack stack pointer.
__ mov(backtrack_stackpointer(), Operand(ebp, kStackHighEnd));
- // Load previous char as initial value of current-character.
- Label at_start;
- __ cmp(Operand(ebp, kStartIndex), Immediate(0));
- __ j(equal, &at_start);
- LoadCurrentCharacterUnchecked(-1, 1); // Load previous char.
- __ jmp(&start_label_);
- __ bind(&at_start);
- __ mov(current_character(), '\n');
- __ jmp(&start_label_);
+ __ jmp(&start_label_);
// Exit code:
if (success_label_.is_linked()) {
@@ -789,6 +850,10 @@ Handle<HeapObject> RegExpMacroAssemblerIA32::GetCode(Handle<String> source) {
}
for (int i = 0; i < num_saved_registers_; i++) {
__ mov(eax, register_location(i));
+ if (i == 0 && global_with_zero_length_check()) {
+ // Keep capture start in edx for the zero-length check later.
+ __ mov(edx, eax);
+ }
// Convert to index from start of string, not end.
__ add(eax, ecx);
if (mode_ == UC16) {
@@ -797,10 +862,57 @@ Handle<HeapObject> RegExpMacroAssemblerIA32::GetCode(Handle<String> source) {
__ mov(Operand(ebx, i * kPointerSize), eax);
}
}
- __ mov(eax, Immediate(SUCCESS));
+
+ if (global()) {
+ // Restart matching if the regular expression is flagged as global.
+ // Increment success counter.
+ __ inc(Operand(ebp, kSuccessfulCaptures));
+ // Capture results have been stored, so the number of remaining global
+ // output registers is reduced by the number of stored captures.
+ __ mov(ecx, Operand(ebp, kNumOutputRegisters));
+ __ sub(ecx, Immediate(num_saved_registers_));
+ // Check whether we have enough room for another set of capture results.
+ __ cmp(ecx, Immediate(num_saved_registers_));
+ __ j(less, &exit_label_);
+
+ __ mov(Operand(ebp, kNumOutputRegisters), ecx);
+ // Advance the location for output.
+ __ add(Operand(ebp, kRegisterOutput),
+ Immediate(num_saved_registers_ * kPointerSize));
+
+ // Prepare eax to initialize registers with its value in the next run.
+ __ mov(eax, Operand(ebp, kInputStartMinusOne));
+
+ if (global_with_zero_length_check()) {
+ // Special case for zero-length matches.
+ // edx: capture start index
+ __ cmp(edi, edx);
+ // Not a zero-length match, restart.
+ __ j(not_equal, &load_char_start_regexp);
+ // edi (offset from the end) is zero if we already reached the end.
+ __ test(edi, edi);
+ __ j(zero, &exit_label_, Label::kNear);
+ // Advance current position after a zero-length match.
+ if (mode_ == UC16) {
+ __ add(edi, Immediate(2));
+ } else {
+ __ inc(edi);
+ }
+ }
+
+ __ jmp(&load_char_start_regexp);
+ } else {
+ __ mov(eax, Immediate(SUCCESS));
+ }
}
- // Exit and return eax
+
__ bind(&exit_label_);
+ if (global()) {
+ // Return the number of successful captures.
+ __ mov(eax, Operand(ebp, kSuccessfulCaptures));
+ }
+
+ __ bind(&return_eax);
// Skip esp past regexp registers.
__ lea(esp, Operand(ebp, kBackup_ebx));
// Restore callee-save registers.
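
The global-regexp exit path added above implements a restart loop: bump the success counter, check that the output array still has room for another capture set, advance the output pointer, and after a zero-length match step the position forward before jumping back to load_char_start_regexp. A C++ sketch of that control flow under simplified assumptions (single capture pair, byte characters, made-up names):

#include <cstdio>
#include <vector>

struct MatchSketch { int start; int end; };  // one simplified capture set

int RunGlobalSketch(const char* subject, int length,
                    bool (*match_at)(const char*, int, MatchSketch*),
                    std::vector<MatchSketch>* out, int max_capture_sets) {
  int successes = 0;
  int pos = 0;
  while (pos <= length && successes < max_capture_sets) {  // room for output?
    MatchSketch m;
    if (!match_at(subject, pos, &m)) break;  // no further match: stop
    out->push_back(m);                       // store captures, advance output
    ++successes;                             // kSuccessfulCaptures analogue
    // After a zero-length match, advance by one character so matching restarts
    // at a new position instead of looping forever.
    pos = (m.end == m.start) ? m.end + 1 : m.end;
  }
  return successes;  // the generated code returns this count in eax
}

int main() {
  auto match_a = +[](const char* s, int pos, MatchSketch* m) {
    if (s[pos] != 'a') return false;
    m->start = pos;
    m->end = pos + 1;
    return true;
  };
  std::vector<MatchSketch> out;
  std::printf("%d matches\n", RunGlobalSketch("aaab", 4, match_a, &out, 16));  // 3
  return 0;
}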
@@ -830,7 +942,7 @@ Handle<HeapObject> RegExpMacroAssemblerIA32::GetCode(Handle<String> source) {
__ or_(eax, eax);
// If returning non-zero, we should end execution with the given
// result as return value.
- __ j(not_zero, &exit_label_);
+ __ j(not_zero, &return_eax);
__ pop(edi);
__ pop(backtrack_stackpointer());
@@ -877,7 +989,7 @@ Handle<HeapObject> RegExpMacroAssemblerIA32::GetCode(Handle<String> source) {
__ bind(&exit_with_exception);
// Exit with Result EXCEPTION(-1) to signal thrown exception.
__ mov(eax, EXCEPTION);
- __ jmp(&exit_label_);
+ __ jmp(&return_eax);
}
CodeDesc code_desc;
@@ -996,8 +1108,9 @@ void RegExpMacroAssemblerIA32::SetRegister(int register_index, int to) {
}
-void RegExpMacroAssemblerIA32::Succeed() {
+bool RegExpMacroAssemblerIA32::Succeed() {
__ jmp(&success_label_);
+ return global();
}
diff --git a/deps/v8/src/ia32/regexp-macro-assembler-ia32.h b/deps/v8/src/ia32/regexp-macro-assembler-ia32.h
index d504470280..760fadc77d 100644
--- a/deps/v8/src/ia32/regexp-macro-assembler-ia32.h
+++ b/deps/v8/src/ia32/regexp-macro-assembler-ia32.h
@@ -1,4 +1,4 @@
-// Copyright 2008-2009 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -44,7 +44,7 @@ class RegExpMacroAssemblerIA32: public RegExpMacroAssembler {
#else // V8_INTERPRETED_REGEXP
class RegExpMacroAssemblerIA32: public NativeRegExpMacroAssembler {
public:
- RegExpMacroAssemblerIA32(Mode mode, int registers_to_save);
+ RegExpMacroAssemblerIA32(Mode mode, int registers_to_save, Zone* zone);
virtual ~RegExpMacroAssemblerIA32();
virtual int stack_limit_slack();
virtual void AdvanceCurrentPosition(int by);
@@ -69,7 +69,6 @@ class RegExpMacroAssemblerIA32: public NativeRegExpMacroAssembler {
virtual void CheckNotBackReference(int start_reg, Label* on_no_match);
virtual void CheckNotBackReferenceIgnoreCase(int start_reg,
Label* on_no_match);
- virtual void CheckNotRegistersEqual(int reg1, int reg2, Label* on_not_equal);
virtual void CheckNotCharacter(uint32_t c, Label* on_not_equal);
virtual void CheckNotCharacterAfterAnd(uint32_t c,
uint32_t mask,
@@ -78,6 +77,14 @@ class RegExpMacroAssemblerIA32: public NativeRegExpMacroAssembler {
uc16 minus,
uc16 mask,
Label* on_not_equal);
+ virtual void CheckCharacterInRange(uc16 from,
+ uc16 to,
+ Label* on_in_range);
+ virtual void CheckCharacterNotInRange(uc16 from,
+ uc16 to,
+ Label* on_not_in_range);
+ virtual void CheckBitInTable(Handle<ByteArray> table, Label* on_bit_set);
+
// Checks whether the given offset from the current position is before
// the end of the string.
virtual void CheckPosition(int cp_offset, Label* on_outside_input);
@@ -103,7 +110,7 @@ class RegExpMacroAssemblerIA32: public NativeRegExpMacroAssembler {
virtual void ReadStackPointerFromRegister(int reg);
virtual void SetCurrentPositionFromEnd(int by);
virtual void SetRegister(int register_index, int to);
- virtual void Succeed();
+ virtual bool Succeed();
virtual void WriteCurrentPositionToRegister(int reg, int cp_offset);
virtual void ClearRegisters(int reg_from, int reg_to);
virtual void WriteStackPointerToRegister(int reg);
@@ -127,7 +134,11 @@ class RegExpMacroAssemblerIA32: public NativeRegExpMacroAssembler {
static const int kInputStart = kStartIndex + kPointerSize;
static const int kInputEnd = kInputStart + kPointerSize;
static const int kRegisterOutput = kInputEnd + kPointerSize;
- static const int kStackHighEnd = kRegisterOutput + kPointerSize;
+ // For a global regular expression, there is room to store at least one set
+ // of capture results. For a non-global regexp, this value is ignored.
+ static const int kNumOutputRegisters = kRegisterOutput + kPointerSize;
+ static const int kStackHighEnd = kNumOutputRegisters + kPointerSize;
static const int kDirectCall = kStackHighEnd + kPointerSize;
static const int kIsolate = kDirectCall + kPointerSize;
// Below the frame pointer - local stack variables.
@@ -136,7 +147,8 @@ class RegExpMacroAssemblerIA32: public NativeRegExpMacroAssembler {
static const int kBackup_esi = kFramePointer - kPointerSize;
static const int kBackup_edi = kBackup_esi - kPointerSize;
static const int kBackup_ebx = kBackup_edi - kPointerSize;
- static const int kInputStartMinusOne = kBackup_ebx - kPointerSize;
+ static const int kSuccessfulCaptures = kBackup_ebx - kPointerSize;
+ static const int kInputStartMinusOne = kSuccessfulCaptures - kPointerSize;
// First register address. Following registers are below it on the stack.
static const int kRegisterZero = kInputStartMinusOne - kPointerSize;
diff --git a/deps/v8/src/ia32/simulator-ia32.h b/deps/v8/src/ia32/simulator-ia32.h
index 13ddf35cae..478d4ce5cb 100644
--- a/deps/v8/src/ia32/simulator-ia32.h
+++ b/deps/v8/src/ia32/simulator-ia32.h
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -40,12 +40,12 @@ namespace internal {
typedef int (*regexp_matcher)(String*, int, const byte*,
- const byte*, int*, Address, int, Isolate*);
+ const byte*, int*, int, Address, int, Isolate*);
// Call the generated regexp code directly. The code at the entry address should
// expect eight int/pointer sized arguments and return an int.
-#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \
- (FUNCTION_CAST<regexp_matcher>(entry)(p0, p1, p2, p3, p4, p5, p6, p7))
+#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7, p8) \
+ (FUNCTION_CAST<regexp_matcher>(entry)(p0, p1, p2, p3, p4, p5, p6, p7, p8))
#define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
diff --git a/deps/v8/src/ia32/stub-cache-ia32.cc b/deps/v8/src/ia32/stub-cache-ia32.cc
index eb86b2f523..0e4ce20bd6 100644
--- a/deps/v8/src/ia32/stub-cache-ia32.cc
+++ b/deps/v8/src/ia32/stub-cache-ia32.cc
@@ -406,6 +406,7 @@ static void PushInterceptorArguments(MacroAssembler* masm,
__ push(receiver);
__ push(holder);
__ push(FieldOperand(scratch, InterceptorInfo::kDataOffset));
+ __ push(Immediate(reinterpret_cast<int>(masm->isolate())));
}
@@ -419,12 +420,12 @@ static void CompileCallLoadPropertyWithInterceptor(
__ CallExternalReference(
ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
masm->isolate()),
- 5);
+ 6);
}
// Number of pointers to be reserved on stack for fast API call.
-static const int kFastApiCallArguments = 3;
+static const int kFastApiCallArguments = 4;
// Reserves space for the extra arguments to API function in the
@@ -472,10 +473,11 @@ static void GenerateFastApiCall(MacroAssembler* masm,
// -- esp[8] : api function
// (first fast api call extra argument)
// -- esp[12] : api call data
- // -- esp[16] : last argument
+ // -- esp[16] : isolate
+ // -- esp[20] : last argument
// -- ...
- // -- esp[(argc + 3) * 4] : first argument
- // -- esp[(argc + 4) * 4] : receiver
+ // -- esp[(argc + 4) * 4] : first argument
+ // -- esp[(argc + 5) * 4] : receiver
// -----------------------------------
// Get the function and setup the context.
Handle<JSFunction> function = optimization.constant_function();
@@ -493,9 +495,11 @@ static void GenerateFastApiCall(MacroAssembler* masm,
} else {
__ mov(Operand(esp, 3 * kPointerSize), Immediate(call_data));
}
+ __ mov(Operand(esp, 4 * kPointerSize),
+ Immediate(reinterpret_cast<int>(masm->isolate())));
// Prepare arguments.
- __ lea(eax, Operand(esp, 3 * kPointerSize));
+ __ lea(eax, Operand(esp, 4 * kPointerSize));
const int kApiArgc = 1; // API function gets reference to the v8::Arguments.
@@ -679,7 +683,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
__ CallExternalReference(
ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
masm->isolate()),
- 5);
+ 6);
// Restore the name_ register.
__ pop(name_);
@@ -741,10 +745,22 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
Handle<JSObject> object,
int index,
Handle<Map> transition,
+ Handle<String> name,
Register receiver_reg,
Register name_reg,
- Register scratch,
+ Register scratch1,
+ Register scratch2,
Label* miss_label) {
+ LookupResult lookup(masm->isolate());
+ object->Lookup(*name, &lookup);
+ if (lookup.IsFound() && (lookup.IsReadOnly() || !lookup.IsCacheable())) {
+ // In sloppy mode, we could just return the value and be done. However, we
+ // might be in strict mode, where we have to throw. Since we cannot tell,
+ // go into slow case unconditionally.
+ __ jmp(miss_label);
+ return;
+ }
+
// Check that the map of the object hasn't changed.
CompareMapMode mode = transition.is_null() ? ALLOW_ELEMENT_TRANSITION_MAPS
: REQUIRE_EXACT_MAP;
@@ -753,7 +769,32 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
// Perform global security token check if needed.
if (object->IsJSGlobalProxy()) {
- __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
+ __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label);
+ }
+
+ // Check that we are allowed to write this.
+ if (!transition.is_null() && object->GetPrototype()->IsJSObject()) {
+ JSObject* holder;
+ if (lookup.IsFound()) {
+ holder = lookup.holder();
+ } else {
+ // Find the top object.
+ holder = *object;
+ do {
+ holder = JSObject::cast(holder->GetPrototype());
+ } while (holder->GetPrototype()->IsJSObject());
+ }
+ // We need an extra register; push name_reg and restore it afterwards.
+ __ push(name_reg);
+ Label miss_pop, done_check;
+ CheckPrototypes(object, receiver_reg, Handle<JSObject>(holder), name_reg,
+ scratch1, scratch2, name, &miss_pop);
+ __ jmp(&done_check);
+ __ bind(&miss_pop);
+ __ pop(name_reg);
+ __ jmp(miss_label);
+ __ bind(&done_check);
+ __ pop(name_reg);
}
// Stub never generated for non-global objects that require access
@@ -764,11 +805,11 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
if (!transition.is_null() && (object->map()->unused_property_fields() == 0)) {
// The properties must be extended before we can store the value.
// We jump to a runtime call that extends the properties array.
- __ pop(scratch); // Return address.
+ __ pop(scratch1); // Return address.
__ push(receiver_reg);
__ push(Immediate(transition));
__ push(eax);
- __ push(scratch);
+ __ push(scratch1);
__ TailCallExternalReference(
ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
masm->isolate()),
@@ -778,10 +819,19 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
}
if (!transition.is_null()) {
- // Update the map of the object; no write barrier updating is
- // needed because the map is never in new space.
- __ mov(FieldOperand(receiver_reg, HeapObject::kMapOffset),
- Immediate(transition));
+ // Update the map of the object.
+ __ mov(scratch1, Immediate(transition));
+ __ mov(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);
+
+ // Update the write barrier for the map field and pass the now unused
+ // name_reg as scratch register.
+ __ RecordWriteField(receiver_reg,
+ HeapObject::kMapOffset,
+ scratch1,
+ name_reg,
+ kDontSaveFPRegs,
+ OMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
}
// Adjust for the number of properties stored in the object. Even in the
@@ -800,19 +850,19 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
__ RecordWriteField(receiver_reg,
offset,
name_reg,
- scratch,
+ scratch1,
kDontSaveFPRegs);
} else {
// Write to the properties array.
int offset = index * kPointerSize + FixedArray::kHeaderSize;
// Get the properties array (optimistically).
- __ mov(scratch, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
- __ mov(FieldOperand(scratch, offset), eax);
+ __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
+ __ mov(FieldOperand(scratch1, offset), eax);
// Update the write barrier for the array address.
// Pass the value being stored in the now unused name_reg.
__ mov(name_reg, eax);
- __ RecordWriteField(scratch,
+ __ RecordWriteField(scratch1,
offset,
name_reg,
receiver_reg,
@@ -1034,6 +1084,7 @@ void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
} else {
__ push(Immediate(Handle<Object>(callback->data())));
}
+ __ push(Immediate(reinterpret_cast<int>(isolate())));
// Save a pointer to where we pushed the arguments pointer.
// This will be passed as the const AccessorInfo& to the C++ callback.
@@ -1044,9 +1095,9 @@ void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
__ push(scratch3); // Restore return address.
- // 3 elements array for v8::Arguments::values_, handler for name and pointer
+ // 4 elements array for v8::Arguments::values_, handle for name and pointer
// to the values (it is considered a smi by the GC).
- const int kStackSpace = 5;
+ const int kStackSpace = 6;
const int kApiArgc = 2;
__ PrepareCallApiFunction(kApiArgc);
@@ -1110,8 +1161,9 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
compile_followup_inline = true;
} else if (lookup->type() == CALLBACKS &&
lookup->GetCallbackObject()->IsAccessorInfo()) {
- compile_followup_inline =
- AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL;
+ AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
+ compile_followup_inline = callback->getter() != NULL &&
+ callback->IsCompatibleReceiver(*object);
}
}
@@ -1213,6 +1265,7 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
__ push(holder_reg);
__ mov(holder_reg, Immediate(callback));
__ push(FieldOperand(holder_reg, AccessorInfo::kDataOffset));
+ __ push(Immediate(reinterpret_cast<int>(isolate())));
__ push(holder_reg);
__ push(name_reg);
__ push(scratch2); // restore return address
@@ -1220,7 +1273,7 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
ExternalReference ref =
ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
masm()->isolate());
- __ TailCallExternalReference(ref, 5, 1);
+ __ TailCallExternalReference(ref, 6, 1);
}
} else { // !compile_followup_inline
// Call the runtime system to load the interceptor.
@@ -1236,7 +1289,7 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
ExternalReference ref =
ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
isolate());
- __ TailCallExternalReference(ref, 5, 1);
+ __ TailCallExternalReference(ref, 6, 1);
}
}
@@ -1456,16 +1509,31 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ jmp(&fast_object);
// In case of fast smi-only, convert to fast object, otherwise bail out.
__ bind(&not_fast_object);
- __ CheckFastSmiOnlyElements(ebx, &call_builtin);
+ __ CheckFastSmiElements(ebx, &call_builtin);
// edi: elements array
// edx: receiver
// ebx: map
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ Label try_holey_map;
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
FAST_ELEMENTS,
ebx,
edi,
+ &try_holey_map);
+
+ ElementsTransitionGenerator::
+ GenerateMapChangeElementsTransition(masm());
+ // Restore edi.
+ __ mov(edi, FieldOperand(edx, JSArray::kElementsOffset));
+ __ jmp(&fast_object);
+
+ __ bind(&try_holey_map);
+ __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS,
+ FAST_HOLEY_ELEMENTS,
+ ebx,
+ edi,
&call_builtin);
- ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm());
+ ElementsTransitionGenerator::
+ GenerateMapChangeElementsTransition(masm());
// Restore edi.
__ mov(edi, FieldOperand(edx, JSArray::kElementsOffset));
__ bind(&fast_object);
@@ -2174,7 +2242,7 @@ Handle<Code> CallStubCompiler::CompileFastApiCall(
name, depth, &miss);
// Move the return address on top of the stack.
- __ mov(eax, Operand(esp, 3 * kPointerSize));
+ __ mov(eax, Operand(esp, 4 * kPointerSize));
__ mov(Operand(esp, 0 * kPointerSize), eax);
// esp[2 * kPointerSize] is uninitialized, esp[3 * kPointerSize] contains
@@ -2454,8 +2522,13 @@ Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object,
Label miss;
// Generate store field code. Trashes the name register.
- GenerateStoreField(masm(), object, index, transition, edx, ecx, ebx, &miss);
-
+ GenerateStoreField(masm(),
+ object,
+ index,
+ transition,
+ name,
+ edx, ecx, ebx, edi,
+ &miss);
// Handle store cache miss.
__ bind(&miss);
__ mov(ecx, Immediate(name)); // restore name
@@ -2514,6 +2587,52 @@ Handle<Code> StoreStubCompiler::CompileStoreCallback(
}
+Handle<Code> StoreStubCompiler::CompileStoreViaSetter(
+ Handle<JSObject> receiver,
+ Handle<JSFunction> setter,
+ Handle<String> name) {
+ // ----------- S t a t e -------------
+ // -- eax : value
+ // -- ecx : name
+ // -- edx : receiver
+ // -- esp[0] : return address
+ // -----------------------------------
+ Label miss;
+
+ // Check that the map of the object hasn't changed.
+ __ CheckMap(edx, Handle<Map>(receiver->map()), &miss, DO_SMI_CHECK,
+ ALLOW_ELEMENT_TRANSITION_MAPS);
+
+ {
+ FrameScope scope(masm(), StackFrame::INTERNAL);
+
+ // Save value register, so we can restore it later.
+ __ push(eax);
+
+ // Call the JavaScript setter with the receiver and the value on the stack.
+ __ push(edx);
+ __ push(eax);
+ ParameterCount actual(1);
+ __ InvokeFunction(setter, actual, CALL_FUNCTION, NullCallWrapper(),
+ CALL_AS_METHOD);
+
+ // We have to return the passed value, not the return value of the setter.
+ __ pop(eax);
+
+ // Restore context register.
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+ }
+ __ ret(0);
+
+ __ bind(&miss);
+ Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
+ __ jmp(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(CALLBACKS, name);
+}
+
+
Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
Handle<JSObject> receiver,
Handle<String> name) {
@@ -2628,7 +2747,13 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object,
__ j(not_equal, &miss);
// Generate store field code. Trashes the name register.
- GenerateStoreField(masm(), object, index, transition, edx, ecx, ebx, &miss);
+ GenerateStoreField(masm(),
+ object,
+ index,
+ transition,
+ name,
+ edx, ecx, ebx, edi,
+ &miss);
// Handle store cache miss.
__ bind(&miss);
@@ -2703,27 +2828,27 @@ Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name,
Handle<JSObject> object,
Handle<JSObject> last) {
// ----------- S t a t e -------------
- // -- eax : receiver
// -- ecx : name
+ // -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
Label miss;
// Check that the receiver isn't a smi.
- __ JumpIfSmi(eax, &miss);
+ __ JumpIfSmi(edx, &miss);
ASSERT(last->IsGlobalObject() || last->HasFastProperties());
// Check the maps of the full prototype chain. Also check that
// global property cells up to (but not including) the last object
// in the prototype chain are empty.
- CheckPrototypes(object, eax, last, ebx, edx, edi, name, &miss);
+ CheckPrototypes(object, edx, last, ebx, eax, edi, name, &miss);
// If the last object in the prototype chain is a global object,
// check that the global property cell is empty.
if (last->IsGlobalObject()) {
GenerateCheckPropertyCell(
- masm(), Handle<GlobalObject>::cast(last), name, edx, &miss);
+ masm(), Handle<GlobalObject>::cast(last), name, eax, &miss);
}
// Return undefined if maps of the full prototype chain are still the
@@ -2744,13 +2869,13 @@ Handle<Code> LoadStubCompiler::CompileLoadField(Handle<JSObject> object,
int index,
Handle<String> name) {
// ----------- S t a t e -------------
- // -- eax : receiver
// -- ecx : name
+ // -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
Label miss;
- GenerateLoadField(object, holder, eax, ebx, edx, edi, index, name, &miss);
+ GenerateLoadField(object, holder, edx, ebx, eax, edi, index, name, &miss);
__ bind(&miss);
GenerateLoadMiss(masm(), Code::LOAD_IC);
@@ -2765,13 +2890,13 @@ Handle<Code> LoadStubCompiler::CompileLoadCallback(
Handle<JSObject> holder,
Handle<AccessorInfo> callback) {
// ----------- S t a t e -------------
- // -- eax : receiver
// -- ecx : name
+ // -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
Label miss;
- GenerateLoadCallback(object, holder, eax, ecx, ebx, edx, edi, callback,
+ GenerateLoadCallback(object, holder, edx, ecx, ebx, eax, edi, callback,
name, &miss);
__ bind(&miss);
GenerateLoadMiss(masm(), Code::LOAD_IC);
@@ -2781,18 +2906,56 @@ Handle<Code> LoadStubCompiler::CompileLoadCallback(
}
+Handle<Code> LoadStubCompiler::CompileLoadViaGetter(
+ Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSObject> holder,
+ Handle<JSFunction> getter) {
+ // ----------- S t a t e -------------
+ // -- ecx : name
+ // -- edx : receiver
+ // -- esp[0] : return address
+ // -----------------------------------
+ Label miss;
+
+ // Check that the maps haven't changed.
+ __ JumpIfSmi(edx, &miss);
+ CheckPrototypes(receiver, edx, holder, ebx, eax, edi, name, &miss);
+
+ {
+ FrameScope scope(masm(), StackFrame::INTERNAL);
+
+ // Call the JavaScript getter with the receiver on the stack.
+ __ push(edx);
+ ParameterCount actual(0);
+ __ InvokeFunction(getter, actual, CALL_FUNCTION, NullCallWrapper(),
+ CALL_AS_METHOD);
+
+ // Restore context register.
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+ }
+ __ ret(0);
+
+ __ bind(&miss);
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(CALLBACKS, name);
+}
+
+
Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object,
Handle<JSObject> holder,
Handle<JSFunction> value,
Handle<String> name) {
// ----------- S t a t e -------------
- // -- eax : receiver
// -- ecx : name
+ // -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
Label miss;
- GenerateLoadConstant(object, holder, eax, ebx, edx, edi, value, name, &miss);
+ GenerateLoadConstant(object, holder, edx, ebx, eax, edi, value, name, &miss);
__ bind(&miss);
GenerateLoadMiss(masm(), Code::LOAD_IC);
@@ -2805,8 +2968,8 @@ Handle<Code> LoadStubCompiler::CompileLoadInterceptor(Handle<JSObject> receiver,
Handle<JSObject> holder,
Handle<String> name) {
// ----------- S t a t e -------------
- // -- eax : receiver
// -- ecx : name
+ // -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
Label miss;
@@ -2816,7 +2979,7 @@ Handle<Code> LoadStubCompiler::CompileLoadInterceptor(Handle<JSObject> receiver,
// TODO(368): Compile in the whole chain: all the interceptors in
// prototypes and ultimate answer.
- GenerateLoadInterceptor(receiver, holder, &lookup, eax, ecx, edx, ebx, edi,
+ GenerateLoadInterceptor(receiver, holder, &lookup, edx, ecx, eax, ebx, edi,
name, &miss);
__ bind(&miss);
@@ -2834,15 +2997,15 @@ Handle<Code> LoadStubCompiler::CompileLoadGlobal(
Handle<String> name,
bool is_dont_delete) {
// ----------- S t a t e -------------
- // -- eax : receiver
// -- ecx : name
+ // -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
Label miss;
// Check that the maps haven't changed.
- __ JumpIfSmi(eax, &miss);
- CheckPrototypes(object, eax, holder, ebx, edx, edi, name, &miss);
+ __ JumpIfSmi(edx, &miss);
+ CheckPrototypes(object, edx, holder, ebx, eax, edi, name, &miss);
// Get the value from the cell.
if (Serializer::enabled()) {
@@ -2880,7 +3043,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name,
Handle<JSObject> holder,
int index) {
// ----------- S t a t e -------------
- // -- eax : key
+ // -- ecx : key
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
@@ -2890,10 +3053,10 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name,
__ IncrementCounter(counters->keyed_load_field(), 1);
// Check that the name has not changed.
- __ cmp(eax, Immediate(name));
+ __ cmp(ecx, Immediate(name));
__ j(not_equal, &miss);
- GenerateLoadField(receiver, holder, edx, ebx, ecx, edi, index, name, &miss);
+ GenerateLoadField(receiver, holder, edx, ebx, eax, edi, index, name, &miss);
__ bind(&miss);
__ DecrementCounter(counters->keyed_load_field(), 1);
@@ -2910,7 +3073,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadCallback(
Handle<JSObject> holder,
Handle<AccessorInfo> callback) {
// ----------- S t a t e -------------
- // -- eax : key
+ // -- ecx : key
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
@@ -2920,10 +3083,10 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadCallback(
__ IncrementCounter(counters->keyed_load_callback(), 1);
// Check that the name has not changed.
- __ cmp(eax, Immediate(name));
+ __ cmp(ecx, Immediate(name));
__ j(not_equal, &miss);
- GenerateLoadCallback(receiver, holder, edx, eax, ebx, ecx, edi, callback,
+ GenerateLoadCallback(receiver, holder, edx, ecx, ebx, eax, edi, callback,
name, &miss);
__ bind(&miss);
@@ -2941,7 +3104,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant(
Handle<JSObject> holder,
Handle<JSFunction> value) {
// ----------- S t a t e -------------
- // -- eax : key
+ // -- ecx : key
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
@@ -2951,11 +3114,11 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant(
__ IncrementCounter(counters->keyed_load_constant_function(), 1);
// Check that the name has not changed.
- __ cmp(eax, Immediate(name));
+ __ cmp(ecx, Immediate(name));
__ j(not_equal, &miss);
GenerateLoadConstant(
- receiver, holder, edx, ebx, ecx, edi, value, name, &miss);
+ receiver, holder, edx, ebx, eax, edi, value, name, &miss);
__ bind(&miss);
__ DecrementCounter(counters->keyed_load_constant_function(), 1);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
@@ -2970,7 +3133,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadInterceptor(
Handle<JSObject> holder,
Handle<String> name) {
// ----------- S t a t e -------------
- // -- eax : key
+ // -- ecx : key
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
@@ -2980,12 +3143,12 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadInterceptor(
__ IncrementCounter(counters->keyed_load_interceptor(), 1);
// Check that the name has not changed.
- __ cmp(eax, Immediate(name));
+ __ cmp(ecx, Immediate(name));
__ j(not_equal, &miss);
LookupResult lookup(isolate());
LookupPostInterceptor(holder, name, &lookup);
- GenerateLoadInterceptor(receiver, holder, &lookup, edx, eax, ecx, ebx, edi,
+ GenerateLoadInterceptor(receiver, holder, &lookup, edx, ecx, eax, ebx, edi,
name, &miss);
__ bind(&miss);
__ DecrementCounter(counters->keyed_load_interceptor(), 1);
@@ -2999,7 +3162,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadInterceptor(
Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength(
Handle<String> name) {
// ----------- S t a t e -------------
- // -- eax : key
+ // -- ecx : key
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
@@ -3009,10 +3172,10 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength(
__ IncrementCounter(counters->keyed_load_array_length(), 1);
// Check that the name has not changed.
- __ cmp(eax, Immediate(name));
+ __ cmp(ecx, Immediate(name));
__ j(not_equal, &miss);
- GenerateLoadArrayLength(masm(), edx, ecx, &miss);
+ GenerateLoadArrayLength(masm(), edx, eax, &miss);
__ bind(&miss);
__ DecrementCounter(counters->keyed_load_array_length(), 1);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
@@ -3025,7 +3188,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength(
Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength(
Handle<String> name) {
// ----------- S t a t e -------------
- // -- eax : key
+ // -- ecx : key
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
@@ -3035,10 +3198,10 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength(
__ IncrementCounter(counters->keyed_load_string_length(), 1);
// Check that the name has not changed.
- __ cmp(eax, Immediate(name));
+ __ cmp(ecx, Immediate(name));
__ j(not_equal, &miss);
- GenerateLoadStringLength(masm(), edx, ecx, ebx, &miss, true);
+ GenerateLoadStringLength(masm(), edx, eax, ebx, &miss, true);
__ bind(&miss);
__ DecrementCounter(counters->keyed_load_string_length(), 1);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
@@ -3051,7 +3214,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength(
Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype(
Handle<String> name) {
// ----------- S t a t e -------------
- // -- eax : key
+ // -- ecx : key
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
@@ -3061,10 +3224,10 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype(
__ IncrementCounter(counters->keyed_load_function_prototype(), 1);
// Check that the name has not changed.
- __ cmp(eax, Immediate(name));
+ __ cmp(ecx, Immediate(name));
__ j(not_equal, &miss);
- GenerateLoadFunctionPrototype(masm(), edx, ecx, ebx, &miss);
+ GenerateLoadFunctionPrototype(masm(), edx, eax, ebx, &miss);
__ bind(&miss);
__ DecrementCounter(counters->keyed_load_function_prototype(), 1);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
@@ -3077,7 +3240,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype(
Handle<Code> KeyedLoadStubCompiler::CompileLoadElement(
Handle<Map> receiver_map) {
// ----------- S t a t e -------------
- // -- eax : key
+ // -- ecx : key
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
@@ -3098,7 +3261,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadPolymorphic(
MapHandleList* receiver_maps,
CodeHandleList* handler_ics) {
// ----------- S t a t e -------------
- // -- eax : key
+ // -- ecx : key
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
@@ -3262,7 +3425,7 @@ Handle<Code> ConstructStubCompiler::CompileConstructStub(
void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
MacroAssembler* masm) {
// ----------- S t a t e -------------
- // -- eax : key
+ // -- ecx : key
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
@@ -3270,21 +3433,15 @@ void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
- __ JumpIfNotSmi(eax, &miss_force_generic);
- __ mov(ebx, eax);
+ __ JumpIfNotSmi(ecx, &miss_force_generic);
+ __ mov(ebx, ecx);
__ SmiUntag(ebx);
- __ mov(ecx, FieldOperand(edx, JSObject::kElementsOffset));
+ __ mov(eax, FieldOperand(edx, JSObject::kElementsOffset));
// Push receiver on the stack to free up a register for the dictionary
// probing.
__ push(edx);
- __ LoadFromNumberDictionary(&slow,
- ecx,
- eax,
- ebx,
- edx,
- edi,
- eax);
+ __ LoadFromNumberDictionary(&slow, eax, ecx, ebx, edx, edi, eax);
// Pop receiver before returning.
__ pop(edx);
__ ret(0);
@@ -3293,7 +3450,6 @@ void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
__ pop(edx);
// ----------- S t a t e -------------
- // -- eax : value
// -- ecx : key
// -- edx : receiver
// -- esp[0] : return address
@@ -3305,7 +3461,6 @@ void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
__ bind(&miss_force_generic);
// ----------- S t a t e -------------
- // -- eax : value
// -- ecx : key
// -- edx : receiver
// -- esp[0] : return address
@@ -3317,11 +3472,44 @@ void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
}
+static void GenerateSmiKeyCheck(MacroAssembler* masm,
+ Register key,
+ Register scratch,
+ XMMRegister xmm_scratch0,
+ XMMRegister xmm_scratch1,
+ Label* fail) {
+ // Check that key is a smi and if SSE2 is available a heap number
+ // containing a smi and branch if the check fails.
+ if (CpuFeatures::IsSupported(SSE2)) {
+ CpuFeatures::Scope use_sse2(SSE2);
+ Label key_ok;
+ __ JumpIfSmi(key, &key_ok);
+ __ cmp(FieldOperand(key, HeapObject::kMapOffset),
+ Immediate(Handle<Map>(masm->isolate()->heap()->heap_number_map())));
+ __ j(not_equal, fail);
+ __ movdbl(xmm_scratch0, FieldOperand(key, HeapNumber::kValueOffset));
+ __ cvttsd2si(scratch, Operand(xmm_scratch0));
+ __ cvtsi2sd(xmm_scratch1, scratch);
+ __ ucomisd(xmm_scratch1, xmm_scratch0);
+ __ j(not_equal, fail);
+ __ j(parity_even, fail); // NaN.
+ // Check if the key fits in the smi range.
+ __ cmp(scratch, 0xc0000000);
+ __ j(sign, fail);
+ __ SmiTag(scratch);
+ __ mov(key, scratch);
+ __ bind(&key_ok);
+ } else {
+ __ JumpIfNotSmi(key, fail);
+ }
+}
+
+
void KeyedLoadStubCompiler::GenerateLoadExternalArray(
MacroAssembler* masm,
ElementsKind elements_kind) {
// ----------- S t a t e -------------
- // -- eax : key
+ // -- ecx : key
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
@@ -3330,41 +3518,41 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
- // Check that the key is a smi.
- __ JumpIfNotSmi(eax, &miss_force_generic);
+ // Check that the key is a smi or a heap number convertible to a smi.
+ GenerateSmiKeyCheck(masm, ecx, eax, xmm0, xmm1, &miss_force_generic);
// Check that the index is in range.
__ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
- __ cmp(eax, FieldOperand(ebx, ExternalArray::kLengthOffset));
+ __ cmp(ecx, FieldOperand(ebx, ExternalArray::kLengthOffset));
// Unsigned comparison catches both negative and too-large values.
__ j(above_equal, &miss_force_generic);
__ mov(ebx, FieldOperand(ebx, ExternalArray::kExternalPointerOffset));
// ebx: base pointer of external storage
switch (elements_kind) {
case EXTERNAL_BYTE_ELEMENTS:
- __ SmiUntag(eax); // Untag the index.
- __ movsx_b(eax, Operand(ebx, eax, times_1, 0));
+ __ SmiUntag(ecx); // Untag the index.
+ __ movsx_b(eax, Operand(ebx, ecx, times_1, 0));
break;
case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
case EXTERNAL_PIXEL_ELEMENTS:
- __ SmiUntag(eax); // Untag the index.
- __ movzx_b(eax, Operand(ebx, eax, times_1, 0));
+ __ SmiUntag(ecx); // Untag the index.
+ __ movzx_b(eax, Operand(ebx, ecx, times_1, 0));
break;
case EXTERNAL_SHORT_ELEMENTS:
- __ movsx_w(eax, Operand(ebx, eax, times_1, 0));
+ __ movsx_w(eax, Operand(ebx, ecx, times_1, 0));
break;
case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
- __ movzx_w(eax, Operand(ebx, eax, times_1, 0));
+ __ movzx_w(eax, Operand(ebx, ecx, times_1, 0));
break;
case EXTERNAL_UNSIGNED_INT_ELEMENTS:
case EXTERNAL_INT_ELEMENTS:
- __ mov(ecx, Operand(ebx, eax, times_2, 0));
+ __ mov(eax, Operand(ebx, ecx, times_2, 0));
break;
case EXTERNAL_FLOAT_ELEMENTS:
- __ fld_s(Operand(ebx, eax, times_2, 0));
+ __ fld_s(Operand(ebx, ecx, times_2, 0));
break;
case EXTERNAL_DOUBLE_ELEMENTS:
- __ fld_d(Operand(ebx, eax, times_4, 0));
+ __ fld_d(Operand(ebx, ecx, times_4, 0));
break;
default:
UNREACHABLE();
@@ -3372,7 +3560,7 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
}
// For integer array types:
- // ecx: value
+ // eax: value
// For floating-point array type:
// FP(0): value
@@ -3383,18 +3571,17 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
// it to a HeapNumber.
Label box_int;
if (elements_kind == EXTERNAL_INT_ELEMENTS) {
- __ cmp(ecx, 0xC0000000);
+ __ cmp(eax, 0xc0000000);
__ j(sign, &box_int);
} else {
ASSERT_EQ(EXTERNAL_UNSIGNED_INT_ELEMENTS, elements_kind);
// The test is different for unsigned int values. Since we need
// the value to be in the range of a positive smi, we can't
// handle either of the top two bits being set in the value.
- __ test(ecx, Immediate(0xC0000000));
+ __ test(eax, Immediate(0xc0000000));
__ j(not_zero, &box_int);
}
- __ mov(eax, ecx);
__ SmiTag(eax);
__ ret(0);
@@ -3403,33 +3590,31 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
// Allocate a HeapNumber for the int and perform int-to-double
// conversion.
if (elements_kind == EXTERNAL_INT_ELEMENTS) {
- __ push(ecx);
+ __ push(eax);
__ fild_s(Operand(esp, 0));
- __ pop(ecx);
+ __ pop(eax);
} else {
ASSERT_EQ(EXTERNAL_UNSIGNED_INT_ELEMENTS, elements_kind);
// Need to zero-extend the value.
// There's no fild variant for unsigned values, so zero-extend
// to a 64-bit int manually.
__ push(Immediate(0));
- __ push(ecx);
+ __ push(eax);
__ fild_d(Operand(esp, 0));
- __ pop(ecx);
- __ pop(ecx);
+ __ pop(eax);
+ __ pop(eax);
}
// FP(0): value
- __ AllocateHeapNumber(ecx, ebx, edi, &failed_allocation);
+ __ AllocateHeapNumber(eax, ebx, edi, &failed_allocation);
// Set the value.
- __ mov(eax, ecx);
__ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
__ ret(0);
} else if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
// For the floating-point array type, we need to always allocate a
// HeapNumber.
- __ AllocateHeapNumber(ecx, ebx, edi, &failed_allocation);
+ __ AllocateHeapNumber(eax, ebx, edi, &failed_allocation);
// Set the value.
- __ mov(eax, ecx);
__ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
__ ret(0);
} else {
@@ -3449,7 +3634,7 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
__ IncrementCounter(counters->keyed_load_external_array_slow(), 1);
// ----------- S t a t e -------------
- // -- eax : key
+ // -- ecx : key
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
@@ -3458,7 +3643,7 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
__ jmp(ic, RelocInfo::CODE_TARGET);
// ----------- S t a t e -------------
- // -- eax : key
+ // -- ecx : key
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
@@ -3475,7 +3660,8 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
MacroAssembler* masm,
ElementsKind elements_kind) {
// ----------- S t a t e -------------
- // -- eax : key
+ // -- eax : value
+ // -- ecx : key
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
@@ -3484,8 +3670,8 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
- // Check that the key is a smi.
- __ JumpIfNotSmi(ecx, &miss_force_generic);
+ // Check that the key is a smi or a heap number convertible to a smi.
+ GenerateSmiKeyCheck(masm, ecx, ebx, xmm0, xmm1, &miss_force_generic);
// Check that the index is in range.
__ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
@@ -3580,12 +3766,39 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
// (code-stubs-ia32.cc) is roughly what is needed here though the
// conversion failure case does not need to be handled.
if (CpuFeatures::IsSupported(SSE2)) {
- if (elements_kind != EXTERNAL_INT_ELEMENTS &&
- elements_kind != EXTERNAL_UNSIGNED_INT_ELEMENTS) {
+ if ((elements_kind == EXTERNAL_INT_ELEMENTS ||
+ elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) &&
+ CpuFeatures::IsSupported(SSE3)) {
+ CpuFeatures::Scope scope(SSE3);
+ // fisttp stores values as signed integers. To represent the
+ // entire range of int and unsigned int arrays, store as a
+ // 64-bit int and discard the high 32 bits.
+ __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
+ __ sub(esp, Immediate(2 * kPointerSize));
+ __ fisttp_d(Operand(esp, 0));
+
+ // If conversion failed (NaN, infinity, or a number outside
+ // signed int64 range), the result is 0x8000000000000000, and
+ // we must handle this case in the runtime.
+ Label ok;
+ __ cmp(Operand(esp, kPointerSize), Immediate(0x80000000u));
+ __ j(not_equal, &ok);
+ __ cmp(Operand(esp, 0), Immediate(0));
+ __ j(not_equal, &ok);
+ __ add(esp, Immediate(2 * kPointerSize)); // Restore the stack.
+ __ jmp(&slow);
+
+ __ bind(&ok);
+ __ pop(ebx);
+ __ add(esp, Immediate(kPointerSize));
+ __ mov(Operand(edi, ecx, times_2, 0), ebx);
+ } else {
ASSERT(CpuFeatures::IsSupported(SSE2));
CpuFeatures::Scope scope(SSE2);
__ cvttsd2si(ebx, FieldOperand(eax, HeapNumber::kValueOffset));
- // ecx: untagged integer value
+ __ cmp(ebx, 0x80000000u);
+ __ j(equal, &slow);
+ // ebx: untagged integer value
switch (elements_kind) {
case EXTERNAL_PIXEL_ELEMENTS:
__ ClampUint8(ebx);
@@ -3599,41 +3812,14 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
__ mov_w(Operand(edi, ecx, times_1, 0), ebx);
break;
+ case EXTERNAL_INT_ELEMENTS:
+ case EXTERNAL_UNSIGNED_INT_ELEMENTS:
+ __ mov(Operand(edi, ecx, times_2, 0), ebx);
+ break;
default:
UNREACHABLE();
break;
}
- } else {
- if (CpuFeatures::IsSupported(SSE3)) {
- CpuFeatures::Scope scope(SSE3);
- // fisttp stores values as signed integers. To represent the
- // entire range of int and unsigned int arrays, store as a
- // 64-bit int and discard the high 32 bits.
- // If the value is NaN or +/-infinity, the result is 0x80000000,
- // which is automatically zero when taken mod 2^n, n < 32.
- __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
- __ sub(esp, Immediate(2 * kPointerSize));
- __ fisttp_d(Operand(esp, 0));
- __ pop(ebx);
- __ add(esp, Immediate(kPointerSize));
- } else {
- ASSERT(CpuFeatures::IsSupported(SSE2));
- CpuFeatures::Scope scope(SSE2);
- // We can easily implement the correct rounding behavior for the
- // range [0, 2^31-1]. For the time being, to keep this code simple,
- // make the slow runtime call for values outside this range.
- // Note: we could do better for signed int arrays.
- __ movd(xmm0, FieldOperand(eax, HeapNumber::kValueOffset));
- // We will need the key if we have to make the slow runtime call.
- __ push(ebx);
- __ LoadPowerOf2(xmm1, ebx, 31);
- __ pop(ebx);
- __ ucomisd(xmm1, xmm0);
- __ j(above_equal, &slow);
- __ cvttsd2si(ebx, Operand(xmm0));
- }
- // ebx: untagged integer value
- __ mov(Operand(edi, ecx, times_2, 0), ebx);
}
__ ret(0); // Return original value.
}
@@ -3671,7 +3857,7 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
// ----------- S t a t e -------------
- // -- eax : key
+ // -- ecx : key
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
@@ -3680,19 +3866,19 @@ void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
- // Check that the key is a smi.
- __ JumpIfNotSmi(eax, &miss_force_generic);
+ // Check that the key is a smi or a heap number convertible to a smi.
+ GenerateSmiKeyCheck(masm, ecx, eax, xmm0, xmm1, &miss_force_generic);
// Get the elements array.
- __ mov(ecx, FieldOperand(edx, JSObject::kElementsOffset));
- __ AssertFastElements(ecx);
+ __ mov(eax, FieldOperand(edx, JSObject::kElementsOffset));
+ __ AssertFastElements(eax);
// Check that the key is within bounds.
- __ cmp(eax, FieldOperand(ecx, FixedArray::kLengthOffset));
+ __ cmp(ecx, FieldOperand(eax, FixedArray::kLengthOffset));
__ j(above_equal, &miss_force_generic);
// Load the result and make sure it's not the hole.
- __ mov(ebx, Operand(ecx, eax, times_2,
+ __ mov(ebx, Operand(eax, ecx, times_2,
FixedArray::kHeaderSize - kHeapObjectTag));
__ cmp(ebx, masm->isolate()->factory()->the_hole_value());
__ j(equal, &miss_force_generic);
@@ -3709,7 +3895,7 @@ void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
MacroAssembler* masm) {
// ----------- S t a t e -------------
- // -- eax : key
+ // -- ecx : key
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
@@ -3718,39 +3904,38 @@ void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
- // Check that the key is a smi.
- __ JumpIfNotSmi(eax, &miss_force_generic);
+ // Check that the key is a smi or a heap number convertible to a smi.
+ GenerateSmiKeyCheck(masm, ecx, eax, xmm0, xmm1, &miss_force_generic);
// Get the elements array.
- __ mov(ecx, FieldOperand(edx, JSObject::kElementsOffset));
- __ AssertFastElements(ecx);
+ __ mov(eax, FieldOperand(edx, JSObject::kElementsOffset));
+ __ AssertFastElements(eax);
// Check that the key is within bounds.
- __ cmp(eax, FieldOperand(ecx, FixedDoubleArray::kLengthOffset));
+ __ cmp(ecx, FieldOperand(eax, FixedDoubleArray::kLengthOffset));
__ j(above_equal, &miss_force_generic);
// Check for the hole
uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
- __ cmp(FieldOperand(ecx, eax, times_4, offset), Immediate(kHoleNanUpper32));
+ __ cmp(FieldOperand(eax, ecx, times_4, offset), Immediate(kHoleNanUpper32));
__ j(equal, &miss_force_generic);
// Always allocate a heap number for the result.
if (CpuFeatures::IsSupported(SSE2)) {
CpuFeatures::Scope use_sse2(SSE2);
- __ movdbl(xmm0, FieldOperand(ecx, eax, times_4,
+ __ movdbl(xmm0, FieldOperand(eax, ecx, times_4,
FixedDoubleArray::kHeaderSize));
} else {
- __ fld_d(FieldOperand(ecx, eax, times_4, FixedDoubleArray::kHeaderSize));
+ __ fld_d(FieldOperand(eax, ecx, times_4, FixedDoubleArray::kHeaderSize));
}
- __ AllocateHeapNumber(ecx, ebx, edi, &slow_allocate_heapnumber);
+ __ AllocateHeapNumber(eax, ebx, edi, &slow_allocate_heapnumber);
// Set the value.
if (CpuFeatures::IsSupported(SSE2)) {
CpuFeatures::Scope use_sse2(SSE2);
- __ movdbl(FieldOperand(ecx, HeapNumber::kValueOffset), xmm0);
+ __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
} else {
- __ fstp_d(FieldOperand(ecx, HeapNumber::kValueOffset));
+ __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
}
- __ mov(eax, ecx);
__ ret(0);
__ bind(&slow_allocate_heapnumber);
@@ -3787,10 +3972,10 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
- // Check that the key is a smi.
- __ JumpIfNotSmi(ecx, &miss_force_generic);
+ // Check that the key is a smi or a heap number convertible to a smi.
+ GenerateSmiKeyCheck(masm, ecx, ebx, xmm0, xmm1, &miss_force_generic);
- if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
+ if (IsFastSmiElementsKind(elements_kind)) {
__ JumpIfNotSmi(eax, &transition_elements_kind);
}
@@ -3815,7 +4000,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
__ j(not_equal, &miss_force_generic);
__ bind(&finish_store);
- if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
+ if (IsFastSmiElementsKind(elements_kind)) {
// ecx is a smi, use times_half_pointer_size instead of
// times_pointer_size
__ mov(FieldOperand(edi,
@@ -3823,7 +4008,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
times_half_pointer_size,
FixedArray::kHeaderSize), eax);
} else {
- ASSERT(elements_kind == FAST_ELEMENTS);
+ ASSERT(IsFastObjectElementsKind(elements_kind));
// Do the store and update the write barrier.
// ecx is a smi, use times_half_pointer_size instead of
// times_pointer_size
@@ -3942,8 +4127,8 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
- // Check that the key is a smi.
- __ JumpIfNotSmi(ecx, &miss_force_generic);
+ // Check that the key is a smi or a heap number convertible to a smi.
+ GenerateSmiKeyCheck(masm, ecx, ebx, xmm0, xmm1, &miss_force_generic);
// Get the elements array.
__ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
@@ -4004,6 +4189,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements);
__ AllocateInNewSpace(size, edi, ebx, ecx, &prepare_slow, TAG_OBJECT);
+
// Restore the key, which is known to be the array length.
__ mov(ecx, Immediate(0));
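The ia32 hunks above move the keyed-load key from eax to ecx (freeing eax for the result) and add GenerateSmiKeyCheck, so keyed loads and stores now also accept heap-number keys that hold an exact smi value; anything else still jumps to the miss/force-generic path. As a rough, portable C++ model of that acceptance test (illustrative only, not V8 code; the stub does the range check after truncation, reordered here to keep the C++ well defined):

#include <cstdint>

// Returns true and writes the untagged index when a double key would pass
// GenerateSmiKeyCheck: not NaN, no fractional part, and inside the 31-bit
// smi range enforced by the `cmp scratch, 0xc0000000; j(sign, fail)` pair.
bool DoubleKeyConvertsToSmi(double key, int32_t* index) {
  // Range test first (this also rejects NaN, since comparisons with NaN fail).
  if (!(key >= -1073741824.0 && key <= 1073741823.0)) return false;
  int32_t truncated = static_cast<int32_t>(key);             // cvttsd2si
  if (static_cast<double>(truncated) != key) return false;   // cvtsi2sd + ucomisd
  *index = truncated;
  return true;
}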
diff --git a/deps/v8/src/ic.cc b/deps/v8/src/ic.cc
index b8d4b40bc9..d7f0f3251c 100644
--- a/deps/v8/src/ic.cc
+++ b/deps/v8/src/ic.cc
@@ -352,9 +352,9 @@ void IC::Clear(Address address) {
return KeyedStoreIC::Clear(address, target);
case Code::CALL_IC: return CallIC::Clear(address, target);
case Code::KEYED_CALL_IC: return KeyedCallIC::Clear(address, target);
+ case Code::COMPARE_IC: return CompareIC::Clear(address, target);
case Code::UNARY_OP_IC:
case Code::BINARY_OP_IC:
- case Code::COMPARE_IC:
case Code::TO_BOOLEAN_IC:
// Clearing these is tricky and does not
// make any performance difference.
@@ -365,9 +365,8 @@ void IC::Clear(Address address) {
void CallICBase::Clear(Address address, Code* target) {
+ if (target->ic_state() == UNINITIALIZED) return;
bool contextual = CallICBase::Contextual::decode(target->extra_ic_state());
- State state = target->ic_state();
- if (state == UNINITIALIZED) return;
Code* code =
Isolate::Current()->stub_cache()->FindCallInitialize(
target->arguments_count(),
@@ -410,6 +409,17 @@ void KeyedStoreIC::Clear(Address address, Code* target) {
}
+void CompareIC::Clear(Address address, Code* target) {
+ // Only clear ICCompareStubs; we currently cannot clear generic CompareStubs.
+ if (target->major_key() != CodeStub::CompareIC) return;
+ // Only clear CompareICs that can retain objects.
+ if (target->compare_state() != KNOWN_OBJECTS) return;
+ Token::Value op = CompareIC::ComputeOperation(target);
+ SetTargetAtAddress(address, GetRawUninitialized(op));
+ PatchInlinedSmiCode(address, DISABLE_INLINED_SMI_CHECK);
+}
+
+
static bool HasInterceptorGetter(JSObject* object) {
return !object->GetNamedInterceptor()->getter()->IsUndefined();
}
@@ -978,13 +988,25 @@ void LoadIC::UpdateCaches(LookupResult* lookup,
}
break;
case CALLBACKS: {
- Handle<Object> callback_object(lookup->GetCallbackObject());
- if (!callback_object->IsAccessorInfo()) return;
- Handle<AccessorInfo> callback =
- Handle<AccessorInfo>::cast(callback_object);
- if (v8::ToCData<Address>(callback->getter()) == 0) return;
- code = isolate()->stub_cache()->ComputeLoadCallback(
- name, receiver, holder, callback);
+ Handle<Object> callback(lookup->GetCallbackObject());
+ if (callback->IsAccessorInfo()) {
+ Handle<AccessorInfo> info = Handle<AccessorInfo>::cast(callback);
+ if (v8::ToCData<Address>(info->getter()) == 0) return;
+ if (!info->IsCompatibleReceiver(*receiver)) return;
+ code = isolate()->stub_cache()->ComputeLoadCallback(
+ name, receiver, holder, info);
+ } else if (callback->IsAccessorPair()) {
+ Handle<Object> getter(Handle<AccessorPair>::cast(callback)->getter());
+ if (!getter->IsJSFunction()) return;
+ if (holder->IsGlobalObject()) return;
+ if (!receiver->HasFastProperties()) return;
+ code = isolate()->stub_cache()->ComputeLoadViaGetter(
+ name, receiver, holder, Handle<JSFunction>::cast(getter));
+ } else {
+ ASSERT(callback->IsForeign());
+ // No IC support for old-style native accessors.
+ return;
+ }
break;
}
case INTERCEPTOR:
@@ -1053,18 +1075,33 @@ Handle<Code> KeyedLoadIC::ComputePolymorphicStub(
}
+static Handle<Object> TryConvertKey(Handle<Object> key, Isolate* isolate) {
+ // This helper implements a few common fast cases for converting
+ // non-smi keys of keyed loads/stores to a smi or a string.
+ if (key->IsHeapNumber()) {
+ double value = Handle<HeapNumber>::cast(key)->value();
+ if (isnan(value)) {
+ key = isolate->factory()->nan_symbol();
+ } else {
+ int int_value = FastD2I(value);
+ if (value == int_value && Smi::IsValid(int_value)) {
+ key = Handle<Smi>(Smi::FromInt(int_value));
+ }
+ }
+ } else if (key->IsUndefined()) {
+ key = isolate->factory()->undefined_symbol();
+ }
+ return key;
+}
+
+
MaybeObject* KeyedLoadIC::Load(State state,
Handle<Object> object,
Handle<Object> key,
bool force_generic_stub) {
- // Check for values that can be converted into a symbol.
- // TODO(1295): Remove this code.
- if (key->IsHeapNumber() &&
- isnan(Handle<HeapNumber>::cast(key)->value())) {
- key = isolate()->factory()->nan_symbol();
- } else if (key->IsUndefined()) {
- key = isolate()->factory()->undefined_symbol();
- }
+ // Check for values that can be converted into a symbol directly or
+ // are representable as a smi.
+ key = TryConvertKey(key, isolate());
if (key->IsSymbol()) {
Handle<String> name = Handle<String>::cast(key);
@@ -1231,6 +1268,7 @@ void KeyedLoadIC::UpdateCaches(LookupResult* lookup,
Handle<AccessorInfo> callback =
Handle<AccessorInfo>::cast(callback_object);
if (v8::ToCData<Address>(callback->getter()) == 0) return;
+ if (!callback->IsCompatibleReceiver(*receiver)) return;
code = isolate()->stub_cache()->ComputeKeyedLoadCallback(
name, receiver, holder, callback);
break;
@@ -1383,7 +1421,11 @@ MaybeObject* StoreIC::Store(State state,
}
// Set the property.
- return receiver->SetProperty(*name, *value, NONE, strict_mode);
+ return receiver->SetProperty(*name,
+ *value,
+ NONE,
+ strict_mode,
+ JSReceiver::CERTAINLY_NOT_STORE_FROM_KEYED);
}
@@ -1408,6 +1450,7 @@ void StoreIC::UpdateCaches(LookupResult* lookup,
// Compute the code stub for this store; used for rewriting to
// monomorphic state and making sure that the code stub is in the
// stub cache.
+ Handle<JSObject> holder(lookup->holder());
Handle<Code> code;
switch (type) {
case FIELD:
@@ -1435,18 +1478,30 @@ void StoreIC::UpdateCaches(LookupResult* lookup,
code = isolate()->stub_cache()->ComputeStoreGlobal(
name, global, cell, strict_mode);
} else {
- if (lookup->holder() != *receiver) return;
+ if (!holder.is_identical_to(receiver)) return;
code = isolate()->stub_cache()->ComputeStoreNormal(strict_mode);
}
break;
case CALLBACKS: {
- Handle<Object> callback_object(lookup->GetCallbackObject());
- if (!callback_object->IsAccessorInfo()) return;
- Handle<AccessorInfo> callback =
- Handle<AccessorInfo>::cast(callback_object);
- if (v8::ToCData<Address>(callback->setter()) == 0) return;
- code = isolate()->stub_cache()->ComputeStoreCallback(
- name, receiver, callback, strict_mode);
+ Handle<Object> callback(lookup->GetCallbackObject());
+ if (callback->IsAccessorInfo()) {
+ Handle<AccessorInfo> info = Handle<AccessorInfo>::cast(callback);
+ if (v8::ToCData<Address>(info->setter()) == 0) return;
+ ASSERT(info->IsCompatibleReceiver(*receiver));
+ code = isolate()->stub_cache()->ComputeStoreCallback(
+ name, receiver, info, strict_mode);
+ } else if (callback->IsAccessorPair()) {
+ Handle<Object> setter(Handle<AccessorPair>::cast(callback)->setter());
+ if (!setter->IsJSFunction()) return;
+ if (holder->IsGlobalObject()) return;
+ if (!receiver->HasFastProperties()) return;
+ code = isolate()->stub_cache()->ComputeStoreViaSetter(
+ name, receiver, Handle<JSFunction>::cast(setter), strict_mode);
+ } else {
+ ASSERT(callback->IsForeign());
+ // No IC support for old-style native accessors.
+ return;
+ }
break;
}
case INTERCEPTOR:
@@ -1456,7 +1511,6 @@ void StoreIC::UpdateCaches(LookupResult* lookup,
break;
case CONSTANT_FUNCTION:
case CONSTANT_TRANSITION:
- case ELEMENTS_TRANSITION:
return;
case HANDLER:
case NULL_DESCRIPTOR:
@@ -1538,38 +1592,43 @@ Handle<Code> KeyedIC::ComputeStub(Handle<JSObject> receiver,
}
bool monomorphic = false;
+ bool is_transition_stub = IsTransitionStubKind(stub_kind);
+ Handle<Map> receiver_map(receiver->map());
+ Handle<Map> monomorphic_map = receiver_map;
MapHandleList target_receiver_maps;
- if (ic_state != UNINITIALIZED && ic_state != PREMONOMORPHIC) {
+ if (ic_state == UNINITIALIZED || ic_state == PREMONOMORPHIC) {
+ // Optimistically assume that ICs that haven't reached the MONOMORPHIC state
+ // yet will do so and stay there.
+ monomorphic = true;
+ } else {
GetReceiverMapsForStub(Handle<Code>(target()), &target_receiver_maps);
- }
- if (!IsTransitionStubKind(stub_kind)) {
- if (ic_state == UNINITIALIZED || ic_state == PREMONOMORPHIC) {
- monomorphic = true;
- } else {
- if (ic_state == MONOMORPHIC) {
- // The first time a receiver is seen that is a transitioned version of
- // the previous monomorphic receiver type, assume the new ElementsKind
- // is the monomorphic type. This benefits global arrays that only
- // transition once, and all call sites accessing them are faster if they
- // remain monomorphic. If this optimistic assumption is not true, the IC
- // will miss again and it will become polymorphic and support both the
- // untransitioned and transitioned maps.
- monomorphic = IsMoreGeneralElementsKindTransition(
- target_receiver_maps.at(0)->elements_kind(),
- receiver->GetElementsKind());
- }
+ if (ic_state == MONOMORPHIC && is_transition_stub) {
+ // The first time a receiver is seen that is a transitioned version of the
+ // previous monomorphic receiver type, assume the new ElementsKind is the
+ // monomorphic type. This benefits global arrays that only transition
+ // once, and all call sites accessing them are faster if they remain
+ // monomorphic. If this optimistic assumption is not true, the IC will
+ // miss again and it will become polymorphic and support both the
+ // untransitioned and transitioned maps.
+ monomorphic = IsMoreGeneralElementsKindTransition(
+ target_receiver_maps.at(0)->elements_kind(),
+ receiver->GetElementsKind());
}
}
if (monomorphic) {
+ if (is_transition_stub) {
+ monomorphic_map = ComputeTransitionedMap(receiver, stub_kind);
+ ASSERT(*monomorphic_map != *receiver_map);
+ stub_kind = GetNoTransitionStubKind(stub_kind);
+ }
return ComputeMonomorphicStub(
- receiver, stub_kind, strict_mode, generic_stub);
+ monomorphic_map, stub_kind, strict_mode, generic_stub);
}
ASSERT(target() != *generic_stub);
// Determine the list of receiver maps that this call site has seen,
// adding the map that was just encountered.
- Handle<Map> receiver_map(receiver->map());
bool map_added =
AddOneReceiverMapIfMissing(&target_receiver_maps, receiver_map);
if (IsTransitionStubKind(stub_kind)) {
@@ -1619,8 +1678,7 @@ Handle<Code> KeyedIC::ComputeMonomorphicStubWithoutMapCheck(
return string_stub();
} else {
ASSERT(receiver_map->has_dictionary_elements() ||
- receiver_map->has_fast_elements() ||
- receiver_map->has_fast_smi_only_elements() ||
+ receiver_map->has_fast_smi_or_object_elements() ||
receiver_map->has_fast_double_elements() ||
receiver_map->has_external_array_elements());
bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
@@ -1631,17 +1689,16 @@ Handle<Code> KeyedIC::ComputeMonomorphicStubWithoutMapCheck(
}
-Handle<Code> KeyedIC::ComputeMonomorphicStub(Handle<JSObject> receiver,
+Handle<Code> KeyedIC::ComputeMonomorphicStub(Handle<Map> receiver_map,
StubKind stub_kind,
StrictModeFlag strict_mode,
Handle<Code> generic_stub) {
- if (receiver->HasFastElements() ||
- receiver->HasFastSmiOnlyElements() ||
- receiver->HasExternalArrayElements() ||
- receiver->HasFastDoubleElements() ||
- receiver->HasDictionaryElements()) {
+ ElementsKind elements_kind = receiver_map->elements_kind();
+ if (IsFastElementsKind(elements_kind) ||
+ IsExternalArrayElementsKind(elements_kind) ||
+ IsDictionaryElementsKind(elements_kind)) {
return isolate()->stub_cache()->ComputeKeyedLoadOrStoreElement(
- receiver, stub_kind, strict_mode);
+ receiver_map, stub_kind, strict_mode);
} else {
return generic_stub;
}
@@ -1656,15 +1713,26 @@ Handle<Map> KeyedIC::ComputeTransitionedMap(Handle<JSObject> receiver,
case KeyedIC::STORE_AND_GROW_TRANSITION_SMI_TO_OBJECT:
case KeyedIC::STORE_AND_GROW_TRANSITION_DOUBLE_TO_OBJECT:
return JSObject::GetElementsTransitionMap(receiver, FAST_ELEMENTS);
- break;
case KeyedIC::STORE_TRANSITION_SMI_TO_DOUBLE:
case KeyedIC::STORE_AND_GROW_TRANSITION_SMI_TO_DOUBLE:
return JSObject::GetElementsTransitionMap(receiver, FAST_DOUBLE_ELEMENTS);
- break;
- default:
+ case KeyedIC::STORE_TRANSITION_HOLEY_SMI_TO_OBJECT:
+ case KeyedIC::STORE_TRANSITION_HOLEY_DOUBLE_TO_OBJECT:
+ case KeyedIC::STORE_AND_GROW_TRANSITION_HOLEY_SMI_TO_OBJECT:
+ case KeyedIC::STORE_AND_GROW_TRANSITION_HOLEY_DOUBLE_TO_OBJECT:
+ return JSObject::GetElementsTransitionMap(receiver,
+ FAST_HOLEY_ELEMENTS);
+ case KeyedIC::STORE_TRANSITION_HOLEY_SMI_TO_DOUBLE:
+ case KeyedIC::STORE_AND_GROW_TRANSITION_HOLEY_SMI_TO_DOUBLE:
+ return JSObject::GetElementsTransitionMap(receiver,
+ FAST_HOLEY_DOUBLE_ELEMENTS);
+ case KeyedIC::LOAD:
+ case KeyedIC::STORE_NO_TRANSITION:
+ case KeyedIC::STORE_AND_GROW_NO_TRANSITION:
UNREACHABLE();
- return Handle<Map>::null();
+ break;
}
+ return Handle<Map>::null();
}
@@ -1724,30 +1792,54 @@ KeyedIC::StubKind KeyedStoreIC::GetStubKind(Handle<JSObject> receiver,
if (allow_growth) {
// Handle growing array in stub if necessary.
- if (receiver->HasFastSmiOnlyElements()) {
+ if (receiver->HasFastSmiElements()) {
if (value->IsHeapNumber()) {
- return STORE_AND_GROW_TRANSITION_SMI_TO_DOUBLE;
+ if (receiver->HasFastHoleyElements()) {
+ return STORE_AND_GROW_TRANSITION_HOLEY_SMI_TO_DOUBLE;
+ } else {
+ return STORE_AND_GROW_TRANSITION_SMI_TO_DOUBLE;
+ }
}
if (value->IsHeapObject()) {
- return STORE_AND_GROW_TRANSITION_SMI_TO_OBJECT;
+ if (receiver->HasFastHoleyElements()) {
+ return STORE_AND_GROW_TRANSITION_HOLEY_SMI_TO_OBJECT;
+ } else {
+ return STORE_AND_GROW_TRANSITION_SMI_TO_OBJECT;
+ }
}
} else if (receiver->HasFastDoubleElements()) {
if (!value->IsSmi() && !value->IsHeapNumber()) {
- return STORE_AND_GROW_TRANSITION_DOUBLE_TO_OBJECT;
+ if (receiver->HasFastHoleyElements()) {
+ return STORE_AND_GROW_TRANSITION_HOLEY_DOUBLE_TO_OBJECT;
+ } else {
+ return STORE_AND_GROW_TRANSITION_DOUBLE_TO_OBJECT;
+ }
}
}
return STORE_AND_GROW_NO_TRANSITION;
} else {
// Handle only in-bounds elements accesses.
- if (receiver->HasFastSmiOnlyElements()) {
+ if (receiver->HasFastSmiElements()) {
if (value->IsHeapNumber()) {
- return STORE_TRANSITION_SMI_TO_DOUBLE;
+ if (receiver->HasFastHoleyElements()) {
+ return STORE_TRANSITION_HOLEY_SMI_TO_DOUBLE;
+ } else {
+ return STORE_TRANSITION_SMI_TO_DOUBLE;
+ }
} else if (value->IsHeapObject()) {
- return STORE_TRANSITION_SMI_TO_OBJECT;
+ if (receiver->HasFastHoleyElements()) {
+ return STORE_TRANSITION_HOLEY_SMI_TO_OBJECT;
+ } else {
+ return STORE_TRANSITION_SMI_TO_OBJECT;
+ }
}
} else if (receiver->HasFastDoubleElements()) {
if (!value->IsSmi() && !value->IsHeapNumber()) {
- return STORE_TRANSITION_DOUBLE_TO_OBJECT;
+ if (receiver->HasFastHoleyElements()) {
+ return STORE_TRANSITION_HOLEY_DOUBLE_TO_OBJECT;
+ } else {
+ return STORE_TRANSITION_DOUBLE_TO_OBJECT;
+ }
}
}
return STORE_NO_TRANSITION;
@@ -1761,6 +1853,10 @@ MaybeObject* KeyedStoreIC::Store(State state,
Handle<Object> key,
Handle<Object> value,
bool force_generic) {
+ // Check for values that can be converted into a symbol directly or
+ // are representable as a smi.
+ key = TryConvertKey(key, isolate());
+
if (key->IsSymbol()) {
Handle<String> name = Handle<String>::cast(key);
@@ -1878,7 +1974,6 @@ void KeyedStoreIC::UpdateCaches(LookupResult* lookup,
case CALLBACKS:
case INTERCEPTOR:
case CONSTANT_TRANSITION:
- case ELEMENTS_TRANSITION:
// Always rewrite to the generic case so that we do not
// repeatedly try to rewrite.
code = (strict_mode == kStrictMode)
@@ -2377,7 +2472,7 @@ RUNTIME_FUNCTION(MaybeObject*, BinaryOp_Patch) {
// Activate inlined smi code.
if (previous_type == BinaryOpIC::UNINITIALIZED) {
- PatchInlinedSmiCode(ic.address());
+ PatchInlinedSmiCode(ic.address(), ENABLE_INLINED_SMI_CHECK);
}
}
@@ -2438,6 +2533,14 @@ RUNTIME_FUNCTION(MaybeObject*, BinaryOp_Patch) {
}
+Code* CompareIC::GetRawUninitialized(Token::Value op) {
+ ICCompareStub stub(op, UNINITIALIZED);
+ Code* code = NULL;
+ CHECK(stub.FindCodeInCache(&code));
+ return code;
+}
+
+
Handle<Code> CompareIC::GetUninitialized(Token::Value op) {
ICCompareStub stub(op, UNINITIALIZED);
return stub.GetCode();
@@ -2452,6 +2555,12 @@ CompareIC::State CompareIC::ComputeState(Code* target) {
}
+Token::Value CompareIC::ComputeOperation(Code* target) {
+ ASSERT(target->major_key() == CodeStub::CompareIC);
+ return static_cast<Token::Value>(target->compare_operation());
+}
+
+
const char* CompareIC::GetStateName(State state) {
switch (state) {
case UNINITIALIZED: return "UNINITIALIZED";
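On the runtime side, the new TryConvertKey performs the matching normalization before keyed IC dispatch: heap-number keys holding an exact smi become smi indices, NaN becomes the "NaN" symbol, and undefined becomes the "undefined" symbol, so such keys can take the same fast paths as genuine smi keys. A standalone sketch of that decision (hypothetical types, assuming the 31-bit smi range of 32-bit builds):

#include <cmath>
#include <cstdint>
#include <string>

struct NormalizedKey {
  bool is_smi;        // true: use `index`; false: use `name`
  int32_t index;
  std::string name;   // empty means "leave the key as it was"
};

NormalizedKey NormalizeKeyedIcKey(bool is_undefined, double number_key) {
  if (is_undefined) return {false, 0, "undefined"};
  if (std::isnan(number_key)) return {false, 0, "NaN"};
  if (number_key >= -1073741824.0 && number_key <= 1073741823.0) {
    int32_t as_int = static_cast<int32_t>(number_key);
    if (static_cast<double>(as_int) == number_key) {
      return {true, as_int, std::string()};   // becomes a smi key
    }
  }
  return {false, 0, std::string()};            // unchanged, generic IC path
}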
diff --git a/deps/v8/src/ic.h b/deps/v8/src/ic.h
index 56625525d4..c86f316ef3 100644
--- a/deps/v8/src/ic.h
+++ b/deps/v8/src/ic.h
@@ -378,10 +378,16 @@ class KeyedIC: public IC {
STORE_TRANSITION_SMI_TO_OBJECT,
STORE_TRANSITION_SMI_TO_DOUBLE,
STORE_TRANSITION_DOUBLE_TO_OBJECT,
+ STORE_TRANSITION_HOLEY_SMI_TO_OBJECT,
+ STORE_TRANSITION_HOLEY_SMI_TO_DOUBLE,
+ STORE_TRANSITION_HOLEY_DOUBLE_TO_OBJECT,
STORE_AND_GROW_NO_TRANSITION,
STORE_AND_GROW_TRANSITION_SMI_TO_OBJECT,
STORE_AND_GROW_TRANSITION_SMI_TO_DOUBLE,
- STORE_AND_GROW_TRANSITION_DOUBLE_TO_OBJECT
+ STORE_AND_GROW_TRANSITION_DOUBLE_TO_OBJECT,
+ STORE_AND_GROW_TRANSITION_HOLEY_SMI_TO_OBJECT,
+ STORE_AND_GROW_TRANSITION_HOLEY_SMI_TO_DOUBLE,
+ STORE_AND_GROW_TRANSITION_HOLEY_DOUBLE_TO_OBJECT
};
static const int kGrowICDelta = STORE_AND_GROW_NO_TRANSITION -
@@ -445,7 +451,7 @@ class KeyedIC: public IC {
private:
void GetReceiverMapsForStub(Handle<Code> stub, MapHandleList* result);
- Handle<Code> ComputeMonomorphicStub(Handle<JSObject> receiver,
+ Handle<Code> ComputeMonomorphicStub(Handle<Map> receiver_map,
StubKind stub_kind,
StrictModeFlag strict_mode,
Handle<Code> default_stub);
@@ -461,6 +467,12 @@ class KeyedIC: public IC {
static bool IsGrowStubKind(StubKind stub_kind) {
return stub_kind >= STORE_AND_GROW_NO_TRANSITION;
}
+
+ static StubKind GetNoTransitionStubKind(StubKind stub_kind) {
+ if (!IsTransitionStubKind(stub_kind)) return stub_kind;
+ if (IsGrowStubKind(stub_kind)) return STORE_AND_GROW_NO_TRANSITION;
+ return STORE_NO_TRANSITION;
+ }
};
@@ -794,6 +806,9 @@ class CompareIC: public IC {
// Helper function for determining the state of a compare IC.
static State ComputeState(Code* target);
+ // Helper function for determining the operation a compare IC is for.
+ static Token::Value ComputeOperation(Code* target);
+
static const char* GetStateName(State state);
private:
@@ -804,7 +819,13 @@ class CompareIC: public IC {
Condition GetCondition() const { return ComputeCondition(op_); }
State GetState() { return ComputeState(target()); }
+ static Code* GetRawUninitialized(Token::Value op);
+
+ static void Clear(Address address, Code* target);
+
Token::Value op_;
+
+ friend class IC;
};
@@ -817,7 +838,8 @@ class ToBooleanIC: public IC {
// Helper for BinaryOpIC and CompareIC.
-void PatchInlinedSmiCode(Address address);
+enum InlinedSmiCheck { ENABLE_INLINED_SMI_CHECK, DISABLE_INLINED_SMI_CHECK };
+void PatchInlinedSmiCode(Address address, InlinedSmiCheck check);
} } // namespace v8::internal
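The extended StubKind enum keeps the layout the ordering-based helpers depend on: the STORE_AND_GROW_* block mirrors the plain block entry for entry, so kGrowICDelta (STORE_AND_GROW_NO_TRANSITION - STORE_NO_TRANSITION) still maps each non-growing kind onto its growing counterpart, and IsGrowStubKind / the new GetNoTransitionStubKind classify kinds purely by position. A self-contained sketch of that invariant (enumerator values are illustrative; the real enum has additional leading members, which does not change the relationship):

enum StubKindSketch {
  STORE_NO_TRANSITION,
  STORE_TRANSITION_SMI_TO_OBJECT,
  STORE_TRANSITION_SMI_TO_DOUBLE,
  STORE_TRANSITION_DOUBLE_TO_OBJECT,
  STORE_TRANSITION_HOLEY_SMI_TO_OBJECT,
  STORE_TRANSITION_HOLEY_SMI_TO_DOUBLE,
  STORE_TRANSITION_HOLEY_DOUBLE_TO_OBJECT,
  STORE_AND_GROW_NO_TRANSITION,
  STORE_AND_GROW_TRANSITION_SMI_TO_OBJECT,
  STORE_AND_GROW_TRANSITION_SMI_TO_DOUBLE,
  STORE_AND_GROW_TRANSITION_DOUBLE_TO_OBJECT,
  STORE_AND_GROW_TRANSITION_HOLEY_SMI_TO_OBJECT,
  STORE_AND_GROW_TRANSITION_HOLEY_SMI_TO_DOUBLE,
  STORE_AND_GROW_TRANSITION_HOLEY_DOUBLE_TO_OBJECT
};

const int kGrowICDeltaSketch =
    STORE_AND_GROW_NO_TRANSITION - STORE_NO_TRANSITION;

// Every grow kind is its non-grow counterpart shifted by the same delta,
// which is what keeps the delta-based classification valid after the HOLEY
// kinds were appended to both halves in the same order.
static_assert(STORE_AND_GROW_TRANSITION_SMI_TO_OBJECT ==
              STORE_TRANSITION_SMI_TO_OBJECT + kGrowICDeltaSketch, "");
static_assert(STORE_AND_GROW_TRANSITION_HOLEY_DOUBLE_TO_OBJECT ==
              STORE_TRANSITION_HOLEY_DOUBLE_TO_OBJECT + kGrowICDeltaSketch, "");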
diff --git a/deps/v8/src/incremental-marking-inl.h b/deps/v8/src/incremental-marking-inl.h
index 3e3d6c43fd..2dae6f207d 100644
--- a/deps/v8/src/incremental-marking-inl.h
+++ b/deps/v8/src/incremental-marking-inl.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -100,7 +100,7 @@ void IncrementalMarking::BlackToGreyAndUnshift(HeapObject* obj,
int64_t old_bytes_rescanned = bytes_rescanned_;
bytes_rescanned_ = old_bytes_rescanned + obj_size;
if ((bytes_rescanned_ >> 20) != (old_bytes_rescanned >> 20)) {
- if (bytes_rescanned_ > 2 * heap_->PromotedSpaceSize()) {
+ if (bytes_rescanned_ > 2 * heap_->PromotedSpaceSizeOfObjects()) {
// If we have queued twice the heap size for rescanning then we are
// going around in circles, scanning the same objects again and again
// as the program mutates the heap faster than we can incrementally
@@ -118,13 +118,29 @@ void IncrementalMarking::BlackToGreyAndUnshift(HeapObject* obj,
void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj, MarkBit mark_bit) {
- WhiteToGrey(obj, mark_bit);
+ Marking::WhiteToGrey(mark_bit);
marking_deque_.PushGrey(obj);
}
-void IncrementalMarking::WhiteToGrey(HeapObject* obj, MarkBit mark_bit) {
- Marking::WhiteToGrey(mark_bit);
+bool IncrementalMarking::MarkObjectAndPush(HeapObject* obj) {
+ MarkBit mark_bit = Marking::MarkBitFrom(obj);
+ if (!mark_bit.Get()) {
+ WhiteToGreyAndPush(obj, mark_bit);
+ return true;
+ }
+ return false;
+}
+
+
+bool IncrementalMarking::MarkObjectWithoutPush(HeapObject* obj) {
+ MarkBit mark_bit = Marking::MarkBitFrom(obj);
+ if (!mark_bit.Get()) {
+ mark_bit.Set();
+ MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
+ return true;
+ }
+ return false;
}
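The two new inline helpers give incremental marking the primitives that the shared Marker template (introduced in the incremental-marking.cc/.h hunks that follow) works against: MarkObjectAndPush greys a white object and queues it for later scanning, while MarkObjectWithoutPush blackens it immediately and only records its live bytes. A standalone tri-colour model of the distinction (hypothetical types, not V8's MarkBit machinery):

#include <cstddef>
#include <deque>

enum class Color { kWhite, kGrey, kBlack };

struct FakeObject {
  Color color = Color::kWhite;
  size_t size = 0;
};

struct MarkingState {
  std::deque<FakeObject*> marking_deque;
  size_t live_bytes = 0;

  // Both helpers return true only when the object was still unmarked (white).
  bool MarkObjectAndPush(FakeObject* obj) {       // white -> grey, queued
    if (obj->color != Color::kWhite) return false;
    obj->color = Color::kGrey;
    marking_deque.push_back(obj);
    return true;
  }

  bool MarkObjectWithoutPush(FakeObject* obj) {   // white -> black, counted
    if (obj->color != Color::kWhite) return false;
    obj->color = Color::kBlack;
    live_bytes += obj->size;
    return true;
  }
};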
diff --git a/deps/v8/src/incremental-marking.cc b/deps/v8/src/incremental-marking.cc
index 7bbd5218b1..94afffa733 100644
--- a/deps/v8/src/incremental-marking.cc
+++ b/deps/v8/src/incremental-marking.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -42,6 +42,7 @@ IncrementalMarking::IncrementalMarking(Heap* heap)
state_(STOPPED),
marking_deque_memory_(NULL),
marking_deque_memory_committed_(false),
+ marker_(this, heap->mark_compact_collector()),
steps_count_(0),
steps_took_(0),
longest_step_(0.0),
@@ -663,6 +664,22 @@ void IncrementalMarking::Hurry() {
} else if (map == global_context_map) {
// Global contexts have weak fields.
VisitGlobalContext(Context::cast(obj), &marking_visitor);
+ } else if (map->instance_type() == MAP_TYPE) {
+ Map* map = Map::cast(obj);
+ heap_->ClearCacheOnMap(map);
+
+ // When map collection is enabled we have to mark through map's
+ // transitions and back pointers in a special way to make these links
+ // weak. Only maps for subclasses of JSReceiver can have transitions.
+ STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
+ if (FLAG_collect_maps &&
+ map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
+ marker_.MarkMapContents(map);
+ } else {
+ marking_visitor.VisitPointers(
+ HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
+ HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
+ }
} else {
obj->Iterate(&marking_visitor);
}
@@ -807,12 +824,6 @@ void IncrementalMarking::Step(intptr_t allocated_bytes,
Map* map = obj->map();
if (map == filler_map) continue;
- if (obj->IsMap()) {
- Map* map = Map::cast(obj);
- heap_->ClearCacheOnMap(map);
- }
-
-
int size = obj->SizeFromMap(map);
bytes_to_process -= size;
MarkBit map_mark_bit = Marking::MarkBitFrom(map);
@@ -830,6 +841,35 @@ void IncrementalMarking::Step(intptr_t allocated_bytes,
MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache());
VisitGlobalContext(ctx, &marking_visitor);
+ } else if (map->instance_type() == MAP_TYPE) {
+ Map* map = Map::cast(obj);
+ heap_->ClearCacheOnMap(map);
+
+ // When map collection is enabled we have to mark through map's
+ // transitions and back pointers in a special way to make these links
+ // weak. Only maps for subclasses of JSReceiver can have transitions.
+ STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
+ if (FLAG_collect_maps &&
+ map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
+ marker_.MarkMapContents(map);
+ } else {
+ marking_visitor.VisitPointers(
+ HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
+ HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
+ }
+ } else if (map->instance_type() == JS_FUNCTION_TYPE) {
+ marking_visitor.VisitPointers(
+ HeapObject::RawField(obj, JSFunction::kPropertiesOffset),
+ HeapObject::RawField(obj, JSFunction::kCodeEntryOffset));
+
+ marking_visitor.VisitCodeEntry(
+ obj->address() + JSFunction::kCodeEntryOffset);
+
+ marking_visitor.VisitPointers(
+ HeapObject::RawField(obj,
+ JSFunction::kCodeEntryOffset + kPointerSize),
+ HeapObject::RawField(obj,
+ JSFunction::kNonWeakFieldsEndOffset));
} else {
obj->IterateBody(map->instance_type(), size, &marking_visitor);
}
@@ -938,7 +978,7 @@ void IncrementalMarking::ResetStepCounters() {
int64_t IncrementalMarking::SpaceLeftInOldSpace() {
- return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSize();
+ return heap_->MaxOldGenerationSize() - heap_->PromotedSpaceSizeOfObjects();
}
} } // namespace v8::internal
diff --git a/deps/v8/src/incremental-marking.h b/deps/v8/src/incremental-marking.h
index 8cbe6c18e7..39e8daed68 100644
--- a/deps/v8/src/incremental-marking.h
+++ b/deps/v8/src/incremental-marking.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -154,8 +154,6 @@ class IncrementalMarking {
inline void WhiteToGreyAndPush(HeapObject* obj, MarkBit mark_bit);
- inline void WhiteToGrey(HeapObject* obj, MarkBit mark_bit);
-
// Does white->black or keeps gray or black color. Returns true if converting
// white to black.
inline bool MarkBlackOrKeepGrey(MarkBit mark_bit) {
@@ -169,6 +167,16 @@ class IncrementalMarking {
return true;
}
+ // Marks the object grey and pushes it on the marking stack.
+ // Returns true if object needed marking and false otherwise.
+ // This is for incremental marking only.
+ INLINE(bool MarkObjectAndPush(HeapObject* obj));
+
+ // Marks the object black without pushing it on the marking stack.
+ // Returns true if object needed marking and false otherwise.
+ // This is for incremental marking only.
+ INLINE(bool MarkObjectWithoutPush(HeapObject* obj));
+
inline int steps_count() {
return steps_count_;
}
@@ -260,6 +268,7 @@ class IncrementalMarking {
VirtualMemory* marking_deque_memory_;
bool marking_deque_memory_committed_;
MarkingDeque marking_deque_;
+ Marker<IncrementalMarking> marker_;
int steps_count_;
double steps_took_;
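A Marker<IncrementalMarking> member now drives map-specific marking (MarkMapContents in the .cc hunks above); the real Marker template lives in mark-compact.h, is constructed with both the incremental marker and the mark-compact collector, and is not part of this diff. Purely as a hypothetical sketch of how such a templated walker can sit on top of the two helpers added here (interface and names below are assumptions, not V8's actual declaration):

// Hypothetical shape only: a marking walker templated over its "base marker"
// so the same traversal code can serve both incremental marking and the
// mark-compact collector, provided the base supplies MarkObjectAndPush /
// MarkObjectWithoutPush.
template <class BaseMarker, class Object>
class MarkerSketch {
 public:
  explicit MarkerSketch(BaseMarker* base) : base_(base) {}

  // Mark a container object itself without queueing it, then queue each of
  // its strong children for later scanning.
  template <class Iterator>
  void MarkContents(Object* container, Iterator begin, Iterator end) {
    base_->MarkObjectWithoutPush(container);
    for (Iterator it = begin; it != end; ++it) {
      base_->MarkObjectAndPush(*it);
    }
  }

 private:
  BaseMarker* base_;
};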
diff --git a/deps/v8/src/interface.cc b/deps/v8/src/interface.cc
index e344b86150..86bb9d0bf4 100644
--- a/deps/v8/src/interface.cc
+++ b/deps/v8/src/interface.cc
@@ -41,11 +41,13 @@ static bool Match(void* key1, void* key2) {
}
-Interface* Interface::Lookup(Handle<String> name) {
+Interface* Interface::Lookup(Handle<String> name, Zone* zone) {
ASSERT(IsModule());
ZoneHashMap* map = Chase()->exports_;
if (map == NULL) return NULL;
- ZoneHashMap::Entry* p = map->Lookup(name.location(), name->Hash(), false);
+ ZoneAllocationPolicy allocator(zone);
+ ZoneHashMap::Entry* p = map->Lookup(name.location(), name->Hash(), false,
+ allocator);
if (p == NULL) return NULL;
ASSERT(*static_cast<String**>(p->key) == *name);
ASSERT(p->value != NULL);
@@ -69,7 +71,7 @@ int Nesting::current_ = 0;
void Interface::DoAdd(
- void* name, uint32_t hash, Interface* interface, bool* ok) {
+ void* name, uint32_t hash, Interface* interface, Zone* zone, bool* ok) {
MakeModule(ok);
if (!*ok) return;
@@ -79,15 +81,19 @@ void Interface::DoAdd(
PrintF("%*sthis = ", Nesting::current(), "");
this->Print(Nesting::current());
PrintF("%*s%s : ", Nesting::current(), "",
- (*reinterpret_cast<String**>(name))->ToAsciiArray());
+ (*static_cast<String**>(name))->ToAsciiArray());
interface->Print(Nesting::current());
}
#endif
ZoneHashMap** map = &Chase()->exports_;
- if (*map == NULL) *map = new ZoneHashMap(Match, 8);
+ ZoneAllocationPolicy allocator(zone);
- ZoneHashMap::Entry* p = (*map)->Lookup(name, hash, !IsFrozen());
+ if (*map == NULL)
+ *map = new ZoneHashMap(Match, ZoneHashMap::kDefaultHashMapCapacity,
+ allocator);
+
+ ZoneHashMap::Entry* p = (*map)->Lookup(name, hash, !IsFrozen(), allocator);
if (p == NULL) {
// This didn't have name but was frozen already, that's an error.
*ok = false;
@@ -97,7 +103,7 @@ void Interface::DoAdd(
#ifdef DEBUG
Nesting nested;
#endif
- reinterpret_cast<Interface*>(p->value)->Unify(interface, ok);
+ static_cast<Interface*>(p->value)->Unify(interface, zone, ok);
}
#ifdef DEBUG
@@ -110,9 +116,9 @@ void Interface::DoAdd(
}
-void Interface::Unify(Interface* that, bool* ok) {
- if (this->forward_) return this->Chase()->Unify(that, ok);
- if (that->forward_) return this->Unify(that->Chase(), ok);
+void Interface::Unify(Interface* that, Zone* zone, bool* ok) {
+ if (this->forward_) return this->Chase()->Unify(that, zone, ok);
+ if (that->forward_) return this->Unify(that->Chase(), zone, ok);
ASSERT(this->forward_ == NULL);
ASSERT(that->forward_ == NULL);
@@ -134,9 +140,9 @@ void Interface::Unify(Interface* that, bool* ok) {
// Merge the smaller interface into the larger, for performance.
if (this->exports_ != NULL && (that->exports_ == NULL ||
this->exports_->occupancy() >= that->exports_->occupancy())) {
- this->DoUnify(that, ok);
+ this->DoUnify(that, ok, zone);
} else {
- that->DoUnify(this, ok);
+ that->DoUnify(this, ok, zone);
}
#ifdef DEBUG
@@ -151,7 +157,7 @@ void Interface::Unify(Interface* that, bool* ok) {
}
-void Interface::DoUnify(Interface* that, bool* ok) {
+void Interface::DoUnify(Interface* that, bool* ok, Zone* zone) {
ASSERT(this->forward_ == NULL);
ASSERT(that->forward_ == NULL);
ASSERT(!this->IsValue());
@@ -166,7 +172,7 @@ void Interface::DoUnify(Interface* that, bool* ok) {
ZoneHashMap* map = that->exports_;
if (map != NULL) {
for (ZoneHashMap::Entry* p = map->Start(); p != NULL; p = map->Next(p)) {
- this->DoAdd(p->key, p->hash, static_cast<Interface*>(p->value), ok);
+ this->DoAdd(p->key, p->hash, static_cast<Interface*>(p->value), zone, ok);
if (!*ok) return;
}
}
@@ -180,6 +186,15 @@ void Interface::DoUnify(Interface* that, bool* ok) {
return;
}
+ // Merge instance.
+ if (!that->instance_.is_null()) {
+ if (!this->instance_.is_null() && *this->instance_ != *that->instance_) {
+ *ok = false;
+ return;
+ }
+ this->instance_ = that->instance_;
+ }
+
// Merge interfaces.
this->flags_ |= that->flags_;
that->forward_ = this;
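The interface.cc changes thread a Zone* through Lookup, Add, Unify, and DoAdd because the hash map behind exports_ now takes an allocation policy: its Lookup entry point doubles as lookup-or-insert, so even a call that may insert has to carry the allocator. A minimal self-contained illustration of that pattern (hypothetical toy types, not V8's ZoneHashMap or ZoneAllocationPolicy):

#include <cstddef>
#include <new>
#include <vector>

// Toy zone: owns everything allocated through it and frees it all at once.
struct ToyZone {
  ~ToyZone() { for (char* p : blocks) delete[] p; }
  void* New(size_t size) {
    char* p = new char[size];
    blocks.push_back(p);
    return p;
  }
  std::vector<char*> blocks;
};

// Allocation policy handed to the container, mirroring the role of
// ZoneAllocationPolicy in the hunks above.
struct ToyZonePolicy {
  explicit ToyZonePolicy(ToyZone* zone) : zone(zone) {}
  void* New(size_t size) { return zone->New(size); }
  ToyZone* zone;
};

struct Entry {
  void* key;
  unsigned hash;
  void* value;
  Entry* next;
};

// Lookup-or-insert on a single bucket: the allocator is touched only on the
// insert path, which is exactly why the zone must be passed even to Lookup.
Entry* LookupOrInsert(Entry** bucket, void* key, unsigned hash,
                      bool insert_if_absent, ToyZonePolicy allocator) {
  for (Entry* e = *bucket; e != nullptr; e = e->next) {
    if (e->hash == hash && e->key == key) return e;
  }
  if (!insert_if_absent) return nullptr;
  Entry* e =
      new (allocator.New(sizeof(Entry))) Entry{key, hash, nullptr, *bucket};
  *bucket = e;
  return e;
}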
diff --git a/deps/v8/src/interface.h b/deps/v8/src/interface.h
index c2991cbd63..2670e7428d 100644
--- a/deps/v8/src/interface.h
+++ b/deps/v8/src/interface.h
@@ -53,12 +53,12 @@ class Interface : public ZoneObject {
return &value_interface;
}
- static Interface* NewUnknown() {
- return new Interface(NONE);
+ static Interface* NewUnknown(Zone* zone) {
+ return new(zone) Interface(NONE);
}
- static Interface* NewModule() {
- return new Interface(MODULE);
+ static Interface* NewModule(Zone* zone) {
+ return new(zone) Interface(MODULE);
}
// ---------------------------------------------------------------------------
@@ -66,13 +66,13 @@ class Interface : public ZoneObject {
// Add a name to the list of exports. If it already exists, unify with
// interface, otherwise insert unless this is closed.
- void Add(Handle<String> name, Interface* interface, bool* ok) {
- DoAdd(name.location(), name->Hash(), interface, ok);
+ void Add(Handle<String> name, Interface* interface, Zone* zone, bool* ok) {
+ DoAdd(name.location(), name->Hash(), interface, zone, ok);
}
// Unify with another interface. If successful, both interface objects will
// represent the same type, and changes to one are reflected in the other.
- void Unify(Interface* that, bool* ok);
+ void Unify(Interface* that, Zone* zone, bool* ok);
// Determine this interface to be a value interface.
void MakeValue(bool* ok) {
@@ -86,6 +86,12 @@ class Interface : public ZoneObject {
if (*ok) Chase()->flags_ |= MODULE;
}
+ // Set associated instance object.
+ void MakeSingleton(Handle<JSModule> instance, bool* ok) {
+ *ok = IsModule() && Chase()->instance_.is_null();
+ if (*ok) Chase()->instance_ = instance;
+ }
+
// Do not allow any further refinements, directly or through unification.
void Freeze(bool* ok) {
*ok = IsValue() || IsModule();
@@ -95,9 +101,6 @@ class Interface : public ZoneObject {
// ---------------------------------------------------------------------------
// Accessors.
- // Look up an exported name. Returns NULL if not (yet) defined.
- Interface* Lookup(Handle<String> name);
-
// Check whether this is still a fully undetermined type.
bool IsUnknown() { return Chase()->flags_ == NONE; }
@@ -110,6 +113,42 @@ class Interface : public ZoneObject {
// Check whether this is closed (i.e. fully determined).
bool IsFrozen() { return Chase()->flags_ & FROZEN; }
+ Handle<JSModule> Instance() { return Chase()->instance_; }
+
+ // Look up an exported name. Returns NULL if not (yet) defined.
+ Interface* Lookup(Handle<String> name, Zone* zone);
+
+ // ---------------------------------------------------------------------------
+ // Iterators.
+
+ // Use like:
+ // for (auto it = interface->iterator(); !it.done(); it.Advance()) {
+ // ... it.name() ... it.interface() ...
+ // }
+ class Iterator {
+ public:
+ bool done() const { return entry_ == NULL; }
+ Handle<String> name() const {
+ ASSERT(!done());
+ return Handle<String>(*static_cast<String**>(entry_->key));
+ }
+ Interface* interface() const {
+ ASSERT(!done());
+ return static_cast<Interface*>(entry_->value);
+ }
+ void Advance() { entry_ = exports_->Next(entry_); }
+
+ private:
+ friend class Interface;
+ explicit Iterator(const ZoneHashMap* exports)
+ : exports_(exports), entry_(exports ? exports->Start() : NULL) {}
+
+ const ZoneHashMap* exports_;
+ ZoneHashMap::Entry* entry_;
+ };
+
+ Iterator iterator() const { return Iterator(this->exports_); }
+
// ---------------------------------------------------------------------------
// Debugging.
#ifdef DEBUG
@@ -129,6 +168,7 @@ class Interface : public ZoneObject {
int flags_;
Interface* forward_; // Unification link
ZoneHashMap* exports_; // Module exports and their types (allocated lazily)
+ Handle<JSModule> instance_;
explicit Interface(int flags)
: flags_(flags),
@@ -147,8 +187,9 @@ class Interface : public ZoneObject {
return result;
}
- void DoAdd(void* name, uint32_t hash, Interface* interface, bool* ok);
- void DoUnify(Interface* that, bool* ok);
+ void DoAdd(void* name, uint32_t hash, Interface* interface, Zone* zone,
+ bool* ok);
+ void DoUnify(Interface* that, bool* ok, Zone* zone);
};
} } // namespace v8::internal
diff --git a/deps/v8/src/interpreter-irregexp.cc b/deps/v8/src/interpreter-irregexp.cc
index b337e88452..3a92b84554 100644
--- a/deps/v8/src/interpreter-irregexp.cc
+++ b/deps/v8/src/interpreter-irregexp.cc
@@ -33,8 +33,9 @@
#include "utils.h"
#include "ast.h"
#include "bytecodes-irregexp.h"
-#include "jsregexp.h"
#include "interpreter-irregexp.h"
+#include "jsregexp.h"
+#include "regexp-macro-assembler.h"
namespace v8 {
namespace internal {
@@ -449,6 +450,37 @@ static RegExpImpl::IrregexpResult RawMatch(Isolate* isolate,
}
break;
}
+ BYTECODE(CHECK_CHAR_IN_RANGE) {
+ uint32_t from = Load16Aligned(pc + 4);
+ uint32_t to = Load16Aligned(pc + 6);
+ if (from <= current_char && current_char <= to) {
+ pc = code_base + Load32Aligned(pc + 8);
+ } else {
+ pc += BC_CHECK_CHAR_IN_RANGE_LENGTH;
+ }
+ break;
+ }
+ BYTECODE(CHECK_CHAR_NOT_IN_RANGE) {
+ uint32_t from = Load16Aligned(pc + 4);
+ uint32_t to = Load16Aligned(pc + 6);
+ if (from > current_char || current_char > to) {
+ pc = code_base + Load32Aligned(pc + 8);
+ } else {
+ pc += BC_CHECK_CHAR_NOT_IN_RANGE_LENGTH;
+ }
+ break;
+ }
+ BYTECODE(CHECK_BIT_IN_TABLE) {
+ int mask = RegExpMacroAssembler::kTableMask;
+ byte b = pc[8 + ((current_char & mask) >> kBitsPerByteLog2)];
+ int bit = (current_char & (kBitsPerByte - 1));
+ if ((b & (1 << bit)) != 0) {
+ pc = code_base + Load32Aligned(pc + 4);
+ } else {
+ pc += BC_CHECK_BIT_IN_TABLE_LENGTH;
+ }
+ break;
+ }
BYTECODE(CHECK_LT) {
uint32_t limit = (insn >> BYTECODE_SHIFT);
if (current_char < limit) {
@@ -488,59 +520,6 @@ static RegExpImpl::IrregexpResult RawMatch(Isolate* isolate,
pc += BC_CHECK_REGISTER_EQ_POS_LENGTH;
}
break;
- BYTECODE(LOOKUP_MAP1) {
- // Look up character in a bitmap. If we find a 0, then jump to the
- // location at pc + 8. Otherwise fall through!
- int index = current_char - (insn >> BYTECODE_SHIFT);
- byte map = code_base[Load32Aligned(pc + 4) + (index >> 3)];
- map = ((map >> (index & 7)) & 1);
- if (map == 0) {
- pc = code_base + Load32Aligned(pc + 8);
- } else {
- pc += BC_LOOKUP_MAP1_LENGTH;
- }
- break;
- }
- BYTECODE(LOOKUP_MAP2) {
- // Look up character in a half-nibble map. If we find 00, then jump to
- // the location at pc + 8. If we find 01 then jump to location at
- // pc + 11, etc.
- int index = (current_char - (insn >> BYTECODE_SHIFT)) << 1;
- byte map = code_base[Load32Aligned(pc + 3) + (index >> 3)];
- map = ((map >> (index & 7)) & 3);
- if (map < 2) {
- if (map == 0) {
- pc = code_base + Load32Aligned(pc + 8);
- } else {
- pc = code_base + Load32Aligned(pc + 12);
- }
- } else {
- if (map == 2) {
- pc = code_base + Load32Aligned(pc + 16);
- } else {
- pc = code_base + Load32Aligned(pc + 20);
- }
- }
- break;
- }
- BYTECODE(LOOKUP_MAP8) {
- // Look up character in a byte map. Use the byte as an index into a
- // table that follows this instruction immediately.
- int index = current_char - (insn >> BYTECODE_SHIFT);
- byte map = code_base[Load32Aligned(pc + 4) + index];
- const byte* new_pc = code_base + Load32Aligned(pc + 8) + (map << 2);
- pc = code_base + Load32Aligned(new_pc);
- break;
- }
- BYTECODE(LOOKUP_HI_MAP8) {
- // Look up high byte of this character in a byte map. Use the byte as
- // an index into a table that follows this instruction immediately.
- int index = (current_char >> 8) - (insn >> BYTECODE_SHIFT);
- byte map = code_base[Load32Aligned(pc + 4) + index];
- const byte* new_pc = code_base + Load32Aligned(pc + 8) + (map << 2);
- pc = code_base + Load32Aligned(new_pc);
- break;
- }
BYTECODE(CHECK_NOT_REGS_EQUAL)
if (registers[insn >> BYTECODE_SHIFT] ==
registers[Load32Aligned(pc + 4)]) {
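The removed LOOKUP_MAP* bytecodes give way to table-driven checks; CHECK_BIT_IN_TABLE in particular tests character-class membership with a single bit-table probe. A portable sketch of that probe (kTableMask is assumed to be 0x7f here, i.e. a 128-slot table; the real constant lives in regexp-macro-assembler.h):

#include <cstdint>

const uint32_t kAssumedTableMask = 0x7f;  // assumption: 128-slot bit table
const int kBitsPerByte = 8;

// Mirrors the CHECK_BIT_IN_TABLE interpreter case: pick the table byte
// addressed by the masked character, then test the bit selected by the
// character's low three bits.
bool CharInBitTable(uint32_t current_char, const uint8_t* table /* 16 bytes */) {
  uint8_t b = table[(current_char & kAssumedTableMask) >> 3];
  int bit = current_char & (kBitsPerByte - 1);
  return (b & (1u << bit)) != 0;
}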
diff --git a/deps/v8/src/isolate.cc b/deps/v8/src/isolate.cc
index e80512239d..8fcb370c3e 100644
--- a/deps/v8/src/isolate.cc
+++ b/deps/v8/src/isolate.cc
@@ -256,7 +256,7 @@ void Isolate::PreallocatedStorageInit(size_t size) {
void* Isolate::PreallocatedStorageNew(size_t size) {
if (!preallocated_storage_preallocated_) {
- return FreeStoreAllocationPolicy::New(size);
+ return FreeStoreAllocationPolicy().New(size);
}
ASSERT(free_list_.next_ != &free_list_);
ASSERT(free_list_.previous_ != &free_list_);
@@ -921,7 +921,7 @@ Failure* Isolate::Throw(Object* exception, MessageLocation* location) {
}
-Failure* Isolate::ReThrow(MaybeObject* exception, MessageLocation* location) {
+Failure* Isolate::ReThrow(MaybeObject* exception) {
bool can_be_caught_externally = false;
bool catchable_by_javascript = is_catchable_by_javascript(exception);
ShouldReportException(&can_be_caught_externally, catchable_by_javascript);
@@ -1131,8 +1131,18 @@ void Isolate::DoThrow(Object* exception, MessageLocation* location) {
// to the console for easier debugging.
int line_number = GetScriptLineNumberSafe(location->script(),
location->start_pos());
- OS::PrintError("Extension or internal compilation error at line %d.\n",
- line_number);
+ if (exception->IsString()) {
+ OS::PrintError(
+ "Extension or internal compilation error: %s in %s at line %d.\n",
+ *String::cast(exception)->ToCString(),
+ *String::cast(location->script()->name())->ToCString(),
+ line_number);
+ } else {
+ OS::PrintError(
+ "Extension or internal compilation error in %s at line %d.\n",
+ *String::cast(location->script()->name())->ToCString(),
+ line_number);
+ }
}
}
@@ -1430,6 +1440,7 @@ void Isolate::ThreadDataTable::RemoveAllThreads(Isolate* isolate) {
Isolate::Isolate()
: state_(UNINITIALIZED),
+ embedder_data_(NULL),
entry_stack_(NULL),
stack_trace_nesting_level_(0),
incomplete_message_(NULL),
@@ -1472,7 +1483,6 @@ Isolate::Isolate()
string_tracker_(NULL),
regexp_stack_(NULL),
date_cache_(NULL),
- embedder_data_(NULL),
context_exit_happened_(false) {
TRACE_ISOLATE(constructor);
@@ -1768,7 +1778,7 @@ bool Isolate::Init(Deserializer* des) {
global_handles_ = new GlobalHandles(this);
bootstrapper_ = new Bootstrapper();
handle_scope_implementer_ = new HandleScopeImplementer(this);
- stub_cache_ = new StubCache(this);
+ stub_cache_ = new StubCache(this, zone());
regexp_stack_ = new RegExpStack();
regexp_stack_->isolate_ = this;
date_cache_ = new DateCache();
@@ -1857,6 +1867,13 @@ bool Isolate::Init(Deserializer* des) {
LOG(this, LogCompiledFunctions());
}
+ CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, state_)),
+ Internals::kIsolateStateOffset);
+ CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, embedder_data_)),
+ Internals::kIsolateEmbedderDataOffset);
+ CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, heap_.roots_)),
+ Internals::kIsolateRootsOffset);
+
state_ = INITIALIZED;
time_millis_at_init_ = OS::TimeCurrentMillis();
return true;
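Isolate::Init() now verifies that state_, embedder_data_, and the heap roots sit at the offsets hard-coded in include/v8.h's Internals (kIsolateStateOffset, kIsolateEmbedderDataOffset, kIsolateRootsOffset), which is why those fields move to the top of Isolate in the header diff below: inlined API accessors read them directly without calling into V8. A compile-time flavour of the same guarantee on stand-in types (names and offsets below are assumptions for illustration, not the real v8.h constants):

#include <cstddef>

// Stand-ins for v8::internal::Internals and Isolate; the real constants are
// defined in include/v8.h and checked with CHECK_EQ at Init() time.
struct InternalsSketch {
  static const size_t kIsolateStateOffset = 0;
  static const size_t kIsolateEmbedderDataOffset = sizeof(void*);
};

struct IsolateSketch {
  void* state_;          // padded to pointer size in the real class
  void* embedder_data_;  // read directly by inlined API accessors
};

static_assert(offsetof(IsolateSketch, state_) ==
                  InternalsSketch::kIsolateStateOffset,
              "state_ must stay where the API headers expect it");
static_assert(offsetof(IsolateSketch, embedder_data_) ==
                  InternalsSketch::kIsolateEmbedderDataOffset,
              "embedder_data_ must stay where the API headers expect it");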
diff --git a/deps/v8/src/isolate.h b/deps/v8/src/isolate.h
index 2ff131840f..5ca2b87f0e 100644
--- a/deps/v8/src/isolate.h
+++ b/deps/v8/src/isolate.h
@@ -315,7 +315,7 @@ class ThreadLocalTop BASE_EMBEDDED {
V(uint32_t, private_random_seed, 2) \
ISOLATE_INIT_DEBUG_ARRAY_LIST(V)
-typedef List<HeapObject*, PreallocatedStorage> DebugObjectCache;
+typedef List<HeapObject*, PreallocatedStorageAllocationPolicy> DebugObjectCache;
#define ISOLATE_INIT_LIST(V) \
/* SerializerDeserializer state. */ \
@@ -422,7 +422,7 @@ class Isolate {
enum AddressId {
#define DECLARE_ENUM(CamelName, hacker_name) k##CamelName##Address,
FOR_EACH_ISOLATE_ADDRESS_NAME(DECLARE_ENUM)
-#undef C
+#undef DECLARE_ENUM
kIsolateAddressCount
};
@@ -578,6 +578,20 @@ class Isolate {
MaybeObject** scheduled_exception_address() {
return &thread_local_top_.scheduled_exception_;
}
+
+ Address pending_message_obj_address() {
+ return reinterpret_cast<Address>(&thread_local_top_.pending_message_obj_);
+ }
+
+ Address has_pending_message_address() {
+ return reinterpret_cast<Address>(&thread_local_top_.has_pending_message_);
+ }
+
+ Address pending_message_script_address() {
+ return reinterpret_cast<Address>(
+ &thread_local_top_.pending_message_script_);
+ }
+
MaybeObject* scheduled_exception() {
ASSERT(has_scheduled_exception());
return thread_local_top_.scheduled_exception_;
@@ -708,7 +722,7 @@ class Isolate {
// Re-throw an exception. This involves no error reporting since
// error reporting was handled when the exception was thrown
// originally.
- Failure* ReThrow(MaybeObject* exception, MessageLocation* location = NULL);
+ Failure* ReThrow(MaybeObject* exception);
void ScheduleThrow(Object* exception);
void ReportPendingMessages();
Failure* ThrowIllegalOperation();
@@ -965,7 +979,7 @@ class Isolate {
// SerializerDeserializer state.
static const int kPartialSnapshotCacheCapacity = 1400;
- static const int kJSRegexpStaticOffsetsVectorSize = 50;
+ static const int kJSRegexpStaticOffsetsVectorSize = 128;
Address external_callback() {
return thread_local_top_.external_callback_;
@@ -1038,6 +1052,18 @@ class Isolate {
friend struct GlobalState;
friend struct InitializeGlobalState;
+ enum State {
+ UNINITIALIZED, // Some components may not have been allocated.
+ INITIALIZED // All components are fully initialized.
+ };
+
+ // These fields are accessed through the API, offsets must be kept in sync
+ // with v8::internal::Internals (in include/v8.h) constants. This is also
+ // verified in Isolate::Init() using runtime checks.
+ State state_; // Will be padded to kApiPointerSize.
+ void* embedder_data_;
+ Heap heap_;
+
// The per-process lock should be acquired before the ThreadDataTable is
// modified.
class ThreadDataTable {
@@ -1095,14 +1121,6 @@ class Isolate {
static void SetIsolateThreadLocals(Isolate* isolate,
PerIsolateThreadData* data);
- enum State {
- UNINITIALIZED, // Some components may not have been allocated.
- INITIALIZED // All components are fully initialized.
- };
-
- State state_;
- EntryStackItem* entry_stack_;
-
// Allocate and insert PerIsolateThreadData into the ThreadDataTable
// (regardless of whether such data already exists).
PerIsolateThreadData* AllocatePerIsolateThreadData(ThreadId thread_id);
@@ -1146,13 +1164,13 @@ class Isolate {
// the Error object.
bool IsErrorObject(Handle<Object> obj);
+ EntryStackItem* entry_stack_;
int stack_trace_nesting_level_;
StringStream* incomplete_message_;
// The preallocated memory thread singleton.
PreallocatedMemoryThread* preallocated_memory_thread_;
Address isolate_addresses_[kIsolateAddressCount + 1]; // NOLINT
NoAllocationStringAllocator* preallocated_message_space_;
-
Bootstrapper* bootstrapper_;
RuntimeProfiler* runtime_profiler_;
CompilationCache* compilation_cache_;
@@ -1161,7 +1179,6 @@ class Isolate {
Mutex* break_access_;
Atomic32 debugger_initialized_;
Mutex* debugger_access_;
- Heap heap_;
Logger* logger_;
StackGuard stack_guard_;
StatsTable* stats_table_;
@@ -1202,11 +1219,8 @@ class Isolate {
unibrow::Mapping<unibrow::Ecma262Canonicalize>
regexp_macro_assembler_canonicalize_;
RegExpStack* regexp_stack_;
-
DateCache* date_cache_;
-
unibrow::Mapping<unibrow::Ecma262Canonicalize> interp_canonicalize_mapping_;
- void* embedder_data_;
// The garbage collector should be a little more aggressive when it knows
// that a context was recently exited.
@@ -1394,7 +1408,6 @@ class PostponeInterruptsScope BASE_EMBEDDED {
#define HEAP (v8::internal::Isolate::Current()->heap())
#define FACTORY (v8::internal::Isolate::Current()->factory())
#define ISOLATE (v8::internal::Isolate::Current())
-#define ZONE (v8::internal::Isolate::Current()->zone())
#define LOGGER (v8::internal::Isolate::Current()->logger())
diff --git a/deps/v8/src/json-parser.h b/deps/v8/src/json-parser.h
index d22cd0da3a..7265165ac1 100644
--- a/deps/v8/src/json-parser.h
+++ b/deps/v8/src/json-parser.h
@@ -43,15 +43,15 @@ namespace internal {
template <bool seq_ascii>
class JsonParser BASE_EMBEDDED {
public:
- static Handle<Object> Parse(Handle<String> source) {
- return JsonParser().ParseJson(source);
+ static Handle<Object> Parse(Handle<String> source, Zone* zone) {
+ return JsonParser().ParseJson(source, zone);
}
static const int kEndOfString = -1;
private:
// Parse a string containing a single JSON value.
- Handle<Object> ParseJson(Handle<String> source);
+ Handle<Object> ParseJson(Handle<String> source, Zone* zone);
inline void Advance() {
position_++;
@@ -149,6 +149,7 @@ class JsonParser BASE_EMBEDDED {
}
inline Isolate* isolate() { return isolate_; }
+ inline Zone* zone() const { return zone_; }
static const int kInitialSpecialStringLength = 1024;
@@ -161,11 +162,14 @@ class JsonParser BASE_EMBEDDED {
Isolate* isolate_;
uc32 c0_;
int position_;
+ Zone* zone_;
};
template <bool seq_ascii>
-Handle<Object> JsonParser<seq_ascii>::ParseJson(Handle<String> source) {
+Handle<Object> JsonParser<seq_ascii>::ParseJson(Handle<String> source,
+ Zone* zone) {
isolate_ = source->map()->GetHeap()->isolate();
+ zone_ = zone;
FlattenString(source);
source_ = source;
source_length_ = source_->length();
@@ -323,7 +327,7 @@ Handle<Object> JsonParser<seq_ascii>::ParseJsonObject() {
template <bool seq_ascii>
Handle<Object> JsonParser<seq_ascii>::ParseJsonArray() {
ZoneScope zone_scope(isolate(), DELETE_ON_EXIT);
- ZoneList<Handle<Object> > elements(4);
+ ZoneList<Handle<Object> > elements(4, zone());
ASSERT_EQ(c0_, '[');
AdvanceSkipWhitespace();
@@ -331,7 +335,7 @@ Handle<Object> JsonParser<seq_ascii>::ParseJsonArray() {
do {
Handle<Object> element = ParseJsonValue();
if (element.is_null()) return ReportUnexpectedCharacter();
- elements.Add(element);
+ elements.Add(element, zone());
} while (MatchSkipWhiteSpace(','));
if (c0_ != ']') {
return ReportUnexpectedCharacter();
diff --git a/deps/v8/src/jsregexp.cc b/deps/v8/src/jsregexp.cc
index 8ccbae49ce..cd51db80a4 100644
--- a/deps/v8/src/jsregexp.cc
+++ b/deps/v8/src/jsregexp.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -108,6 +108,60 @@ static inline void ThrowRegExpException(Handle<JSRegExp> re,
}
+ContainedInLattice AddRange(ContainedInLattice containment,
+ const int* ranges,
+ int ranges_length,
+ Interval new_range) {
+ ASSERT((ranges_length & 1) == 1);
+ ASSERT(ranges[ranges_length - 1] == String::kMaxUtf16CodeUnit + 1);
+ if (containment == kLatticeUnknown) return containment;
+ bool inside = false;
+ int last = 0;
+ for (int i = 0; i < ranges_length; inside = !inside, last = ranges[i], i++) {
+ // Consider the range from last to ranges[i].
+ // We haven't got to the new range yet.
+ if (ranges[i] <= new_range.from()) continue;
+ // New range is wholly inside last-ranges[i]. Note that new_range.to() is
+ // inclusive, but the values in ranges are not.
+ if (last <= new_range.from() && new_range.to() < ranges[i]) {
+ return Combine(containment, inside ? kLatticeIn : kLatticeOut);
+ }
+ return kLatticeUnknown;
+ }
+ return containment;
+}
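
A small worked example of the AddRange walk above, specialized to one boundary array of the kind introduced later in this patch (ascending edges, inclusive from / exclusive to, last edge one past the maximum code unit). This is a standalone illustration, not part of the patch; the real ContainedInLattice enum and Combine() live in jsregexp.h:

#include <cassert>

// Minimal stand-in for ContainedInLattice.
enum ContainedIn { kIn, kOut, kUnknown };

// Same walk as AddRange above, specialized to the digit boundary array
// { '0', '9' + 1, 0x10000 }: windows between edges alternate out/in/out.
static ContainedIn ClassifyAgainstDigits(int from, int to) {
  static const int ranges[] = { '0', '9' + 1, 0x10000 };
  bool inside = false;
  int last = 0;
  for (int i = 0; i < 3; inside = !inside, last = ranges[i], i++) {
    if (ranges[i] <= from) continue;          // Not at the new range yet.
    if (last <= from && to < ranges[i]) {     // Wholly inside one window.
      return inside ? kIn : kOut;
    }
    return kUnknown;                          // Straddles an edge.
  }
  return kUnknown;
}

int main() {
  assert(ClassifyAgainstDigits('2', '7') == kIn);       // inside [0-9]
  assert(ClassifyAgainstDigits('a', 'z') == kOut);      // outside [0-9]
  assert(ClassifyAgainstDigits('5', 'a') == kUnknown);  // crosses '9' + 1
  return 0;
}
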
+
+
+// More makes code generation slower, less makes V8 benchmark score lower.
+const int kMaxLookaheadForBoyerMoore = 8;
+// In a 3-character pattern you can maximally step forwards 3 characters
+// at a time, which is not always enough to pay for the extra logic.
+const int kPatternTooShortForBoyerMoore = 2;
+
+
+// Identifies the sort of regexps where the regexp engine is faster
+// than the code used for atom matches.
+static bool HasFewDifferentCharacters(Handle<String> pattern) {
+ int length = Min(kMaxLookaheadForBoyerMoore, pattern->length());
+ if (length <= kPatternTooShortForBoyerMoore) return false;
+ const int kMod = 128;
+ bool character_found[kMod];
+ int different = 0;
+ memset(&character_found[0], 0, sizeof(character_found));
+ for (int i = 0; i < length; i++) {
+ int ch = (pattern->Get(i) & (kMod - 1));
+ if (!character_found[ch]) {
+ character_found[ch] = true;
+ different++;
+ // We declare a regexp low-alphabet if it has at least 3 times as many
+ // characters as it has different characters.
+ if (different * 3 > length) return false;
+ }
+ }
+ return true;
+}
+
+
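
A standalone sketch of the low-alphabet test above, assuming a plain std::string pattern instead of a V8 String handle; it flags patterns whose inspected prefix is at least three times longer than its distinct-character count (mod 128). Illustration only, not part of the patch:

#include <algorithm>
#include <cassert>
#include <cstring>
#include <string>

// Restates HasFewDifferentCharacters; the lookahead and short-pattern cutoffs
// mirror kMaxLookaheadForBoyerMoore and kPatternTooShortForBoyerMoore above.
static bool LooksLowAlphabet(const std::string& pattern) {
  const int kMaxLookahead = 8;
  const int kTooShort = 2;
  const int kMod = 128;
  int length = std::min<int>(kMaxLookahead, static_cast<int>(pattern.size()));
  if (length <= kTooShort) return false;
  bool seen[kMod];
  std::memset(seen, 0, sizeof(seen));
  int different = 0;
  for (int i = 0; i < length; i++) {
    int ch = pattern[i] & (kMod - 1);
    if (!seen[ch]) {
      seen[ch] = true;
      different++;
      if (different * 3 > length) return false;  // Alphabet too varied.
    }
  }
  return true;
}

int main() {
  assert(LooksLowAlphabet("aaaaaaaa"));   // 1 distinct char in 8 -> low.
  assert(LooksLowAlphabet("abababab"));   // 2 distinct chars in 8 -> low.
  assert(!LooksLowAlphabet("abcdefgh"));  // too many distinct chars.
  assert(!LooksLowAlphabet("ab"));        // too short to matter.
  return 0;
}
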
// Generic RegExp methods. Dispatches to implementation specific methods.
@@ -141,9 +195,14 @@ Handle<Object> RegExpImpl::Compile(Handle<JSRegExp> re,
return Handle<Object>::null();
}
- if (parse_result.simple && !flags.is_ignore_case()) {
+ bool has_been_compiled = false;
+
+ if (parse_result.simple &&
+ !flags.is_ignore_case() &&
+ !HasFewDifferentCharacters(pattern)) {
// Parse-tree is a single atom that is equal to the pattern.
AtomCompile(re, pattern, flags, pattern);
+ has_been_compiled = true;
} else if (parse_result.tree->IsAtom() &&
!flags.is_ignore_case() &&
parse_result.capture_count == 0) {
@@ -151,8 +210,12 @@ Handle<Object> RegExpImpl::Compile(Handle<JSRegExp> re,
Vector<const uc16> atom_pattern = atom->data();
Handle<String> atom_string =
isolate->factory()->NewStringFromTwoByte(atom_pattern);
- AtomCompile(re, pattern, flags, atom_string);
- } else {
+ if (!HasFewDifferentCharacters(atom_string)) {
+ AtomCompile(re, pattern, flags, atom_string);
+ has_been_compiled = true;
+ }
+ }
+ if (!has_been_compiled) {
IrregexpInitialize(re, pattern, flags, parse_result.capture_count);
}
ASSERT(re->data()->IsFixedArray());
@@ -168,13 +231,14 @@ Handle<Object> RegExpImpl::Compile(Handle<JSRegExp> re,
Handle<Object> RegExpImpl::Exec(Handle<JSRegExp> regexp,
Handle<String> subject,
int index,
- Handle<JSArray> last_match_info) {
+ Handle<JSArray> last_match_info,
+ Zone* zone) {
switch (regexp->TypeTag()) {
case JSRegExp::ATOM:
return AtomExec(regexp, subject, index, last_match_info);
case JSRegExp::IRREGEXP: {
Handle<Object> result =
- IrregexpExec(regexp, subject, index, last_match_info);
+ IrregexpExec(regexp, subject, index, last_match_info, zone);
ASSERT(!result.is_null() ||
regexp->GetIsolate()->has_pending_exception());
return result;
@@ -261,7 +325,7 @@ Handle<Object> RegExpImpl::AtomExec(Handle<JSRegExp> re,
index)));
if (index == -1) return isolate->factory()->null_value();
}
- ASSERT(last_match_info->HasFastElements());
+ ASSERT(last_match_info->HasFastObjectElements());
{
NoHandleAllocation no_handles;
@@ -280,7 +344,9 @@ Handle<Object> RegExpImpl::AtomExec(Handle<JSRegExp> re,
// from the source pattern.
// If compilation fails, an exception is thrown and this function
// returns false.
-bool RegExpImpl::EnsureCompiledIrregexp(Handle<JSRegExp> re, bool is_ascii) {
+bool RegExpImpl::EnsureCompiledIrregexp(
+ Handle<JSRegExp> re, Handle<String> sample_subject, bool is_ascii,
+ Zone* zone) {
Object* compiled_code = re->DataAt(JSRegExp::code_index(is_ascii));
#ifdef V8_INTERPRETED_REGEXP
if (compiled_code->IsByteArray()) return true;
@@ -296,7 +362,7 @@ bool RegExpImpl::EnsureCompiledIrregexp(Handle<JSRegExp> re, bool is_ascii) {
ASSERT(compiled_code->IsSmi());
return true;
}
- return CompileIrregexp(re, is_ascii);
+ return CompileIrregexp(re, sample_subject, is_ascii, zone);
}
@@ -316,7 +382,10 @@ static bool CreateRegExpErrorObjectAndThrow(Handle<JSRegExp> re,
}
-bool RegExpImpl::CompileIrregexp(Handle<JSRegExp> re, bool is_ascii) {
+bool RegExpImpl::CompileIrregexp(Handle<JSRegExp> re,
+ Handle<String> sample_subject,
+ bool is_ascii,
+ Zone* zone) {
// Compile the RegExp.
Isolate* isolate = re->GetIsolate();
ZoneScope zone_scope(isolate, DELETE_ON_EXIT);
@@ -363,9 +432,12 @@ bool RegExpImpl::CompileIrregexp(Handle<JSRegExp> re, bool is_ascii) {
RegExpEngine::CompilationResult result =
RegExpEngine::Compile(&compile_data,
flags.is_ignore_case(),
+ flags.is_global(),
flags.is_multiline(),
pattern,
- is_ascii);
+ sample_subject,
+ is_ascii,
+ zone);
if (result.error_message != NULL) {
// Unable to compile regexp.
Handle<String> error_message =
@@ -430,12 +502,13 @@ void RegExpImpl::IrregexpInitialize(Handle<JSRegExp> re,
int RegExpImpl::IrregexpPrepare(Handle<JSRegExp> regexp,
- Handle<String> subject) {
+ Handle<String> subject,
+ Zone* zone) {
if (!subject->IsFlat()) FlattenString(subject);
// Check the asciiness of the underlying storage.
bool is_ascii = subject->IsAsciiRepresentationUnderneath();
- if (!EnsureCompiledIrregexp(regexp, is_ascii)) return -1;
+ if (!EnsureCompiledIrregexp(regexp, subject, is_ascii, zone)) return -1;
#ifdef V8_INTERPRETED_REGEXP
// Byte-code regexp needs space allocated for all its registers.
@@ -448,11 +521,28 @@ int RegExpImpl::IrregexpPrepare(Handle<JSRegExp> regexp,
}
-RegExpImpl::IrregexpResult RegExpImpl::IrregexpExecOnce(
+int RegExpImpl::GlobalOffsetsVectorSize(Handle<JSRegExp> regexp,
+ int registers_per_match,
+ int* max_matches) {
+#ifdef V8_INTERPRETED_REGEXP
+ // Global loop in interpreted regexp is not implemented. Therefore we choose
+ // the size of the offsets vector so that it can only store one match.
+ *max_matches = 1;
+ return registers_per_match;
+#else // V8_INTERPRETED_REGEXP
+ int size = Max(registers_per_match, OffsetsVector::kStaticOffsetsVectorSize);
+ *max_matches = size / registers_per_match;
+ return size;
+#endif // V8_INTERPRETED_REGEXP
+}
+
+
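
A brief worked example of the sizing arithmetic in the non-interpreted branch above, with an assumed 128-entry static offsets vector (illustrative only; the real value comes from OffsetsVector::kStaticOffsetsVectorSize):

#include <algorithm>
#include <cstdio>

// Sketch of the non-interpreted branch of GlobalOffsetsVectorSize.
static int GlobalVectorSize(int registers_per_match, int* max_matches) {
  const int kAssumedStaticSize = 128;  // assumption for illustration
  int size = std::max(registers_per_match, kAssumedStaticSize);
  *max_matches = size / registers_per_match;
  return size;
}

int main() {
  int max_matches = 0;
  // A regexp with two captures needs (2 + 1) * 2 = 6 registers per match
  // (see the capture_register_count computation in IrregexpExec below),
  // so a 128-entry vector can hold 21 whole matches per call.
  int size = GlobalVectorSize(6, &max_matches);
  std::printf("size=%d max_matches=%d\n", size, max_matches);  // 128, 21
  return 0;
}
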
+int RegExpImpl::IrregexpExecRaw(
Handle<JSRegExp> regexp,
Handle<String> subject,
int index,
- Vector<int> output) {
+ Vector<int> output,
+ Zone* zone) {
Isolate* isolate = regexp->GetIsolate();
Handle<FixedArray> irregexp(FixedArray::cast(regexp->data()), isolate);
@@ -466,7 +556,7 @@ RegExpImpl::IrregexpResult RegExpImpl::IrregexpExecOnce(
#ifndef V8_INTERPRETED_REGEXP
ASSERT(output.length() >= (IrregexpNumberOfCaptures(*irregexp) + 1) * 2);
do {
- EnsureCompiledIrregexp(regexp, is_ascii);
+ EnsureCompiledIrregexp(regexp, subject, is_ascii, zone);
Handle<Code> code(IrregexpNativeCode(*irregexp, is_ascii), isolate);
NativeRegExpMacroAssembler::Result res =
NativeRegExpMacroAssembler::Match(code,
@@ -492,7 +582,7 @@ RegExpImpl::IrregexpResult RegExpImpl::IrregexpExecOnce(
// the, potentially, different subject (the string can switch between
// being internal and external, and even between being ASCII and UC16,
// but the characters are always the same).
- IrregexpPrepare(regexp, subject);
+ IrregexpPrepare(regexp, subject, zone);
is_ascii = subject->IsAsciiRepresentationUnderneath();
} while (true);
UNREACHABLE();
@@ -527,7 +617,8 @@ RegExpImpl::IrregexpResult RegExpImpl::IrregexpExecOnce(
Handle<Object> RegExpImpl::IrregexpExec(Handle<JSRegExp> jsregexp,
Handle<String> subject,
int previous_index,
- Handle<JSArray> last_match_info) {
+ Handle<JSArray> last_match_info,
+ Zone* zone) {
Isolate* isolate = jsregexp->GetIsolate();
ASSERT_EQ(jsregexp->TypeTag(), JSRegExp::IRREGEXP);
@@ -541,7 +632,7 @@ Handle<Object> RegExpImpl::IrregexpExec(Handle<JSRegExp> jsregexp,
}
#endif
#endif
- int required_registers = RegExpImpl::IrregexpPrepare(jsregexp, subject);
+ int required_registers = RegExpImpl::IrregexpPrepare(jsregexp, subject, zone);
if (required_registers < 0) {
// Compiling failed with an exception.
ASSERT(isolate->has_pending_exception());
@@ -550,9 +641,10 @@ Handle<Object> RegExpImpl::IrregexpExec(Handle<JSRegExp> jsregexp,
OffsetsVector registers(required_registers, isolate);
- IrregexpResult res = RegExpImpl::IrregexpExecOnce(
- jsregexp, subject, previous_index, Vector<int>(registers.vector(),
- registers.length()));
+ int res = RegExpImpl::IrregexpExecRaw(jsregexp, subject, previous_index,
+ Vector<int>(registers.vector(),
+ registers.length()),
+ zone);
if (res == RE_SUCCESS) {
int capture_register_count =
(IrregexpNumberOfCaptures(FixedArray::cast(jsregexp->data())) + 1) * 2;
@@ -728,24 +820,24 @@ Handle<Object> RegExpImpl::IrregexpExec(Handle<JSRegExp> jsregexp,
// the event that code generation is requested for an identical trace.
-void RegExpTree::AppendToText(RegExpText* text) {
+void RegExpTree::AppendToText(RegExpText* text, Zone* zone) {
UNREACHABLE();
}
-void RegExpAtom::AppendToText(RegExpText* text) {
- text->AddElement(TextElement::Atom(this));
+void RegExpAtom::AppendToText(RegExpText* text, Zone* zone) {
+ text->AddElement(TextElement::Atom(this), zone);
}
-void RegExpCharacterClass::AppendToText(RegExpText* text) {
- text->AddElement(TextElement::CharClass(this));
+void RegExpCharacterClass::AppendToText(RegExpText* text, Zone* zone) {
+ text->AddElement(TextElement::CharClass(this), zone);
}
-void RegExpText::AppendToText(RegExpText* text) {
+void RegExpText::AppendToText(RegExpText* text, Zone* zone) {
for (int i = 0; i < elements()->length(); i++)
- text->AddElement(elements()->at(i));
+ text->AddElement(elements()->at(i), zone);
}
@@ -776,17 +868,65 @@ int TextElement::length() {
DispatchTable* ChoiceNode::GetTable(bool ignore_case) {
if (table_ == NULL) {
- table_ = new DispatchTable();
- DispatchTableConstructor cons(table_, ignore_case);
+ table_ = new(zone()) DispatchTable(zone());
+ DispatchTableConstructor cons(table_, ignore_case, zone());
cons.BuildTable(this);
}
return table_;
}
+class FrequencyCollator {
+ public:
+ FrequencyCollator() : total_samples_(0) {
+ for (int i = 0; i < RegExpMacroAssembler::kTableSize; i++) {
+ frequencies_[i] = CharacterFrequency(i);
+ }
+ }
+
+ void CountCharacter(int character) {
+ int index = (character & RegExpMacroAssembler::kTableMask);
+ frequencies_[index].Increment();
+ total_samples_++;
+ }
+
+ // Does not measure in percent, but rather per-128 (the table size from the
+ // regexp macro assembler).
+ int Frequency(int in_character) {
+ ASSERT((in_character & RegExpMacroAssembler::kTableMask) == in_character);
+ if (total_samples_ < 1) return 1; // Division by zero.
+ int freq_in_per128 =
+ (frequencies_[in_character].counter() * 128) / total_samples_;
+ return freq_in_per128;
+ }
+
+ private:
+ class CharacterFrequency {
+ public:
+ CharacterFrequency() : counter_(0), character_(-1) { }
+ explicit CharacterFrequency(int character)
+ : counter_(0), character_(character) { }
+
+ void Increment() { counter_++; }
+ int counter() { return counter_; }
+ int character() { return character_; }
+
+ private:
+ int counter_;
+ int character_;
+ };
+
+
+ private:
+ CharacterFrequency frequencies_[RegExpMacroAssembler::kTableSize];
+ int total_samples_;
+};
+
+
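
A small usage sketch of the per-128 frequency idea in FrequencyCollator above, assuming the 128-entry table size mentioned in the comment (counts are bucketed modulo the table size and reported per-128 rather than as a percentage). Standalone illustration, not part of the patch:

#include <cassert>
#include <string>

class ToyCollator {
 public:
  ToyCollator() : total_(0) {
    for (int i = 0; i < kTableSize; i++) counts_[i] = 0;
  }
  void CountCharacter(int c) { counts_[c & kTableMask]++; total_++; }
  int Frequency(int c) const {
    if (total_ < 1) return 1;  // Avoid division by zero, as above.
    return (counts_[c & kTableMask] * 128) / total_;
  }
 private:
  static const int kTableSize = 128;  // assumed table size
  static const int kTableMask = kTableSize - 1;
  int counts_[kTableSize];
  int total_;
};

int main() {
  ToyCollator collator;
  std::string sample = "abracadabra";
  for (size_t i = 0; i < sample.size(); i++) collator.CountCharacter(sample[i]);
  // 'a' appears 5 times in 11 samples: (5 * 128) / 11 == 58 per-128.
  assert(collator.Frequency('a') == 58);
  // 'z' never appears, so its per-128 frequency is 0.
  assert(collator.Frequency('z') == 0);
  return 0;
}
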
class RegExpCompiler {
public:
- RegExpCompiler(int capture_count, bool ignore_case, bool is_ascii);
+ RegExpCompiler(int capture_count, bool ignore_case, bool is_ascii,
+ Zone* zone);
int AllocateRegister() {
if (next_register_ >= RegExpMacroAssembler::kMaxRegister) {
@@ -819,12 +959,15 @@ class RegExpCompiler {
inline bool ignore_case() { return ignore_case_; }
inline bool ascii() { return ascii_; }
+ FrequencyCollator* frequency_collator() { return &frequency_collator_; }
int current_expansion_factor() { return current_expansion_factor_; }
void set_current_expansion_factor(int value) {
current_expansion_factor_ = value;
}
+ Zone* zone() const { return zone_; }
+
static const int kNoRegister = -1;
private:
@@ -837,6 +980,8 @@ class RegExpCompiler {
bool ascii_;
bool reg_exp_too_big_;
int current_expansion_factor_;
+ FrequencyCollator frequency_collator_;
+ Zone* zone_;
};
@@ -858,15 +1003,18 @@ static RegExpEngine::CompilationResult IrregexpRegExpTooBig() {
// Attempts to compile the regexp using an Irregexp code generator. Returns
// a fixed array or a null handle depending on whether it succeeded.
-RegExpCompiler::RegExpCompiler(int capture_count, bool ignore_case, bool ascii)
+RegExpCompiler::RegExpCompiler(int capture_count, bool ignore_case, bool ascii,
+ Zone* zone)
: next_register_(2 * (capture_count + 1)),
work_list_(NULL),
recursion_depth_(0),
ignore_case_(ignore_case),
ascii_(ascii),
reg_exp_too_big_(false),
- current_expansion_factor_(1) {
- accept_ = new EndNode(EndNode::ACCEPT);
+ current_expansion_factor_(1),
+ frequency_collator_(),
+ zone_(zone) {
+ accept_ = new(zone) EndNode(EndNode::ACCEPT, zone);
ASSERT(next_register_ - 1 <= RegExpMacroAssembler::kMaxRegister);
}
@@ -962,7 +1110,8 @@ bool Trace::GetStoredPosition(int reg, int* cp_offset) {
}
-int Trace::FindAffectedRegisters(OutSet* affected_registers) {
+int Trace::FindAffectedRegisters(OutSet* affected_registers,
+ Zone* zone) {
int max_register = RegExpCompiler::kNoRegister;
for (DeferredAction* action = actions_;
action != NULL;
@@ -970,10 +1119,10 @@ int Trace::FindAffectedRegisters(OutSet* affected_registers) {
if (action->type() == ActionNode::CLEAR_CAPTURES) {
Interval range = static_cast<DeferredClearCaptures*>(action)->range();
for (int i = range.from(); i <= range.to(); i++)
- affected_registers->Set(i);
+ affected_registers->Set(i, zone);
if (range.to() > max_register) max_register = range.to();
} else {
- affected_registers->Set(action->reg());
+ affected_registers->Set(action->reg(), zone);
if (action->reg() > max_register) max_register = action->reg();
}
}
@@ -1002,7 +1151,8 @@ void Trace::PerformDeferredActions(RegExpMacroAssembler* assembler,
int max_register,
OutSet& affected_registers,
OutSet* registers_to_pop,
- OutSet* registers_to_clear) {
+ OutSet* registers_to_clear,
+ Zone* zone) {
// The "+1" is to avoid a push_limit of zero if stack_limit_slack() is 1.
const int push_limit = (assembler->stack_limit_slack() + 1) / 2;
@@ -1108,9 +1258,9 @@ void Trace::PerformDeferredActions(RegExpMacroAssembler* assembler,
}
assembler->PushRegister(reg, stack_check);
- registers_to_pop->Set(reg);
+ registers_to_pop->Set(reg, zone);
} else if (undo_action == CLEAR) {
- registers_to_clear->Set(reg);
+ registers_to_clear->Set(reg, zone);
}
// Perform the chronologically last action (or accumulated increment)
// for the register.
@@ -1156,14 +1306,16 @@ void Trace::Flush(RegExpCompiler* compiler, RegExpNode* successor) {
assembler->PushCurrentPosition();
}
- int max_register = FindAffectedRegisters(&affected_registers);
+ int max_register = FindAffectedRegisters(&affected_registers,
+ compiler->zone());
OutSet registers_to_pop;
OutSet registers_to_clear;
PerformDeferredActions(assembler,
max_register,
affected_registers,
&registers_to_pop,
- &registers_to_clear);
+ &registers_to_clear,
+ compiler->zone());
if (cp_offset_ != 0) {
assembler->AdvanceCurrentPosition(cp_offset_);
}
@@ -1240,17 +1392,18 @@ void EndNode::Emit(RegExpCompiler* compiler, Trace* trace) {
}
-void GuardedAlternative::AddGuard(Guard* guard) {
+void GuardedAlternative::AddGuard(Guard* guard, Zone* zone) {
if (guards_ == NULL)
- guards_ = new ZoneList<Guard*>(1);
- guards_->Add(guard);
+ guards_ = new(zone) ZoneList<Guard*>(1, zone);
+ guards_->Add(guard, zone);
}
ActionNode* ActionNode::SetRegister(int reg,
int val,
RegExpNode* on_success) {
- ActionNode* result = new ActionNode(SET_REGISTER, on_success);
+ ActionNode* result =
+ new(on_success->zone()) ActionNode(SET_REGISTER, on_success);
result->data_.u_store_register.reg = reg;
result->data_.u_store_register.value = val;
return result;
@@ -1258,7 +1411,8 @@ ActionNode* ActionNode::SetRegister(int reg,
ActionNode* ActionNode::IncrementRegister(int reg, RegExpNode* on_success) {
- ActionNode* result = new ActionNode(INCREMENT_REGISTER, on_success);
+ ActionNode* result =
+ new(on_success->zone()) ActionNode(INCREMENT_REGISTER, on_success);
result->data_.u_increment_register.reg = reg;
return result;
}
@@ -1267,7 +1421,8 @@ ActionNode* ActionNode::IncrementRegister(int reg, RegExpNode* on_success) {
ActionNode* ActionNode::StorePosition(int reg,
bool is_capture,
RegExpNode* on_success) {
- ActionNode* result = new ActionNode(STORE_POSITION, on_success);
+ ActionNode* result =
+ new(on_success->zone()) ActionNode(STORE_POSITION, on_success);
result->data_.u_position_register.reg = reg;
result->data_.u_position_register.is_capture = is_capture;
return result;
@@ -1276,7 +1431,8 @@ ActionNode* ActionNode::StorePosition(int reg,
ActionNode* ActionNode::ClearCaptures(Interval range,
RegExpNode* on_success) {
- ActionNode* result = new ActionNode(CLEAR_CAPTURES, on_success);
+ ActionNode* result =
+ new(on_success->zone()) ActionNode(CLEAR_CAPTURES, on_success);
result->data_.u_clear_captures.range_from = range.from();
result->data_.u_clear_captures.range_to = range.to();
return result;
@@ -1286,7 +1442,8 @@ ActionNode* ActionNode::ClearCaptures(Interval range,
ActionNode* ActionNode::BeginSubmatch(int stack_reg,
int position_reg,
RegExpNode* on_success) {
- ActionNode* result = new ActionNode(BEGIN_SUBMATCH, on_success);
+ ActionNode* result =
+ new(on_success->zone()) ActionNode(BEGIN_SUBMATCH, on_success);
result->data_.u_submatch.stack_pointer_register = stack_reg;
result->data_.u_submatch.current_position_register = position_reg;
return result;
@@ -1298,7 +1455,8 @@ ActionNode* ActionNode::PositiveSubmatchSuccess(int stack_reg,
int clear_register_count,
int clear_register_from,
RegExpNode* on_success) {
- ActionNode* result = new ActionNode(POSITIVE_SUBMATCH_SUCCESS, on_success);
+ ActionNode* result =
+ new(on_success->zone()) ActionNode(POSITIVE_SUBMATCH_SUCCESS, on_success);
result->data_.u_submatch.stack_pointer_register = stack_reg;
result->data_.u_submatch.current_position_register = position_reg;
result->data_.u_submatch.clear_register_count = clear_register_count;
@@ -1311,7 +1469,8 @@ ActionNode* ActionNode::EmptyMatchCheck(int start_register,
int repetition_register,
int repetition_limit,
RegExpNode* on_success) {
- ActionNode* result = new ActionNode(EMPTY_MATCH_CHECK, on_success);
+ ActionNode* result =
+ new(on_success->zone()) ActionNode(EMPTY_MATCH_CHECK, on_success);
result->data_.u_empty_match_check.start_register = start_register;
result->data_.u_empty_match_check.repetition_register = repetition_register;
result->data_.u_empty_match_check.repetition_limit = repetition_limit;
@@ -1534,14 +1693,370 @@ static inline bool EmitAtomLetter(Isolate* isolate,
}
+static void EmitBoundaryTest(RegExpMacroAssembler* masm,
+ int border,
+ Label* fall_through,
+ Label* above_or_equal,
+ Label* below) {
+ if (below != fall_through) {
+ masm->CheckCharacterLT(border, below);
+ if (above_or_equal != fall_through) masm->GoTo(above_or_equal);
+ } else {
+ masm->CheckCharacterGT(border - 1, above_or_equal);
+ }
+}
+
+
+static void EmitDoubleBoundaryTest(RegExpMacroAssembler* masm,
+ int first,
+ int last,
+ Label* fall_through,
+ Label* in_range,
+ Label* out_of_range) {
+ if (in_range == fall_through) {
+ if (first == last) {
+ masm->CheckNotCharacter(first, out_of_range);
+ } else {
+ masm->CheckCharacterNotInRange(first, last, out_of_range);
+ }
+ } else {
+ if (first == last) {
+ masm->CheckCharacter(first, in_range);
+ } else {
+ masm->CheckCharacterInRange(first, last, in_range);
+ }
+ if (out_of_range != fall_through) masm->GoTo(out_of_range);
+ }
+}
+
+
+// even_label is for ranges[i] to ranges[i + 1] where i - start_index is even.
+// odd_label is for ranges[i] to ranges[i + 1] where i - start_index is odd.
+static void EmitUseLookupTable(
+ RegExpMacroAssembler* masm,
+ ZoneList<int>* ranges,
+ int start_index,
+ int end_index,
+ int min_char,
+ Label* fall_through,
+ Label* even_label,
+ Label* odd_label) {
+ static const int kSize = RegExpMacroAssembler::kTableSize;
+ static const int kMask = RegExpMacroAssembler::kTableMask;
+
+ int base = (min_char & ~kMask);
+ USE(base);
+
+ // Assert that everything is on one kTableSize page.
+ for (int i = start_index; i <= end_index; i++) {
+ ASSERT_EQ(ranges->at(i) & ~kMask, base);
+ }
+ ASSERT(start_index == 0 || (ranges->at(start_index - 1) & ~kMask) <= base);
+
+ char templ[kSize];
+ Label* on_bit_set;
+ Label* on_bit_clear;
+ int bit;
+ if (even_label == fall_through) {
+ on_bit_set = odd_label;
+ on_bit_clear = even_label;
+ bit = 1;
+ } else {
+ on_bit_set = even_label;
+ on_bit_clear = odd_label;
+ bit = 0;
+ }
+ for (int i = 0; i < (ranges->at(start_index) & kMask) && i < kSize; i++) {
+ templ[i] = bit;
+ }
+ int j = 0;
+ bit ^= 1;
+ for (int i = start_index; i < end_index; i++) {
+ for (j = (ranges->at(i) & kMask); j < (ranges->at(i + 1) & kMask); j++) {
+ templ[j] = bit;
+ }
+ bit ^= 1;
+ }
+ for (int i = j; i < kSize; i++) {
+ templ[i] = bit;
+ }
+ // TODO(erikcorry): Cache these.
+ Handle<ByteArray> ba = FACTORY->NewByteArray(kSize, TENURED);
+ for (int i = 0; i < kSize; i++) {
+ ba->set(i, templ[i]);
+ }
+ masm->CheckBitInTable(ba, on_bit_set);
+ if (on_bit_clear != fall_through) masm->GoTo(on_bit_clear);
+}
+
+
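
A sketch of the bitmap construction in EmitUseLookupTable above, building the same alternating-bit table into a plain vector; label handling and the TENURED ByteArray are omitted, and the 128-entry table size is assumed. Not part of the patch:

#include <cassert>
#include <vector>

static std::vector<char> BuildTable(const std::vector<int>& ranges,
                                    int start_index, int end_index,
                                    int bit) {
  const int kSize = 128;
  const int kMask = kSize - 1;
  std::vector<char> templ(kSize);
  // Everything below the first edge gets the initial bit.
  for (int i = 0; i < (ranges[start_index] & kMask) && i < kSize; i++) {
    templ[i] = bit;
  }
  int j = 0;
  bit ^= 1;
  // Each window between consecutive edges flips the bit.
  for (int i = start_index; i < end_index; i++) {
    for (j = ranges[i] & kMask; j < (ranges[i + 1] & kMask); j++) {
      templ[j] = bit;
    }
    bit ^= 1;
  }
  for (int i = j; i < kSize; i++) templ[i] = bit;
  return templ;
}

int main() {
  // Boundaries for the digit class within one 128-character page:
  // below '0' -> bit 0, '0'..'9' -> bit 1, above '9' -> bit 0 again.
  std::vector<int> ranges;
  ranges.push_back('0');
  ranges.push_back('9' + 1);
  std::vector<char> table = BuildTable(ranges, 0, 1, 0);
  assert(table['/'] == 0 && table['5'] == 1 && table[':'] == 0);
  return 0;
}
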
+static void CutOutRange(RegExpMacroAssembler* masm,
+ ZoneList<int>* ranges,
+ int start_index,
+ int end_index,
+ int cut_index,
+ Label* even_label,
+ Label* odd_label) {
+ bool odd = (((cut_index - start_index) & 1) == 1);
+ Label* in_range_label = odd ? odd_label : even_label;
+ Label dummy;
+ EmitDoubleBoundaryTest(masm,
+ ranges->at(cut_index),
+ ranges->at(cut_index + 1) - 1,
+ &dummy,
+ in_range_label,
+ &dummy);
+ ASSERT(!dummy.is_linked());
+ // Cut out the single range by rewriting the array. This creates a new
+ // range that is a merger of the two ranges on either side of the one we
+ // are cutting out. The oddity of the labels is preserved.
+ for (int j = cut_index; j > start_index; j--) {
+ ranges->at(j) = ranges->at(j - 1);
+ }
+ for (int j = cut_index + 1; j < end_index; j++) {
+ ranges->at(j) = ranges->at(j + 1);
+ }
+}
+
+
+// Unicode case. Split the search space into kSize spaces that are handled
+// with recursion.
+static void SplitSearchSpace(ZoneList<int>* ranges,
+ int start_index,
+ int end_index,
+ int* new_start_index,
+ int* new_end_index,
+ int* border) {
+ static const int kSize = RegExpMacroAssembler::kTableSize;
+ static const int kMask = RegExpMacroAssembler::kTableMask;
+
+ int first = ranges->at(start_index);
+ int last = ranges->at(end_index) - 1;
+
+ *new_start_index = start_index;
+ *border = (ranges->at(start_index) & ~kMask) + kSize;
+ while (*new_start_index < end_index) {
+ if (ranges->at(*new_start_index) > *border) break;
+ (*new_start_index)++;
+ }
+ // new_start_index is the index of the first edge that is beyond the
+ // current kSize space.
+
+ // For very large search spaces we do a binary chop search of the non-ASCII
+ // space instead of just going to the end of the current kSize space. The
+ // heuristics are complicated a little by the fact that any 128-character
+ // encoding space can be quickly tested with a table lookup, so we don't
+ // wish to do binary chop search at a smaller granularity than that. A
+ // 128-character space can take up a lot of space in the ranges array if,
+ // for example, we only want to match every second character (e.g. the lower
+ // case characters on some Unicode pages).
+ int binary_chop_index = (end_index + start_index) / 2;
+ // The first test ensures that we get to the code that handles the ASCII
+ // range with a single not-taken branch, speeding up this important
+ // character range (even non-ASCII charset-based text has spaces and
+ // punctuation).
+ if (*border - 1 > String::kMaxAsciiCharCode && // ASCII case.
+ end_index - start_index > (*new_start_index - start_index) * 2 &&
+ last - first > kSize * 2 &&
+ binary_chop_index > *new_start_index &&
+ ranges->at(binary_chop_index) >= first + 2 * kSize) {
+ int scan_forward_for_section_border = binary_chop_index;
+ int new_border = (ranges->at(binary_chop_index) | kMask) + 1;
+
+ while (scan_forward_for_section_border < end_index) {
+ if (ranges->at(scan_forward_for_section_border) > new_border) {
+ *new_start_index = scan_forward_for_section_border;
+ *border = new_border;
+ break;
+ }
+ scan_forward_for_section_border++;
+ }
+ }
+
+ ASSERT(*new_start_index > start_index);
+ *new_end_index = *new_start_index - 1;
+ if (ranges->at(*new_end_index) == *border) {
+ (*new_end_index)--;
+ }
+ if (*border >= ranges->at(end_index)) {
+ *border = ranges->at(end_index);
+ *new_start_index = end_index; // Won't be used.
+ *new_end_index = end_index - 1;
+ }
+}
+
+
+// Gets a series of segment boundaries representing a character class. If the
+// character is in the range between an even and an odd boundary (counting from
+// start_index) then go to even_label, otherwise go to odd_label. We already
+// know that the character is in the range of min_char to max_char inclusive.
+// Either label can be NULL indicating backtracking. Either label can also be
+// equal to the fall_through label.
+static void GenerateBranches(RegExpMacroAssembler* masm,
+ ZoneList<int>* ranges,
+ int start_index,
+ int end_index,
+ uc16 min_char,
+ uc16 max_char,
+ Label* fall_through,
+ Label* even_label,
+ Label* odd_label) {
+ int first = ranges->at(start_index);
+ int last = ranges->at(end_index) - 1;
+
+ ASSERT_LT(min_char, first);
+
+ // Just need to test if the character is before or on-or-after
+ // a particular character.
+ if (start_index == end_index) {
+ EmitBoundaryTest(masm, first, fall_through, even_label, odd_label);
+ return;
+ }
+
+ // Another almost trivial case: There is one interval in the middle that is
+ // different from the end intervals.
+ if (start_index + 1 == end_index) {
+ EmitDoubleBoundaryTest(
+ masm, first, last, fall_through, even_label, odd_label);
+ return;
+ }
+
+ // It's not worth using table lookup if there are very few intervals in the
+ // character class.
+ if (end_index - start_index <= 6) {
+ // It is faster to test for individual characters, so we look for those
+ // first, then try arbitrary ranges in the second round.
+ static int kNoCutIndex = -1;
+ int cut = kNoCutIndex;
+ for (int i = start_index; i < end_index; i++) {
+ if (ranges->at(i) == ranges->at(i + 1) - 1) {
+ cut = i;
+ break;
+ }
+ }
+ if (cut == kNoCutIndex) cut = start_index;
+ CutOutRange(
+ masm, ranges, start_index, end_index, cut, even_label, odd_label);
+ ASSERT_GE(end_index - start_index, 2);
+ GenerateBranches(masm,
+ ranges,
+ start_index + 1,
+ end_index - 1,
+ min_char,
+ max_char,
+ fall_through,
+ even_label,
+ odd_label);
+ return;
+ }
+
+ // If there are a lot of intervals in the regexp, then we will use tables to
+ // determine whether the character is inside or outside the character class.
+ static const int kBits = RegExpMacroAssembler::kTableSizeBits;
+
+ if ((max_char >> kBits) == (min_char >> kBits)) {
+ EmitUseLookupTable(masm,
+ ranges,
+ start_index,
+ end_index,
+ min_char,
+ fall_through,
+ even_label,
+ odd_label);
+ return;
+ }
+
+ if ((min_char >> kBits) != (first >> kBits)) {
+ masm->CheckCharacterLT(first, odd_label);
+ GenerateBranches(masm,
+ ranges,
+ start_index + 1,
+ end_index,
+ first,
+ max_char,
+ fall_through,
+ odd_label,
+ even_label);
+ return;
+ }
+
+ int new_start_index = 0;
+ int new_end_index = 0;
+ int border = 0;
+
+ SplitSearchSpace(ranges,
+ start_index,
+ end_index,
+ &new_start_index,
+ &new_end_index,
+ &border);
+
+ Label handle_rest;
+ Label* above = &handle_rest;
+ if (border == last + 1) {
+ // We didn't find any section that started after the limit, so everything
+ // above the border is one of the terminal labels.
+ above = (end_index & 1) != (start_index & 1) ? odd_label : even_label;
+ ASSERT(new_end_index == end_index - 1);
+ }
+
+ ASSERT_LE(start_index, new_end_index);
+ ASSERT_LE(new_start_index, end_index);
+ ASSERT_LT(start_index, new_start_index);
+ ASSERT_LT(new_end_index, end_index);
+ ASSERT(new_end_index + 1 == new_start_index ||
+ (new_end_index + 2 == new_start_index &&
+ border == ranges->at(new_end_index + 1)));
+ ASSERT_LT(min_char, border - 1);
+ ASSERT_LT(border, max_char);
+ ASSERT_LT(ranges->at(new_end_index), border);
+ ASSERT(border < ranges->at(new_start_index) ||
+ (border == ranges->at(new_start_index) &&
+ new_start_index == end_index &&
+ new_end_index == end_index - 1 &&
+ border == last + 1));
+ ASSERT(new_start_index == 0 || border >= ranges->at(new_start_index - 1));
+
+ masm->CheckCharacterGT(border - 1, above);
+ Label dummy;
+ GenerateBranches(masm,
+ ranges,
+ start_index,
+ new_end_index,
+ min_char,
+ border - 1,
+ &dummy,
+ even_label,
+ odd_label);
+ if (handle_rest.is_linked()) {
+ masm->Bind(&handle_rest);
+ bool flip = (new_start_index & 1) != (start_index & 1);
+ GenerateBranches(masm,
+ ranges,
+ new_start_index,
+ end_index,
+ border,
+ max_char,
+ &dummy,
+ flip ? odd_label : even_label,
+ flip ? even_label : odd_label);
+ }
+}
+
+
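
A pure-function restatement of the even/odd routing contract documented above GenerateBranches: a character goes to even_label when it lies in a window that starts at an even-indexed boundary (counting from start_index), and characters below the first boundary route to odd_label. No macro-assembler code is emitted; this only mirrors the decision. Illustration only:

#include <cassert>
#include <vector>

static bool RoutesToEvenLabel(const std::vector<int>& boundaries, int c) {
  // Count the edges at or below c; an odd count means c sits in a window
  // opened by an even-indexed edge.
  int edges_at_or_below = 0;
  for (size_t i = 0; i < boundaries.size(); i++) {
    if (boundaries[i] <= c) edges_at_or_below++;
  }
  return (edges_at_or_below & 1) == 1;
}

int main() {
  // Boundary list for the digit class [0-9]: { '0', '9' + 1 }.
  std::vector<int> boundaries;
  boundaries.push_back('0');
  boundaries.push_back('9' + 1);
  assert(RoutesToEvenLabel(boundaries, '5'));   // inside  -> even_label
  assert(!RoutesToEvenLabel(boundaries, '/'));  // below   -> odd_label
  assert(!RoutesToEvenLabel(boundaries, 'a'));  // above   -> odd_label
  return 0;
}
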
static void EmitCharClass(RegExpMacroAssembler* macro_assembler,
RegExpCharacterClass* cc,
bool ascii,
Label* on_failure,
int cp_offset,
bool check_offset,
- bool preloaded) {
- ZoneList<CharacterRange>* ranges = cc->ranges();
+ bool preloaded,
+ Zone* zone) {
+ ZoneList<CharacterRange>* ranges = cc->ranges(zone);
+ if (!CharacterRange::IsCanonical(ranges)) {
+ CharacterRange::Canonicalize(ranges);
+ }
+
int max_char;
if (ascii) {
max_char = String::kMaxAsciiCharCode;
@@ -1549,11 +2064,6 @@ static void EmitCharClass(RegExpMacroAssembler* macro_assembler,
max_char = String::kMaxUtf16CodeUnit;
}
- Label success;
-
- Label* char_is_in_class =
- cc->is_negated() ? on_failure : &success;
-
int range_count = ranges->length();
int last_valid_range = range_count - 1;
@@ -1567,8 +2077,6 @@ static void EmitCharClass(RegExpMacroAssembler* macro_assembler,
if (last_valid_range < 0) {
if (!cc->is_negated()) {
- // TODO(plesner): We can remove this when the node level does our
- // ASCII optimizations for us.
macro_assembler->GoTo(on_failure);
}
if (check_offset) {
@@ -1578,6 +2086,18 @@ static void EmitCharClass(RegExpMacroAssembler* macro_assembler,
}
if (last_valid_range == 0 &&
+ ranges->at(0).IsEverything(max_char)) {
+ if (cc->is_negated()) {
+ macro_assembler->GoTo(on_failure);
+ } else {
+ // This is a common case hit by non-anchored expressions.
+ if (check_offset) {
+ macro_assembler->CheckPosition(cp_offset, on_failure);
+ }
+ }
+ return;
+ }
+ if (last_valid_range == 0 &&
!cc->is_negated() &&
ranges->at(0).IsEverything(max_char)) {
// This is a common case hit by non-anchored expressions.
@@ -1591,70 +2111,50 @@ static void EmitCharClass(RegExpMacroAssembler* macro_assembler,
macro_assembler->LoadCurrentCharacter(cp_offset, on_failure, check_offset);
}
- if (cc->is_standard() &&
+ if (cc->is_standard(zone) &&
macro_assembler->CheckSpecialCharacterClass(cc->standard_type(),
on_failure)) {
return;
}
- for (int i = 0; i < last_valid_range; i++) {
- CharacterRange& range = ranges->at(i);
- Label next_range;
- uc16 from = range.from();
- uc16 to = range.to();
- if (from > max_char) {
- continue;
- }
- if (to > max_char) to = max_char;
- if (to == from) {
- macro_assembler->CheckCharacter(to, char_is_in_class);
- } else {
- if (from != 0) {
- macro_assembler->CheckCharacterLT(from, &next_range);
- }
- if (to != max_char) {
- macro_assembler->CheckCharacterLT(to + 1, char_is_in_class);
- } else {
- macro_assembler->GoTo(char_is_in_class);
- }
- }
- macro_assembler->Bind(&next_range);
- }
- CharacterRange& range = ranges->at(last_valid_range);
- uc16 from = range.from();
- uc16 to = range.to();
+ // A new list with ascending entries. Each entry is a code unit
+ // where there is a boundary between code units that are part of
+ // the class and code units that are not. Normally we insert an
+ // entry at zero which goes to the failure label, but if there
+ // was already one there we fall through for success on that entry.
+ // Subsequent entries have alternating meaning (success/failure).
+ ZoneList<int>* range_boundaries =
+ new(zone) ZoneList<int>(last_valid_range, zone);
- if (to > max_char) to = max_char;
- ASSERT(to >= from);
+ bool zeroth_entry_is_failure = !cc->is_negated();
- if (to == from) {
- if (cc->is_negated()) {
- macro_assembler->CheckCharacter(to, on_failure);
- } else {
- macro_assembler->CheckNotCharacter(to, on_failure);
- }
- } else {
- if (from != 0) {
- if (cc->is_negated()) {
- macro_assembler->CheckCharacterLT(from, &success);
- } else {
- macro_assembler->CheckCharacterLT(from, on_failure);
- }
- }
- if (to != String::kMaxUtf16CodeUnit) {
- if (cc->is_negated()) {
- macro_assembler->CheckCharacterLT(to + 1, on_failure);
- } else {
- macro_assembler->CheckCharacterGT(to, on_failure);
- }
+ for (int i = 0; i <= last_valid_range; i++) {
+ CharacterRange& range = ranges->at(i);
+ if (range.from() == 0) {
+ ASSERT_EQ(i, 0);
+ zeroth_entry_is_failure = !zeroth_entry_is_failure;
} else {
- if (cc->is_negated()) {
- macro_assembler->GoTo(on_failure);
- }
+ range_boundaries->Add(range.from(), zone);
}
+ range_boundaries->Add(range.to() + 1, zone);
}
- macro_assembler->Bind(&success);
+ int end_index = range_boundaries->length() - 1;
+ if (range_boundaries->at(end_index) > max_char) {
+ end_index--;
+ }
+
+ Label fall_through;
+ GenerateBranches(macro_assembler,
+ range_boundaries,
+ 0, // start_index.
+ end_index,
+ 0, // min_char.
+ max_char,
+ &fall_through,
+ zeroth_entry_is_failure ? &fall_through : on_failure,
+ zeroth_entry_is_failure ? on_failure : &fall_through);
+ macro_assembler->Bind(&fall_through);
}
@@ -1717,6 +2217,21 @@ int ActionNode::EatsAtLeast(int still_to_find,
}
+void ActionNode::FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start) {
+ if (type_ == BEGIN_SUBMATCH) {
+ bm->SetRest(offset);
+ } else if (type_ != POSITIVE_SUBMATCH_SUCCESS) {
+ on_success()->FillInBMInfo(
+ offset, recursion_depth + 1, budget - 1, bm, not_at_start);
+ }
+ SaveBMInfo(bm, not_at_start, offset);
+}
+
+
int AssertionNode::EatsAtLeast(int still_to_find,
int recursion_depth,
bool not_at_start) {
@@ -1733,6 +2248,19 @@ int AssertionNode::EatsAtLeast(int still_to_find,
}
+void AssertionNode::FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start) {
+ // Match the behaviour of EatsAtLeast on this node.
+ if (type() == AT_START && not_at_start) return;
+ on_success()->FillInBMInfo(
+ offset, recursion_depth + 1, budget - 1, bm, not_at_start);
+ SaveBMInfo(bm, not_at_start, offset);
+}
+
+
int BackReferenceNode::EatsAtLeast(int still_to_find,
int recursion_depth,
bool not_at_start) {
@@ -2007,7 +2535,7 @@ void TextNode::GetQuickCheckDetails(QuickCheckDetails* details,
QuickCheckDetails::Position* pos =
details->positions(characters_filled_in);
RegExpCharacterClass* tree = elm.data.u_char_class;
- ZoneList<CharacterRange>* ranges = tree->ranges();
+ ZoneList<CharacterRange>* ranges = tree->ranges(zone());
if (tree->is_negated()) {
// A quick check uses multi-character mask and compare. There is no
// useful way to incorporate a negative char class into this scheme
@@ -2071,10 +2599,12 @@ void TextNode::GetQuickCheckDetails(QuickCheckDetails* details,
}
}
ASSERT(characters_filled_in != details->characters());
- on_success()-> GetQuickCheckDetails(details,
- compiler,
- characters_filled_in,
- true);
+ if (!details->cannot_match()) {
+ on_success()-> GetQuickCheckDetails(details,
+ compiler,
+ characters_filled_in,
+ true);
+ }
}
@@ -2152,6 +2682,157 @@ class VisitMarker {
};
+RegExpNode* SeqRegExpNode::FilterASCII(int depth) {
+ if (info()->replacement_calculated) return replacement();
+ if (depth < 0) return this;
+ ASSERT(!info()->visited);
+ VisitMarker marker(info());
+ return FilterSuccessor(depth - 1);
+}
+
+
+RegExpNode* SeqRegExpNode::FilterSuccessor(int depth) {
+ RegExpNode* next = on_success_->FilterASCII(depth - 1);
+ if (next == NULL) return set_replacement(NULL);
+ on_success_ = next;
+ return set_replacement(this);
+}
+
+
+RegExpNode* TextNode::FilterASCII(int depth) {
+ if (info()->replacement_calculated) return replacement();
+ if (depth < 0) return this;
+ ASSERT(!info()->visited);
+ VisitMarker marker(info());
+ int element_count = elms_->length();
+ for (int i = 0; i < element_count; i++) {
+ TextElement elm = elms_->at(i);
+ if (elm.type == TextElement::ATOM) {
+ Vector<const uc16> quarks = elm.data.u_atom->data();
+ for (int j = 0; j < quarks.length(); j++) {
+ // We don't need special handling for case independence
+ // because of the rule that case independence cannot make
+ // a non-ASCII character match an ASCII character.
+ if (quarks[j] > String::kMaxAsciiCharCode) {
+ return set_replacement(NULL);
+ }
+ }
+ } else {
+ ASSERT(elm.type == TextElement::CHAR_CLASS);
+ RegExpCharacterClass* cc = elm.data.u_char_class;
+ ZoneList<CharacterRange>* ranges = cc->ranges(zone());
+ if (!CharacterRange::IsCanonical(ranges)) {
+ CharacterRange::Canonicalize(ranges);
+ }
+ // Now they are in order so we only need to look at the first.
+ int range_count = ranges->length();
+ if (cc->is_negated()) {
+ if (range_count != 0 &&
+ ranges->at(0).from() == 0 &&
+ ranges->at(0).to() >= String::kMaxAsciiCharCode) {
+ return set_replacement(NULL);
+ }
+ } else {
+ if (range_count == 0 ||
+ ranges->at(0).from() > String::kMaxAsciiCharCode) {
+ return set_replacement(NULL);
+ }
+ }
+ }
+ }
+ return FilterSuccessor(depth - 1);
+}
+
+
+RegExpNode* LoopChoiceNode::FilterASCII(int depth) {
+ if (info()->replacement_calculated) return replacement();
+ if (depth < 0) return this;
+ if (info()->visited) return this;
+ {
+ VisitMarker marker(info());
+
+ RegExpNode* continue_replacement = continue_node_->FilterASCII(depth - 1);
+ // If we can't continue after the loop then there is no sense in doing the
+ // loop.
+ if (continue_replacement == NULL) return set_replacement(NULL);
+ }
+
+ return ChoiceNode::FilterASCII(depth - 1);
+}
+
+
+RegExpNode* ChoiceNode::FilterASCII(int depth) {
+ if (info()->replacement_calculated) return replacement();
+ if (depth < 0) return this;
+ if (info()->visited) return this;
+ VisitMarker marker(info());
+ int choice_count = alternatives_->length();
+
+ for (int i = 0; i < choice_count; i++) {
+ GuardedAlternative alternative = alternatives_->at(i);
+ if (alternative.guards() != NULL && alternative.guards()->length() != 0) {
+ set_replacement(this);
+ return this;
+ }
+ }
+
+ int surviving = 0;
+ RegExpNode* survivor = NULL;
+ for (int i = 0; i < choice_count; i++) {
+ GuardedAlternative alternative = alternatives_->at(i);
+ RegExpNode* replacement = alternative.node()->FilterASCII(depth - 1);
+ ASSERT(replacement != this); // No missing EMPTY_MATCH_CHECK.
+ if (replacement != NULL) {
+ alternatives_->at(i).set_node(replacement);
+ surviving++;
+ survivor = replacement;
+ }
+ }
+ if (surviving < 2) return set_replacement(survivor);
+
+ set_replacement(this);
+ if (surviving == choice_count) {
+ return this;
+ }
+ // Only some of the nodes survived the filtering. We need to rebuild the
+ // alternatives list.
+ ZoneList<GuardedAlternative>* new_alternatives =
+ new(zone()) ZoneList<GuardedAlternative>(surviving, zone());
+ for (int i = 0; i < choice_count; i++) {
+ RegExpNode* replacement =
+ alternatives_->at(i).node()->FilterASCII(depth - 1);
+ if (replacement != NULL) {
+ alternatives_->at(i).set_node(replacement);
+ new_alternatives->Add(alternatives_->at(i), zone());
+ }
+ }
+ alternatives_ = new_alternatives;
+ return this;
+}
+
+
+RegExpNode* NegativeLookaheadChoiceNode::FilterASCII(int depth) {
+ if (info()->replacement_calculated) return replacement();
+ if (depth < 0) return this;
+ if (info()->visited) return this;
+ VisitMarker marker(info());
+ // Alternative 0 is the negative lookahead, alternative 1 is what comes
+ // afterwards.
+ RegExpNode* node = alternatives_->at(1).node();
+ RegExpNode* replacement = node->FilterASCII(depth - 1);
+ if (replacement == NULL) return set_replacement(NULL);
+ alternatives_->at(1).set_node(replacement);
+
+ RegExpNode* neg_node = alternatives_->at(0).node();
+ RegExpNode* neg_replacement = neg_node->FilterASCII(depth - 1);
+ // If the negative lookahead is always going to fail then
+ // we don't need to check it.
+ if (neg_replacement == NULL) return set_replacement(replacement);
+ alternatives_->at(0).set_node(neg_replacement);
+ return set_replacement(this);
+}
+
+
void LoopChoiceNode::GetQuickCheckDetails(QuickCheckDetails* details,
RegExpCompiler* compiler,
int characters_filled_in,
@@ -2165,6 +2846,24 @@ void LoopChoiceNode::GetQuickCheckDetails(QuickCheckDetails* details,
}
+void LoopChoiceNode::FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start) {
+ if (body_can_be_zero_length_ ||
+ recursion_depth > RegExpCompiler::kMaxRecursion ||
+ budget <= 0) {
+ bm->SetRest(offset);
+ SaveBMInfo(bm, not_at_start, offset);
+ return;
+ }
+ ChoiceNode::FillInBMInfo(
+ offset, recursion_depth + 1, budget - 1, bm, not_at_start);
+ SaveBMInfo(bm, not_at_start, offset);
+}
+
+
void ChoiceNode::GetQuickCheckDetails(QuickCheckDetails* details,
RegExpCompiler* compiler,
int characters_filled_in,
@@ -2249,110 +2948,83 @@ static void EmitHat(RegExpCompiler* compiler,
}
-// Emit the code to handle \b and \B (word-boundary or non-word-boundary)
-// when we know whether the next character must be a word character or not.
-static void EmitHalfBoundaryCheck(AssertionNode::AssertionNodeType type,
- RegExpCompiler* compiler,
- RegExpNode* on_success,
- Trace* trace) {
+// Emit the code to handle \b and \B (word-boundary or non-word-boundary).
+void AssertionNode::EmitBoundaryCheck(RegExpCompiler* compiler, Trace* trace) {
RegExpMacroAssembler* assembler = compiler->macro_assembler();
- Label done;
-
- Trace new_trace(*trace);
-
- bool expect_word_character = (type == AssertionNode::AFTER_WORD_CHARACTER);
- Label* on_word = expect_word_character ? &done : new_trace.backtrack();
- Label* on_non_word = expect_word_character ? new_trace.backtrack() : &done;
-
- // Check whether previous character was a word character.
- switch (trace->at_start()) {
- case Trace::TRUE:
- if (expect_word_character) {
- assembler->GoTo(on_non_word);
- }
- break;
- case Trace::UNKNOWN:
- ASSERT_EQ(0, trace->cp_offset());
- assembler->CheckAtStart(on_non_word);
- // Fall through.
- case Trace::FALSE:
- int prev_char_offset = trace->cp_offset() - 1;
- assembler->LoadCurrentCharacter(prev_char_offset, NULL, false, 1);
- EmitWordCheck(assembler, on_word, on_non_word, expect_word_character);
- // We may or may not have loaded the previous character.
- new_trace.InvalidateCurrentCharacter();
+ Trace::TriBool next_is_word_character = Trace::UNKNOWN;
+ bool not_at_start = (trace->at_start() == Trace::FALSE);
+ BoyerMooreLookahead* lookahead = bm_info(not_at_start);
+ if (lookahead == NULL) {
+ int eats_at_least =
+ Min(kMaxLookaheadForBoyerMoore,
+ EatsAtLeast(kMaxLookaheadForBoyerMoore, 0, not_at_start));
+ if (eats_at_least >= 1) {
+ BoyerMooreLookahead* bm =
+ new(zone()) BoyerMooreLookahead(eats_at_least, compiler, zone());
+ FillInBMInfo(0, 0, kFillInBMBudget, bm, not_at_start);
+ if (bm->at(0)->is_non_word()) next_is_word_character = Trace::FALSE;
+ if (bm->at(0)->is_word()) next_is_word_character = Trace::TRUE;
+ }
+ } else {
+ if (lookahead->at(0)->is_non_word()) next_is_word_character = Trace::FALSE;
+ if (lookahead->at(0)->is_word()) next_is_word_character = Trace::TRUE;
+ }
+ bool at_boundary = (type_ == AssertionNode::AT_BOUNDARY);
+ if (next_is_word_character == Trace::UNKNOWN) {
+ Label before_non_word;
+ Label before_word;
+ if (trace->characters_preloaded() != 1) {
+ assembler->LoadCurrentCharacter(trace->cp_offset(), &before_non_word);
+ }
+ // Fall through on non-word.
+ EmitWordCheck(assembler, &before_word, &before_non_word, false);
+ // Next character is not a word character.
+ assembler->Bind(&before_non_word);
+ Label ok;
+ BacktrackIfPrevious(compiler, trace, at_boundary ? kIsNonWord : kIsWord);
+ assembler->GoTo(&ok);
+
+ assembler->Bind(&before_word);
+ BacktrackIfPrevious(compiler, trace, at_boundary ? kIsWord : kIsNonWord);
+ assembler->Bind(&ok);
+ } else if (next_is_word_character == Trace::TRUE) {
+ BacktrackIfPrevious(compiler, trace, at_boundary ? kIsWord : kIsNonWord);
+ } else {
+ ASSERT(next_is_word_character == Trace::FALSE);
+ BacktrackIfPrevious(compiler, trace, at_boundary ? kIsNonWord : kIsWord);
}
-
- assembler->Bind(&done);
-
- on_success->Emit(compiler, &new_trace);
}
-// Emit the code to handle \b and \B (word-boundary or non-word-boundary).
-static void EmitBoundaryCheck(AssertionNode::AssertionNodeType type,
- RegExpCompiler* compiler,
- RegExpNode* on_success,
- Trace* trace) {
+void AssertionNode::BacktrackIfPrevious(
+ RegExpCompiler* compiler,
+ Trace* trace,
+ AssertionNode::IfPrevious backtrack_if_previous) {
RegExpMacroAssembler* assembler = compiler->macro_assembler();
- Label before_non_word;
- Label before_word;
- if (trace->characters_preloaded() != 1) {
- assembler->LoadCurrentCharacter(trace->cp_offset(), &before_non_word);
- }
- // Fall through on non-word.
- EmitWordCheck(assembler, &before_word, &before_non_word, false);
-
- // We will be loading the previous character into the current character
- // register.
Trace new_trace(*trace);
new_trace.InvalidateCurrentCharacter();
- Label ok;
- Label* boundary;
- Label* not_boundary;
- if (type == AssertionNode::AT_BOUNDARY) {
- boundary = &ok;
- not_boundary = new_trace.backtrack();
- } else {
- not_boundary = &ok;
- boundary = new_trace.backtrack();
- }
+ Label fall_through, dummy;
- // Next character is not a word character.
- assembler->Bind(&before_non_word);
- if (new_trace.cp_offset() == 0) {
- // The start of input counts as a non-word character, so the question is
- // decided if we are at the start.
- assembler->CheckAtStart(not_boundary);
- }
- // We already checked that we are not at the start of input so it must be
- // OK to load the previous character.
- assembler->LoadCurrentCharacter(new_trace.cp_offset() - 1,
- &ok, // Unused dummy label in this call.
- false);
- // Fall through on non-word.
- EmitWordCheck(assembler, boundary, not_boundary, false);
- assembler->GoTo(not_boundary);
+ Label* non_word = backtrack_if_previous == kIsNonWord ?
+ new_trace.backtrack() :
+ &fall_through;
+ Label* word = backtrack_if_previous == kIsNonWord ?
+ &fall_through :
+ new_trace.backtrack();
- // Next character is a word character.
- assembler->Bind(&before_word);
if (new_trace.cp_offset() == 0) {
// The start of input counts as a non-word character, so the question is
// decided if we are at the start.
- assembler->CheckAtStart(boundary);
+ assembler->CheckAtStart(non_word);
}
// We already checked that we are not at the start of input so it must be
// OK to load the previous character.
- assembler->LoadCurrentCharacter(new_trace.cp_offset() - 1,
- &ok, // Unused dummy label in this call.
- false);
- bool fall_through_on_word = (type == AssertionNode::AT_NON_BOUNDARY);
- EmitWordCheck(assembler, not_boundary, boundary, fall_through_on_word);
+ assembler->LoadCurrentCharacter(new_trace.cp_offset() - 1, &dummy, false);
+ EmitWordCheck(assembler, word, non_word, backtrack_if_previous == kIsNonWord);
- assembler->Bind(&ok);
-
- on_success->Emit(compiler, &new_trace);
+ assembler->Bind(&fall_through);
+ on_success()->Emit(compiler, &new_trace);
}
@@ -2400,13 +3072,9 @@ void AssertionNode::Emit(RegExpCompiler* compiler, Trace* trace) {
return;
case AT_BOUNDARY:
case AT_NON_BOUNDARY: {
- EmitBoundaryCheck(type_, compiler, on_success(), trace);
+ EmitBoundaryCheck(compiler, trace);
return;
}
- case AFTER_WORD_CHARACTER:
- case AFTER_NONWORD_CHARACTER: {
- EmitHalfBoundaryCheck(type_, compiler, on_success(), trace);
- }
}
on_success()->Emit(compiler, trace);
}
@@ -2519,7 +3187,8 @@ void TextNode::TextEmitPass(RegExpCompiler* compiler,
backtrack,
cp_offset,
*checked_up_to < cp_offset,
- preloaded);
+ preloaded,
+ zone());
UpdateBoundsCheck(cp_offset, checked_up_to);
}
}
@@ -2640,11 +3309,11 @@ void TextNode::MakeCaseIndependent(bool is_ascii) {
RegExpCharacterClass* cc = elm.data.u_char_class;
// None of the standard character classes is different in the case
// independent case and it slows us down if we don't know that.
- if (cc->is_standard()) continue;
- ZoneList<CharacterRange>* ranges = cc->ranges();
+ if (cc->is_standard(zone())) continue;
+ ZoneList<CharacterRange>* ranges = cc->ranges(zone());
int range_count = ranges->length();
for (int j = 0; j < range_count; j++) {
- ranges->at(j).AddCaseEquivalents(ranges, is_ascii);
+ ranges->at(j).AddCaseEquivalents(ranges, is_ascii, zone());
}
}
}
@@ -2661,6 +3330,30 @@ int TextNode::GreedyLoopTextLength() {
}
+RegExpNode* TextNode::GetSuccessorOfOmnivorousTextNode(
+ RegExpCompiler* compiler) {
+ if (elms_->length() != 1) return NULL;
+ TextElement elm = elms_->at(0);
+ if (elm.type != TextElement::CHAR_CLASS) return NULL;
+ RegExpCharacterClass* node = elm.data.u_char_class;
+ ZoneList<CharacterRange>* ranges = node->ranges(zone());
+ if (!CharacterRange::IsCanonical(ranges)) {
+ CharacterRange::Canonicalize(ranges);
+ }
+ if (node->is_negated()) {
+ return ranges->length() == 0 ? on_success() : NULL;
+ }
+ if (ranges->length() != 1) return NULL;
+ uint32_t max_char;
+ if (compiler->ascii()) {
+ max_char = String::kMaxAsciiCharCode;
+ } else {
+ max_char = String::kMaxUtf16CodeUnit;
+ }
+ return ranges->at(0).IsEverything(max_char) ? on_success() : NULL;
+}
+
+
// Finds the fixed match length of a sequence of nodes that goes from
// this alternative and back to this choice node. If there are variable
// length nodes or other complications in the way then return a sentinel
@@ -2725,8 +3418,8 @@ void LoopChoiceNode::Emit(RegExpCompiler* compiler, Trace* trace) {
int ChoiceNode::CalculatePreloadCharacters(RegExpCompiler* compiler,
- bool not_at_start) {
- int preload_characters = EatsAtLeast(4, 0, not_at_start);
+ int eats_at_least) {
+ int preload_characters = Min(4, eats_at_least);
if (compiler->macro_assembler()->CanReadUnaligned()) {
bool ascii = compiler->ascii();
if (ascii) {
@@ -2765,13 +3458,13 @@ class AlternativeGeneration: public Malloced {
// size then it is on the stack, otherwise the excess is on the heap.
class AlternativeGenerationList {
public:
- explicit AlternativeGenerationList(int count)
- : alt_gens_(count) {
+ AlternativeGenerationList(int count, Zone* zone)
+ : alt_gens_(count, zone) {
for (int i = 0; i < count && i < kAFew; i++) {
- alt_gens_.Add(a_few_alt_gens_ + i);
+ alt_gens_.Add(a_few_alt_gens_ + i, zone);
}
for (int i = kAFew; i < count; i++) {
- alt_gens_.Add(new AlternativeGeneration());
+ alt_gens_.Add(new AlternativeGeneration(), zone);
}
}
~AlternativeGenerationList() {
@@ -2792,6 +3485,250 @@ class AlternativeGenerationList {
};
+// The '2' variant has an inclusive 'from' and an exclusive 'to'.
+static const int kSpaceRanges[] = { '\t', '\r' + 1, ' ', ' ' + 1, 0x00A0,
+ 0x00A1, 0x1680, 0x1681, 0x180E, 0x180F, 0x2000, 0x200B, 0x2028, 0x202A,
+ 0x202F, 0x2030, 0x205F, 0x2060, 0x3000, 0x3001, 0xFEFF, 0xFF00, 0x10000 };
+static const int kSpaceRangeCount = ARRAY_SIZE(kSpaceRanges);
+
+static const int kWordRanges[] = {
+ '0', '9' + 1, 'A', 'Z' + 1, '_', '_' + 1, 'a', 'z' + 1, 0x10000 };
+static const int kWordRangeCount = ARRAY_SIZE(kWordRanges);
+static const int kDigitRanges[] = { '0', '9' + 1, 0x10000 };
+static const int kDigitRangeCount = ARRAY_SIZE(kDigitRanges);
+static const int kSurrogateRanges[] = { 0xd800, 0xe000, 0x10000 };
+static const int kSurrogateRangeCount = ARRAY_SIZE(kSurrogateRanges);
+static const int kLineTerminatorRanges[] = { 0x000A, 0x000B, 0x000D, 0x000E,
+ 0x2028, 0x202A, 0x10000 };
+static const int kLineTerminatorRangeCount = ARRAY_SIZE(kLineTerminatorRanges);
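These tables follow a new layout: each pair is an inclusive lower bound followed by an exclusive upper bound, and the whole list is terminated by a 0x10000 sentinel (one past the last UTF-16 code unit). As a minimal sketch of how such a table can be read (the helper name is hypothetical and not part of this patch):

  // Membership test over a sentinel-terminated table laid out as
  // { from0, to0, from1, to1, ..., 0x10000 }, where every 'to' is exclusive.
  static bool InRangeTable(const int* table, int code_unit) {
    for (int i = 0; table[i] != 0x10000; i += 2) {
      if (code_unit >= table[i] && code_unit < table[i + 1]) return true;
    }
    return false;
  }

For example, InRangeTable(kDigitRanges, '7') is true while InRangeTable(kDigitRanges, '/') is false, matching the \d class.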
+
+
+void BoyerMoorePositionInfo::Set(int character) {
+ SetInterval(Interval(character, character));
+}
+
+
+void BoyerMoorePositionInfo::SetInterval(const Interval& interval) {
+ s_ = AddRange(s_, kSpaceRanges, kSpaceRangeCount, interval);
+ w_ = AddRange(w_, kWordRanges, kWordRangeCount, interval);
+ d_ = AddRange(d_, kDigitRanges, kDigitRangeCount, interval);
+ surrogate_ =
+ AddRange(surrogate_, kSurrogateRanges, kSurrogateRangeCount, interval);
+ if (interval.to() - interval.from() >= kMapSize - 1) {
+ if (map_count_ != kMapSize) {
+ map_count_ = kMapSize;
+ for (int i = 0; i < kMapSize; i++) map_->at(i) = true;
+ }
+ return;
+ }
+ for (int i = interval.from(); i <= interval.to(); i++) {
+ int mod_character = (i & kMask);
+ if (!map_->at(mod_character)) {
+ map_count_++;
+ map_->at(mod_character) = true;
+ }
+ if (map_count_ == kMapSize) return;
+ }
+}
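Note that the per-position bitmap is indexed by (character & kMask); assuming kMask is kMapSize - 1, as the masking above implies, two code units that are equal modulo kMapSize (for example 0x41 'A' and 0x00C1 'Á' with a 128-entry map) share a bit. The map is therefore a conservative over-approximation of the characters that can occur at a position, which is safe: it can only make the lookahead skip less often, never skip past a match.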
+
+
+void BoyerMoorePositionInfo::SetAll() {
+ s_ = w_ = d_ = kLatticeUnknown;
+ if (map_count_ != kMapSize) {
+ map_count_ = kMapSize;
+ for (int i = 0; i < kMapSize; i++) map_->at(i) = true;
+ }
+}
+
+
+BoyerMooreLookahead::BoyerMooreLookahead(
+ int length, RegExpCompiler* compiler, Zone* zone)
+ : length_(length),
+ compiler_(compiler) {
+ if (compiler->ascii()) {
+ max_char_ = String::kMaxAsciiCharCode;
+ } else {
+ max_char_ = String::kMaxUtf16CodeUnit;
+ }
+ bitmaps_ = new(zone) ZoneList<BoyerMoorePositionInfo*>(length, zone);
+ for (int i = 0; i < length; i++) {
+ bitmaps_->Add(new(zone) BoyerMoorePositionInfo(zone), zone);
+ }
+}
+
+
+// Find the longest range of lookahead that has the fewest different
+// characters that can occur at a given position. Since we are optimizing two
+// different parameters at once, this is a tradeoff.
+bool BoyerMooreLookahead::FindWorthwhileInterval(int* from, int* to) {
+ int biggest_points = 0;
+ // If more than 32 characters out of 128 can occur it is unlikely that we can
+ // be lucky enough to step forwards much of the time.
+ const int kMaxMax = 32;
+ for (int max_number_of_chars = 4;
+ max_number_of_chars < kMaxMax;
+ max_number_of_chars *= 2) {
+ biggest_points =
+ FindBestInterval(max_number_of_chars, biggest_points, from, to);
+ }
+ if (biggest_points == 0) return false;
+ return true;
+}
+
+
+// Find the highest-points range between 0 and length_ where the character
+// information is not too vague. 'Too vague' means that there are more than
+// max_number_of_chars that can occur at this position. Calculates the number
+// of points as the product of width-of-the-range and
+// probability-of-finding-one-of-the-characters, where the probability is
+// calculated using the frequency distribution of the sample subject string.
+int BoyerMooreLookahead::FindBestInterval(
+ int max_number_of_chars, int old_biggest_points, int* from, int* to) {
+ int biggest_points = old_biggest_points;
+ static const int kSize = RegExpMacroAssembler::kTableSize;
+ for (int i = 0; i < length_; ) {
+ while (i < length_ && Count(i) > max_number_of_chars) i++;
+ if (i == length_) break;
+ int remembered_from = i;
+ bool union_map[kSize];
+ for (int j = 0; j < kSize; j++) union_map[j] = false;
+ while (i < length_ && Count(i) <= max_number_of_chars) {
+ BoyerMoorePositionInfo* map = bitmaps_->at(i);
+ for (int j = 0; j < kSize; j++) union_map[j] |= map->at(j);
+ i++;
+ }
+ int frequency = 0;
+ for (int j = 0; j < kSize; j++) {
+ if (union_map[j]) {
+ // Add 1 to the frequency to give a small per-character boost for
+ // the cases where our sampling is not good enough and many
+ // characters have a frequency of zero. This means the frequency
+ // can theoretically be up to 2*kSize though we treat it mostly as
+ // a fraction of kSize.
+ frequency += compiler_->frequency_collator()->Frequency(j) + 1;
+ }
+ }
+    // We use the probability of skipping times the distance we are skipping
+    // to judge the effectiveness of this. There is also a cut-off: by
+    // dividing by 2 we switch off the skipping if the probability of
+    // skipping is less than 50%. This is because the multibyte
+    // mask-and-compare skipping in the quick check is more likely to do well
+    // in this case.
+ bool in_quickcheck_range = ((i - remembered_from < 4) ||
+ (compiler_->ascii() ? remembered_from <= 4 : remembered_from <= 2));
+ // Called 'probability' but it is only a rough estimate and can actually
+ // be outside the 0-kSize range.
+ int probability = (in_quickcheck_range ? kSize / 2 : kSize) - frequency;
+ int points = (i - remembered_from) * probability;
+ if (points > biggest_points) {
+ *from = remembered_from;
+ *to = i - 1;
+ biggest_points = points;
+ }
+ }
+ return biggest_points;
+}
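To make the scoring concrete (illustrative numbers only): with the 128-entry table, an interval of width 5 whose union of possible characters has a summed frequency of 40 scores 5 * (128 - 40) = 440 points when it lies outside the quick-check range, but only 5 * (64 - 40) = 120 points when it overlaps it, so the search favours intervals further into the lookahead over ones the mask-and-compare quick check could handle anyway.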
+
+
+// Collect all the characters that, if they occur in the subject string
+// anywhere between min_lookahead and max_lookahead (inclusive, measured from
+// the current position), will not prevent a successful match. If the
+// character at offset max_lookahead is not one of these characters, then we
+// can safely skip forwards by the number of characters in the range.
+int BoyerMooreLookahead::GetSkipTable(int min_lookahead,
+ int max_lookahead,
+ Handle<ByteArray> boolean_skip_table) {
+ const int kSize = RegExpMacroAssembler::kTableSize;
+
+ const int kSkipArrayEntry = 0;
+ const int kDontSkipArrayEntry = 1;
+
+ for (int i = 0; i < kSize; i++) {
+ boolean_skip_table->set(i, kSkipArrayEntry);
+ }
+ int skip = max_lookahead + 1 - min_lookahead;
+
+ for (int i = max_lookahead; i >= min_lookahead; i--) {
+ BoyerMoorePositionInfo* map = bitmaps_->at(i);
+ for (int j = 0; j < kSize; j++) {
+ if (map->at(j)) {
+ boolean_skip_table->set(j, kDontSkipArrayEntry);
+ }
+ }
+ }
+
+ return skip;
+}
+
+
+// See comment above on the implementation of GetSkipTable.
+bool BoyerMooreLookahead::EmitSkipInstructions(RegExpMacroAssembler* masm) {
+ const int kSize = RegExpMacroAssembler::kTableSize;
+
+ int min_lookahead = 0;
+ int max_lookahead = 0;
+
+ if (!FindWorthwhileInterval(&min_lookahead, &max_lookahead)) return false;
+
+ bool found_single_character = false;
+ int single_character = 0;
+ for (int i = max_lookahead; i >= min_lookahead; i--) {
+ BoyerMoorePositionInfo* map = bitmaps_->at(i);
+ if (map->map_count() > 1 ||
+ (found_single_character && map->map_count() != 0)) {
+ found_single_character = false;
+ break;
+ }
+ for (int j = 0; j < kSize; j++) {
+ if (map->at(j)) {
+ found_single_character = true;
+ single_character = j;
+ break;
+ }
+ }
+ }
+
+ int lookahead_width = max_lookahead + 1 - min_lookahead;
+
+ if (found_single_character && lookahead_width == 1 && max_lookahead < 3) {
+ // The mask-compare can probably handle this better.
+ return false;
+ }
+
+ if (found_single_character) {
+ Label cont, again;
+ masm->Bind(&again);
+ masm->LoadCurrentCharacter(max_lookahead, &cont, true);
+ if (max_char_ > kSize) {
+ masm->CheckCharacterAfterAnd(single_character,
+ RegExpMacroAssembler::kTableMask,
+ &cont);
+ } else {
+ masm->CheckCharacter(single_character, &cont);
+ }
+ masm->AdvanceCurrentPosition(lookahead_width);
+ masm->GoTo(&again);
+ masm->Bind(&cont);
+ return true;
+ }
+
+ Handle<ByteArray> boolean_skip_table =
+ FACTORY->NewByteArray(kSize, TENURED);
+ int skip_distance = GetSkipTable(
+ min_lookahead, max_lookahead, boolean_skip_table);
+ ASSERT(skip_distance != 0);
+
+ Label cont, again;
+ masm->Bind(&again);
+ masm->LoadCurrentCharacter(max_lookahead, &cont, true);
+ masm->CheckBitInTable(boolean_skip_table, &cont);
+ masm->AdvanceCurrentPosition(skip_distance);
+ masm->GoTo(&again);
+ masm->Bind(&cont);
+
+ return true;
+}
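What the emitted skip loop amounts to, written as plain C++ over a one-byte subject (an illustrative sketch only; the real code is generated through the macro assembler calls above, and the names below are hypothetical):

  // Advance 'pos' while the character at pos + max_lookahead cannot occur
  // anywhere in the [min_lookahead, max_lookahead] window of a match.
  static int SkipAhead(const unsigned char* subject, int length, int pos,
                       int max_lookahead, int skip_distance,
                       const unsigned char* boolean_skip_table /* 128 bytes */) {
    while (pos + max_lookahead < length &&
           boolean_skip_table[subject[pos + max_lookahead] & 0x7f] == 0) {
      pos += skip_distance;  // 0 == kSkipArrayEntry: safe to jump ahead.
    }
    return pos;
  }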
+
+
/* Code generation for choice nodes.
*
* We generate quick checks that do a mask and compare to eliminate a
@@ -2870,7 +3807,6 @@ class AlternativeGenerationList {
* \______________/
*/
-
void ChoiceNode::Emit(RegExpCompiler* compiler, Trace* trace) {
RegExpMacroAssembler* macro_assembler = compiler->macro_assembler();
int choice_count = alternatives_->length();
@@ -2935,14 +3871,58 @@ void ChoiceNode::Emit(RegExpCompiler* compiler, Trace* trace) {
int first_normal_choice = greedy_loop ? 1 : 0;
- int preload_characters =
- CalculatePreloadCharacters(compiler,
- current_trace->at_start() == Trace::FALSE);
- bool preload_is_current =
+ bool not_at_start = current_trace->at_start() == Trace::FALSE;
+ const int kEatsAtLeastNotYetInitialized = -1;
+ int eats_at_least = kEatsAtLeastNotYetInitialized;
+
+ bool skip_was_emitted = false;
+
+ if (!greedy_loop && choice_count == 2) {
+ GuardedAlternative alt1 = alternatives_->at(1);
+ if (alt1.guards() == NULL || alt1.guards()->length() == 0) {
+ RegExpNode* eats_anything_node = alt1.node();
+ if (eats_anything_node->GetSuccessorOfOmnivorousTextNode(compiler) ==
+ this) {
+ // At this point we know that we are at a non-greedy loop that will eat
+ // any character one at a time. Any non-anchored regexp has such a
+ // loop prepended to it in order to find where it starts. We look for
+ // a pattern of the form ...abc... where we can look 6 characters ahead
+ // and step forwards 3 if the character is not one of abc. Abc need
+ // not be atoms, they can be any reasonably limited character class or
+ // small alternation.
+ ASSERT(trace->is_trivial()); // This is the case on LoopChoiceNodes.
+ BoyerMooreLookahead* lookahead = bm_info(not_at_start);
+ if (lookahead == NULL) {
+ eats_at_least =
+ Min(kMaxLookaheadForBoyerMoore,
+ EatsAtLeast(kMaxLookaheadForBoyerMoore, 0, not_at_start));
+ if (eats_at_least >= 1) {
+ BoyerMooreLookahead* bm =
+ new(zone()) BoyerMooreLookahead(eats_at_least,
+ compiler,
+ zone());
+ GuardedAlternative alt0 = alternatives_->at(0);
+ alt0.node()->FillInBMInfo(0, 0, kFillInBMBudget, bm, not_at_start);
+ skip_was_emitted = bm->EmitSkipInstructions(macro_assembler);
+ }
+ } else {
+ skip_was_emitted = lookahead->EmitSkipInstructions(macro_assembler);
+ }
+ }
+ }
+ }
+
+ if (eats_at_least == kEatsAtLeastNotYetInitialized) {
+ // Save some time by looking at most one machine word ahead.
+ eats_at_least = EatsAtLeast(compiler->ascii() ? 4 : 2, 0, not_at_start);
+ }
+ int preload_characters = CalculatePreloadCharacters(compiler, eats_at_least);
+
+ bool preload_is_current = !skip_was_emitted &&
(current_trace->characters_preloaded() == preload_characters);
bool preload_has_checked_bounds = preload_is_current;
- AlternativeGenerationList alt_gens(choice_count);
+ AlternativeGenerationList alt_gens(choice_count, zone());
// For now we just call all choices one after the other. The idea ultimately
// is to use the Dispatch table to try only the relevant ones.
@@ -3422,6 +4402,7 @@ void DotPrinter::VisitChoice(ChoiceNode* that) {
void DotPrinter::VisitText(TextNode* that) {
+ Zone* zone = that->zone();
stream()->Add(" n%p [label=\"", that);
for (int i = 0; i < that->elements()->length(); i++) {
if (i > 0) stream()->Add(" ");
@@ -3436,8 +4417,8 @@ void DotPrinter::VisitText(TextNode* that) {
stream()->Add("[");
if (node->is_negated())
stream()->Add("^");
- for (int j = 0; j < node->ranges()->length(); j++) {
- CharacterRange range = node->ranges()->at(j);
+ for (int j = 0; j < node->ranges(zone)->length(); j++) {
+ CharacterRange range = node->ranges(zone)->at(j);
stream()->Add("%k-%k", range.from(), range.to());
}
stream()->Add("]");
@@ -3489,12 +4470,6 @@ void DotPrinter::VisitAssertion(AssertionNode* that) {
case AssertionNode::AFTER_NEWLINE:
stream()->Add("label=\"(?<=\\n)\", shape=septagon");
break;
- case AssertionNode::AFTER_WORD_CHARACTER:
- stream()->Add("label=\"(?<=\\w)\", shape=septagon");
- break;
- case AssertionNode::AFTER_NONWORD_CHARACTER:
- stream()->Add("label=\"(?<=\\W)\", shape=septagon");
- break;
}
stream()->Add("];\n");
PrintAttributes(that);
@@ -3599,37 +4574,26 @@ void RegExpEngine::DotPrint(const char* label,
// -------------------------------------------------------------------
// Tree to graph conversion
-static const uc16 kSpaceRanges[] = { 0x0009, 0x000D, 0x0020, 0x0020, 0x00A0,
- 0x00A0, 0x1680, 0x1680, 0x180E, 0x180E, 0x2000, 0x200A, 0x2028, 0x2029,
- 0x202F, 0x202F, 0x205F, 0x205F, 0x3000, 0x3000, 0xFEFF, 0xFEFF };
-static const int kSpaceRangeCount = ARRAY_SIZE(kSpaceRanges);
-
-static const uc16 kWordRanges[] = { '0', '9', 'A', 'Z', '_', '_', 'a', 'z' };
-static const int kWordRangeCount = ARRAY_SIZE(kWordRanges);
-
-static const uc16 kDigitRanges[] = { '0', '9' };
-static const int kDigitRangeCount = ARRAY_SIZE(kDigitRanges);
-
-static const uc16 kLineTerminatorRanges[] = { 0x000A, 0x000A, 0x000D, 0x000D,
- 0x2028, 0x2029 };
-static const int kLineTerminatorRangeCount = ARRAY_SIZE(kLineTerminatorRanges);
-
RegExpNode* RegExpAtom::ToNode(RegExpCompiler* compiler,
RegExpNode* on_success) {
- ZoneList<TextElement>* elms = new ZoneList<TextElement>(1);
- elms->Add(TextElement::Atom(this));
- return new TextNode(elms, on_success);
+ ZoneList<TextElement>* elms =
+ new(compiler->zone()) ZoneList<TextElement>(1, compiler->zone());
+ elms->Add(TextElement::Atom(this), compiler->zone());
+ return new(compiler->zone()) TextNode(elms, on_success);
}
RegExpNode* RegExpText::ToNode(RegExpCompiler* compiler,
RegExpNode* on_success) {
- return new TextNode(elements(), on_success);
+ return new(compiler->zone()) TextNode(elements(), on_success);
}
+
static bool CompareInverseRanges(ZoneList<CharacterRange>* ranges,
- const uc16* special_class,
+ const int* special_class,
int length) {
+ length--; // Remove final 0x10000.
+ ASSERT(special_class[length] == 0x10000);
ASSERT(ranges->length() != 0);
ASSERT(length != 0);
ASSERT(special_class[0] != 0);
@@ -3645,7 +4609,7 @@ static bool CompareInverseRanges(ZoneList<CharacterRange>* ranges,
return false;
}
range = ranges->at((i >> 1) + 1);
- if (special_class[i+1] != range.from() - 1) {
+ if (special_class[i+1] != range.from()) {
return false;
}
}
@@ -3657,14 +4621,17 @@ static bool CompareInverseRanges(ZoneList<CharacterRange>* ranges,
static bool CompareRanges(ZoneList<CharacterRange>* ranges,
- const uc16* special_class,
+ const int* special_class,
int length) {
+ length--; // Remove final 0x10000.
+ ASSERT(special_class[length] == 0x10000);
if (ranges->length() * 2 != length) {
return false;
}
for (int i = 0; i < length; i += 2) {
CharacterRange range = ranges->at(i >> 1);
- if (range.from() != special_class[i] || range.to() != special_class[i+1]) {
+ if (range.from() != special_class[i] ||
+ range.to() != special_class[i + 1] - 1) {
return false;
}
}
@@ -3672,7 +4639,7 @@ static bool CompareRanges(ZoneList<CharacterRange>* ranges,
}
-bool RegExpCharacterClass::is_standard() {
+bool RegExpCharacterClass::is_standard(Zone* zone) {
// TODO(lrn): Remove need for this function, by not throwing away information
// along the way.
if (is_negated_) {
@@ -3681,31 +4648,31 @@ bool RegExpCharacterClass::is_standard() {
if (set_.is_standard()) {
return true;
}
- if (CompareRanges(set_.ranges(), kSpaceRanges, kSpaceRangeCount)) {
+ if (CompareRanges(set_.ranges(zone), kSpaceRanges, kSpaceRangeCount)) {
set_.set_standard_set_type('s');
return true;
}
- if (CompareInverseRanges(set_.ranges(), kSpaceRanges, kSpaceRangeCount)) {
+ if (CompareInverseRanges(set_.ranges(zone), kSpaceRanges, kSpaceRangeCount)) {
set_.set_standard_set_type('S');
return true;
}
- if (CompareInverseRanges(set_.ranges(),
+ if (CompareInverseRanges(set_.ranges(zone),
kLineTerminatorRanges,
kLineTerminatorRangeCount)) {
set_.set_standard_set_type('.');
return true;
}
- if (CompareRanges(set_.ranges(),
+ if (CompareRanges(set_.ranges(zone),
kLineTerminatorRanges,
kLineTerminatorRangeCount)) {
set_.set_standard_set_type('n');
return true;
}
- if (CompareRanges(set_.ranges(), kWordRanges, kWordRangeCount)) {
+ if (CompareRanges(set_.ranges(zone), kWordRanges, kWordRangeCount)) {
set_.set_standard_set_type('w');
return true;
}
- if (CompareInverseRanges(set_.ranges(), kWordRanges, kWordRangeCount)) {
+ if (CompareInverseRanges(set_.ranges(zone), kWordRanges, kWordRangeCount)) {
set_.set_standard_set_type('W');
return true;
}
@@ -3715,7 +4682,7 @@ bool RegExpCharacterClass::is_standard() {
RegExpNode* RegExpCharacterClass::ToNode(RegExpCompiler* compiler,
RegExpNode* on_success) {
- return new TextNode(this, on_success);
+ return new(compiler->zone()) TextNode(this, on_success);
}
@@ -3723,7 +4690,8 @@ RegExpNode* RegExpDisjunction::ToNode(RegExpCompiler* compiler,
RegExpNode* on_success) {
ZoneList<RegExpTree*>* alternatives = this->alternatives();
int length = alternatives->length();
- ChoiceNode* result = new ChoiceNode(length);
+ ChoiceNode* result =
+ new(compiler->zone()) ChoiceNode(length, compiler->zone());
for (int i = 0; i < length; i++) {
GuardedAlternative alternative(alternatives->at(i)->ToNode(compiler,
on_success));
@@ -3816,6 +4784,8 @@ RegExpNode* RegExpQuantifier::ToNode(int min,
int body_start_reg = RegExpCompiler::kNoRegister;
Interval capture_registers = body->CaptureRegisters();
bool needs_capture_clearing = !capture_registers.is_empty();
+ Zone* zone = compiler->zone();
+
if (body_can_be_empty) {
body_start_reg = compiler->AllocateRegister();
} else if (FLAG_regexp_optimization && !needs_capture_clearing) {
@@ -3846,7 +4816,7 @@ RegExpNode* RegExpQuantifier::ToNode(int min,
// Unroll the optional matches up to max.
RegExpNode* answer = on_success;
for (int i = 0; i < max; i++) {
- ChoiceNode* alternation = new ChoiceNode(2);
+ ChoiceNode* alternation = new(zone) ChoiceNode(2, zone);
if (is_greedy) {
alternation->AddAlternative(
GuardedAlternative(body->ToNode(compiler, answer)));
@@ -3869,7 +4839,8 @@ RegExpNode* RegExpQuantifier::ToNode(int min,
int reg_ctr = needs_counter
? compiler->AllocateRegister()
: RegExpCompiler::kNoRegister;
- LoopChoiceNode* center = new LoopChoiceNode(body->min_match() == 0);
+ LoopChoiceNode* center = new(zone) LoopChoiceNode(body->min_match() == 0,
+ zone);
if (not_at_start) center->set_not_at_start();
RegExpNode* loop_return = needs_counter
? static_cast<RegExpNode*>(ActionNode::IncrementRegister(reg_ctr, center))
@@ -3894,13 +4865,14 @@ RegExpNode* RegExpQuantifier::ToNode(int min,
}
GuardedAlternative body_alt(body_node);
if (has_max) {
- Guard* body_guard = new Guard(reg_ctr, Guard::LT, max);
- body_alt.AddGuard(body_guard);
+ Guard* body_guard =
+ new(zone) Guard(reg_ctr, Guard::LT, max);
+ body_alt.AddGuard(body_guard, zone);
}
GuardedAlternative rest_alt(on_success);
if (has_min) {
- Guard* rest_guard = new Guard(reg_ctr, Guard::GEQ, min);
- rest_alt.AddGuard(rest_guard);
+ Guard* rest_guard = new(compiler->zone()) Guard(reg_ctr, Guard::GEQ, min);
+ rest_alt.AddGuard(rest_guard, zone);
}
if (is_greedy) {
center->AddLoopAlternative(body_alt);
@@ -3920,6 +4892,8 @@ RegExpNode* RegExpQuantifier::ToNode(int min,
RegExpNode* RegExpAssertion::ToNode(RegExpCompiler* compiler,
RegExpNode* on_success) {
NodeInfo info;
+ Zone* zone = compiler->zone();
+
switch (type()) {
case START_OF_LINE:
return AssertionNode::AfterNewline(on_success);
@@ -3938,13 +4912,13 @@ RegExpNode* RegExpAssertion::ToNode(RegExpCompiler* compiler,
int stack_pointer_register = compiler->AllocateRegister();
int position_register = compiler->AllocateRegister();
// The ChoiceNode to distinguish between a newline and end-of-input.
- ChoiceNode* result = new ChoiceNode(2);
+ ChoiceNode* result = new(zone) ChoiceNode(2, zone);
// Create a newline atom.
ZoneList<CharacterRange>* newline_ranges =
- new ZoneList<CharacterRange>(3);
- CharacterRange::AddClassEscape('n', newline_ranges);
- RegExpCharacterClass* newline_atom = new RegExpCharacterClass('n');
- TextNode* newline_matcher = new TextNode(
+ new(zone) ZoneList<CharacterRange>(3, zone);
+ CharacterRange::AddClassEscape('n', newline_ranges, zone);
+ RegExpCharacterClass* newline_atom = new(zone) RegExpCharacterClass('n');
+ TextNode* newline_matcher = new(zone) TextNode(
newline_atom,
ActionNode::PositiveSubmatchSuccess(stack_pointer_register,
position_register,
@@ -3972,9 +4946,10 @@ RegExpNode* RegExpAssertion::ToNode(RegExpCompiler* compiler,
RegExpNode* RegExpBackReference::ToNode(RegExpCompiler* compiler,
RegExpNode* on_success) {
- return new BackReferenceNode(RegExpCapture::StartRegister(index()),
- RegExpCapture::EndRegister(index()),
- on_success);
+ return new(compiler->zone())
+ BackReferenceNode(RegExpCapture::StartRegister(index()),
+ RegExpCapture::EndRegister(index()),
+ on_success);
}
@@ -4019,16 +4994,20 @@ RegExpNode* RegExpLookahead::ToNode(RegExpCompiler* compiler,
// for a negative lookahead. The NegativeLookaheadChoiceNode is a special
// ChoiceNode that knows to ignore the first exit when calculating quick
// checks.
+ Zone* zone = compiler->zone();
+
GuardedAlternative body_alt(
body()->ToNode(
compiler,
- success = new NegativeSubmatchSuccess(stack_pointer_register,
- position_register,
- register_count,
- register_start)));
+ success = new(zone) NegativeSubmatchSuccess(stack_pointer_register,
+ position_register,
+ register_count,
+ register_start,
+ zone)));
ChoiceNode* choice_node =
- new NegativeLookaheadChoiceNode(body_alt,
- GuardedAlternative(on_success));
+ new(zone) NegativeLookaheadChoiceNode(body_alt,
+ GuardedAlternative(on_success),
+ zone);
return ActionNode::BeginSubmatch(stack_pointer_register,
position_register,
choice_node);
@@ -4065,70 +5044,79 @@ RegExpNode* RegExpAlternative::ToNode(RegExpCompiler* compiler,
}
-static void AddClass(const uc16* elmv,
+static void AddClass(const int* elmv,
int elmc,
- ZoneList<CharacterRange>* ranges) {
+ ZoneList<CharacterRange>* ranges,
+ Zone* zone) {
+ elmc--;
+ ASSERT(elmv[elmc] == 0x10000);
for (int i = 0; i < elmc; i += 2) {
- ASSERT(elmv[i] <= elmv[i + 1]);
- ranges->Add(CharacterRange(elmv[i], elmv[i + 1]));
+ ASSERT(elmv[i] < elmv[i + 1]);
+ ranges->Add(CharacterRange(elmv[i], elmv[i + 1] - 1), zone);
}
}
-static void AddClassNegated(const uc16 *elmv,
+static void AddClassNegated(const int *elmv,
int elmc,
- ZoneList<CharacterRange>* ranges) {
+ ZoneList<CharacterRange>* ranges,
+ Zone* zone) {
+ elmc--;
+ ASSERT(elmv[elmc] == 0x10000);
ASSERT(elmv[0] != 0x0000);
ASSERT(elmv[elmc-1] != String::kMaxUtf16CodeUnit);
uc16 last = 0x0000;
for (int i = 0; i < elmc; i += 2) {
ASSERT(last <= elmv[i] - 1);
- ASSERT(elmv[i] <= elmv[i + 1]);
- ranges->Add(CharacterRange(last, elmv[i] - 1));
- last = elmv[i + 1] + 1;
+ ASSERT(elmv[i] < elmv[i + 1]);
+ ranges->Add(CharacterRange(last, elmv[i] - 1), zone);
+ last = elmv[i + 1];
}
- ranges->Add(CharacterRange(last, String::kMaxUtf16CodeUnit));
+ ranges->Add(CharacterRange(last, String::kMaxUtf16CodeUnit), zone);
}
void CharacterRange::AddClassEscape(uc16 type,
- ZoneList<CharacterRange>* ranges) {
+ ZoneList<CharacterRange>* ranges,
+ Zone* zone) {
switch (type) {
case 's':
- AddClass(kSpaceRanges, kSpaceRangeCount, ranges);
+ AddClass(kSpaceRanges, kSpaceRangeCount, ranges, zone);
break;
case 'S':
- AddClassNegated(kSpaceRanges, kSpaceRangeCount, ranges);
+ AddClassNegated(kSpaceRanges, kSpaceRangeCount, ranges, zone);
break;
case 'w':
- AddClass(kWordRanges, kWordRangeCount, ranges);
+ AddClass(kWordRanges, kWordRangeCount, ranges, zone);
break;
case 'W':
- AddClassNegated(kWordRanges, kWordRangeCount, ranges);
+ AddClassNegated(kWordRanges, kWordRangeCount, ranges, zone);
break;
case 'd':
- AddClass(kDigitRanges, kDigitRangeCount, ranges);
+ AddClass(kDigitRanges, kDigitRangeCount, ranges, zone);
break;
case 'D':
- AddClassNegated(kDigitRanges, kDigitRangeCount, ranges);
+ AddClassNegated(kDigitRanges, kDigitRangeCount, ranges, zone);
break;
case '.':
AddClassNegated(kLineTerminatorRanges,
kLineTerminatorRangeCount,
- ranges);
+ ranges,
+ zone);
break;
// This is not a character range as defined by the spec but a
// convenient shorthand for a character class that matches any
// character.
case '*':
- ranges->Add(CharacterRange::Everything());
+ ranges->Add(CharacterRange::Everything(), zone);
break;
// This is the set of characters matched by the $ and ^ symbols
// in multiline mode.
case 'n':
AddClass(kLineTerminatorRanges,
kLineTerminatorRangeCount,
- ranges);
+ ranges,
+ zone);
break;
default:
UNREACHABLE();
@@ -4136,17 +5124,19 @@ void CharacterRange::AddClassEscape(uc16 type,
}
-Vector<const uc16> CharacterRange::GetWordBounds() {
- return Vector<const uc16>(kWordRanges, kWordRangeCount);
+Vector<const int> CharacterRange::GetWordBounds() {
+ return Vector<const int>(kWordRanges, kWordRangeCount - 1);
}
class CharacterRangeSplitter {
public:
CharacterRangeSplitter(ZoneList<CharacterRange>** included,
- ZoneList<CharacterRange>** excluded)
+ ZoneList<CharacterRange>** excluded,
+ Zone* zone)
: included_(included),
- excluded_(excluded) { }
+ excluded_(excluded),
+ zone_(zone) { }
void Call(uc16 from, DispatchTable::Entry entry);
static const int kInBase = 0;
@@ -4155,6 +5145,7 @@ class CharacterRangeSplitter {
private:
ZoneList<CharacterRange>** included_;
ZoneList<CharacterRange>** excluded_;
+ Zone* zone_;
};
@@ -4163,31 +5154,33 @@ void CharacterRangeSplitter::Call(uc16 from, DispatchTable::Entry entry) {
ZoneList<CharacterRange>** target = entry.out_set()->Get(kInOverlay)
? included_
: excluded_;
- if (*target == NULL) *target = new ZoneList<CharacterRange>(2);
- (*target)->Add(CharacterRange(entry.from(), entry.to()));
+ if (*target == NULL) *target = new(zone_) ZoneList<CharacterRange>(2, zone_);
+ (*target)->Add(CharacterRange(entry.from(), entry.to()), zone_);
}
void CharacterRange::Split(ZoneList<CharacterRange>* base,
- Vector<const uc16> overlay,
+ Vector<const int> overlay,
ZoneList<CharacterRange>** included,
- ZoneList<CharacterRange>** excluded) {
+ ZoneList<CharacterRange>** excluded,
+ Zone* zone) {
ASSERT_EQ(NULL, *included);
ASSERT_EQ(NULL, *excluded);
- DispatchTable table;
+ DispatchTable table(zone);
for (int i = 0; i < base->length(); i++)
- table.AddRange(base->at(i), CharacterRangeSplitter::kInBase);
+ table.AddRange(base->at(i), CharacterRangeSplitter::kInBase, zone);
for (int i = 0; i < overlay.length(); i += 2) {
- table.AddRange(CharacterRange(overlay[i], overlay[i+1]),
- CharacterRangeSplitter::kInOverlay);
+ table.AddRange(CharacterRange(overlay[i], overlay[i + 1] - 1),
+ CharacterRangeSplitter::kInOverlay, zone);
}
- CharacterRangeSplitter callback(included, excluded);
+ CharacterRangeSplitter callback(included, excluded, zone);
table.ForEach(&callback);
}
void CharacterRange::AddCaseEquivalents(ZoneList<CharacterRange>* ranges,
- bool is_ascii) {
+ bool is_ascii,
+ Zone* zone) {
Isolate* isolate = Isolate::Current();
uc16 bottom = from();
uc16 top = to();
@@ -4202,7 +5195,7 @@ void CharacterRange::AddCaseEquivalents(ZoneList<CharacterRange>* ranges,
for (int i = 0; i < length; i++) {
uc32 chr = chars[i];
if (chr != bottom) {
- ranges->Add(CharacterRange::Singleton(chars[i]));
+ ranges->Add(CharacterRange::Singleton(chars[i]), zone);
}
}
} else {
@@ -4226,7 +5219,7 @@ void CharacterRange::AddCaseEquivalents(ZoneList<CharacterRange>* ranges,
// as a "singleton block").
unibrow::uchar range[unibrow::Ecma262UnCanonicalize::kMaxWidth];
int pos = bottom;
- while (pos < top) {
+ while (pos <= top) {
int length = isolate->jsregexp_canonrange()->get(pos, '\0', range);
uc16 block_end;
if (length == 0) {
@@ -4242,7 +5235,7 @@ void CharacterRange::AddCaseEquivalents(ZoneList<CharacterRange>* ranges,
uc16 range_from = c - (block_end - pos);
uc16 range_to = c - (block_end - end);
if (!(bottom <= range_from && range_to <= top)) {
- ranges->Add(CharacterRange(range_from, range_to));
+ ranges->Add(CharacterRange(range_from, range_to), zone);
}
}
pos = end + 1;
@@ -4264,92 +5257,11 @@ bool CharacterRange::IsCanonical(ZoneList<CharacterRange>* ranges) {
return true;
}
-SetRelation CharacterRange::WordCharacterRelation(
- ZoneList<CharacterRange>* range) {
- ASSERT(IsCanonical(range));
- int i = 0; // Word character range index.
- int j = 0; // Argument range index.
- ASSERT_NE(0, kWordRangeCount);
- SetRelation result;
- if (range->length() == 0) {
- result.SetElementsInSecondSet();
- return result;
- }
- CharacterRange argument_range = range->at(0);
- CharacterRange word_range = CharacterRange(kWordRanges[0], kWordRanges[1]);
- while (i < kWordRangeCount && j < range->length()) {
- // Check the two ranges for the five cases:
- // - no overlap.
- // - partial overlap (there are elements in both ranges that isn't
- // in the other, and there are also elements that are in both).
- // - argument range entirely inside word range.
- // - word range entirely inside argument range.
- // - ranges are completely equal.
-
- // First check for no overlap. The earlier range is not in the other set.
- if (argument_range.from() > word_range.to()) {
- // Ranges are disjoint. The earlier word range contains elements that
- // cannot be in the argument set.
- result.SetElementsInSecondSet();
- } else if (word_range.from() > argument_range.to()) {
- // Ranges are disjoint. The earlier argument range contains elements that
- // cannot be in the word set.
- result.SetElementsInFirstSet();
- } else if (word_range.from() <= argument_range.from() &&
- word_range.to() >= argument_range.from()) {
- result.SetElementsInBothSets();
- // argument range completely inside word range.
- if (word_range.from() < argument_range.from() ||
- word_range.to() > argument_range.from()) {
- result.SetElementsInSecondSet();
- }
- } else if (word_range.from() >= argument_range.from() &&
- word_range.to() <= argument_range.from()) {
- result.SetElementsInBothSets();
- result.SetElementsInFirstSet();
- } else {
- // There is overlap, and neither is a subrange of the other
- result.SetElementsInFirstSet();
- result.SetElementsInSecondSet();
- result.SetElementsInBothSets();
- }
- if (result.NonTrivialIntersection()) {
- // The result is as (im)precise as we can possibly make it.
- return result;
- }
- // Progress the range(s) with minimal to-character.
- uc16 word_to = word_range.to();
- uc16 argument_to = argument_range.to();
- if (argument_to <= word_to) {
- j++;
- if (j < range->length()) {
- argument_range = range->at(j);
- }
- }
- if (word_to <= argument_to) {
- i += 2;
- if (i < kWordRangeCount) {
- word_range = CharacterRange(kWordRanges[i], kWordRanges[i + 1]);
- }
- }
- }
- // Check if anything wasn't compared in the loop.
- if (i < kWordRangeCount) {
- // word range contains something not in argument range.
- result.SetElementsInSecondSet();
- } else if (j < range->length()) {
- // Argument range contains something not in word range.
- result.SetElementsInFirstSet();
- }
-
- return result;
-}
-
-ZoneList<CharacterRange>* CharacterSet::ranges() {
+ZoneList<CharacterRange>* CharacterSet::ranges(Zone* zone) {
if (ranges_ == NULL) {
- ranges_ = new ZoneList<CharacterRange>(2);
- CharacterRange::AddClassEscape(standard_set_type_, ranges_);
+ ranges_ = new(zone) ZoneList<CharacterRange>(2, zone);
+ CharacterRange::AddClassEscape(standard_set_type_, ranges_, zone);
}
return ranges_;
}
@@ -4477,147 +5389,9 @@ void CharacterRange::Canonicalize(ZoneList<CharacterRange>* character_ranges) {
}
-// Utility function for CharacterRange::Merge. Adds a range at the end of
-// a canonicalized range list, if necessary merging the range with the last
-// range of the list.
-static void AddRangeToSet(ZoneList<CharacterRange>* set, CharacterRange range) {
- if (set == NULL) return;
- ASSERT(set->length() == 0 || set->at(set->length() - 1).to() < range.from());
- int n = set->length();
- if (n > 0) {
- CharacterRange lastRange = set->at(n - 1);
- if (lastRange.to() == range.from() - 1) {
- set->at(n - 1) = CharacterRange(lastRange.from(), range.to());
- return;
- }
- }
- set->Add(range);
-}
-
-
-static void AddRangeToSelectedSet(int selector,
- ZoneList<CharacterRange>* first_set,
- ZoneList<CharacterRange>* second_set,
- ZoneList<CharacterRange>* intersection_set,
- CharacterRange range) {
- switch (selector) {
- case kInsideFirst:
- AddRangeToSet(first_set, range);
- break;
- case kInsideSecond:
- AddRangeToSet(second_set, range);
- break;
- case kInsideBoth:
- AddRangeToSet(intersection_set, range);
- break;
- }
-}
-
-
-
-void CharacterRange::Merge(ZoneList<CharacterRange>* first_set,
- ZoneList<CharacterRange>* second_set,
- ZoneList<CharacterRange>* first_set_only_out,
- ZoneList<CharacterRange>* second_set_only_out,
- ZoneList<CharacterRange>* both_sets_out) {
- // Inputs are canonicalized.
- ASSERT(CharacterRange::IsCanonical(first_set));
- ASSERT(CharacterRange::IsCanonical(second_set));
- // Outputs are empty, if applicable.
- ASSERT(first_set_only_out == NULL || first_set_only_out->length() == 0);
- ASSERT(second_set_only_out == NULL || second_set_only_out->length() == 0);
- ASSERT(both_sets_out == NULL || both_sets_out->length() == 0);
-
- // Merge sets by iterating through the lists in order of lowest "from" value,
- // and putting intervals into one of three sets.
-
- if (first_set->length() == 0) {
- second_set_only_out->AddAll(*second_set);
- return;
- }
- if (second_set->length() == 0) {
- first_set_only_out->AddAll(*first_set);
- return;
- }
- // Indices into input lists.
- int i1 = 0;
- int i2 = 0;
- // Cache length of input lists.
- int n1 = first_set->length();
- int n2 = second_set->length();
- // Current range. May be invalid if state is kInsideNone.
- int from = 0;
- int to = -1;
- // Where current range comes from.
- int state = kInsideNone;
-
- while (i1 < n1 || i2 < n2) {
- CharacterRange next_range;
- int range_source;
- if (i2 == n2 ||
- (i1 < n1 && first_set->at(i1).from() < second_set->at(i2).from())) {
- // Next smallest element is in first set.
- next_range = first_set->at(i1++);
- range_source = kInsideFirst;
- } else {
- // Next smallest element is in second set.
- next_range = second_set->at(i2++);
- range_source = kInsideSecond;
- }
- if (to < next_range.from()) {
- // Ranges disjoint: |current| |next|
- AddRangeToSelectedSet(state,
- first_set_only_out,
- second_set_only_out,
- both_sets_out,
- CharacterRange(from, to));
- from = next_range.from();
- to = next_range.to();
- state = range_source;
- } else {
- if (from < next_range.from()) {
- AddRangeToSelectedSet(state,
- first_set_only_out,
- second_set_only_out,
- both_sets_out,
- CharacterRange(from, next_range.from()-1));
- }
- if (to < next_range.to()) {
- // Ranges overlap: |current|
- // |next|
- AddRangeToSelectedSet(state | range_source,
- first_set_only_out,
- second_set_only_out,
- both_sets_out,
- CharacterRange(next_range.from(), to));
- from = to + 1;
- to = next_range.to();
- state = range_source;
- } else {
- // Range included: |current| , possibly ending at same character.
- // |next|
- AddRangeToSelectedSet(
- state | range_source,
- first_set_only_out,
- second_set_only_out,
- both_sets_out,
- CharacterRange(next_range.from(), next_range.to()));
- from = next_range.to() + 1;
- // If ranges end at same character, both ranges are consumed completely.
- if (next_range.to() == to) state = kInsideNone;
- }
- }
- }
- AddRangeToSelectedSet(state,
- first_set_only_out,
- second_set_only_out,
- both_sets_out,
- CharacterRange(from, to));
-}
-
-
void CharacterRange::Negate(ZoneList<CharacterRange>* ranges,
- ZoneList<CharacterRange>* negated_ranges) {
+ ZoneList<CharacterRange>* negated_ranges,
+ Zone* zone) {
ASSERT(CharacterRange::IsCanonical(ranges));
ASSERT_EQ(0, negated_ranges->length());
int range_count = ranges->length();
@@ -4629,52 +5403,14 @@ void CharacterRange::Negate(ZoneList<CharacterRange>* ranges,
}
while (i < range_count) {
CharacterRange range = ranges->at(i);
- negated_ranges->Add(CharacterRange(from + 1, range.from() - 1));
+ negated_ranges->Add(CharacterRange(from + 1, range.from() - 1), zone);
from = range.to();
i++;
}
if (from < String::kMaxUtf16CodeUnit) {
- negated_ranges->Add(CharacterRange(from + 1, String::kMaxUtf16CodeUnit));
- }
-}
-
-
-
-// -------------------------------------------------------------------
-// Interest propagation
-
-
-RegExpNode* RegExpNode::TryGetSibling(NodeInfo* info) {
- for (int i = 0; i < siblings_.length(); i++) {
- RegExpNode* sibling = siblings_.Get(i);
- if (sibling->info()->Matches(info))
- return sibling;
+ negated_ranges->Add(CharacterRange(from + 1, String::kMaxUtf16CodeUnit),
+ zone);
}
- return NULL;
-}
-
-
-RegExpNode* RegExpNode::EnsureSibling(NodeInfo* info, bool* cloned) {
- ASSERT_EQ(false, *cloned);
- siblings_.Ensure(this);
- RegExpNode* result = TryGetSibling(info);
- if (result != NULL) return result;
- result = this->Clone();
- NodeInfo* new_info = result->info();
- new_info->ResetCompilationState();
- new_info->AddFromPreceding(info);
- AddSibling(result);
- *cloned = true;
- return result;
-}
-
-
-template <class C>
-static RegExpNode* PropagateToEndpoint(C* node, NodeInfo* info) {
- NodeInfo full_info(*node->info());
- full_info.AddFromPreceding(info);
- bool cloned = false;
- return RegExpNode::EnsureSibling(node, &full_info, &cloned);
}
@@ -4682,33 +5418,33 @@ static RegExpNode* PropagateToEndpoint(C* node, NodeInfo* info) {
// Splay tree
-OutSet* OutSet::Extend(unsigned value) {
+OutSet* OutSet::Extend(unsigned value, Zone* zone) {
if (Get(value))
return this;
- if (successors() != NULL) {
- for (int i = 0; i < successors()->length(); i++) {
- OutSet* successor = successors()->at(i);
+ if (successors(zone) != NULL) {
+ for (int i = 0; i < successors(zone)->length(); i++) {
+ OutSet* successor = successors(zone)->at(i);
if (successor->Get(value))
return successor;
}
} else {
- successors_ = new ZoneList<OutSet*>(2);
+ successors_ = new(zone) ZoneList<OutSet*>(2, zone);
}
- OutSet* result = new OutSet(first_, remaining_);
- result->Set(value);
- successors()->Add(result);
+ OutSet* result = new(zone) OutSet(first_, remaining_);
+ result->Set(value, zone);
+ successors(zone)->Add(result, zone);
return result;
}
-void OutSet::Set(unsigned value) {
+void OutSet::Set(unsigned value, Zone *zone) {
if (value < kFirstLimit) {
first_ |= (1 << value);
} else {
if (remaining_ == NULL)
- remaining_ = new ZoneList<unsigned>(1);
+ remaining_ = new(zone) ZoneList<unsigned>(1, zone);
if (remaining_->is_empty() || !remaining_->Contains(value))
- remaining_->Add(value);
+ remaining_->Add(value, zone);
}
}
@@ -4727,13 +5463,15 @@ bool OutSet::Get(unsigned value) {
const uc16 DispatchTable::Config::kNoKey = unibrow::Utf8::kBadChar;
-void DispatchTable::AddRange(CharacterRange full_range, int value) {
+void DispatchTable::AddRange(CharacterRange full_range, int value,
+ Zone* zone) {
CharacterRange current = full_range;
if (tree()->is_empty()) {
// If this is the first range we just insert into the table.
ZoneSplayTree<Config>::Locator loc;
ASSERT_RESULT(tree()->Insert(current.from(), &loc));
- loc.set_value(Entry(current.from(), current.to(), empty()->Extend(value)));
+ loc.set_value(Entry(current.from(), current.to(),
+ empty()->Extend(value, zone)));
return;
}
// First see if there is a range to the left of this one that
@@ -4776,7 +5514,7 @@ void DispatchTable::AddRange(CharacterRange full_range, int value) {
ASSERT_RESULT(tree()->Insert(current.from(), &ins));
ins.set_value(Entry(current.from(),
entry->from() - 1,
- empty()->Extend(value)));
+ empty()->Extend(value, zone)));
current.set_from(entry->from());
}
ASSERT_EQ(current.from(), entry->from());
@@ -4794,7 +5532,7 @@ void DispatchTable::AddRange(CharacterRange full_range, int value) {
// The overlapping range is now completely contained by the range
// we're adding so we can just update it and move the start point
// of the range we're adding just past it.
- entry->AddValue(value);
+ entry->AddValue(value, zone);
// Bail out if the last interval ended at 0xFFFF since otherwise
// adding 1 will wrap around to 0.
if (entry->to() == String::kMaxUtf16CodeUnit)
@@ -4807,7 +5545,7 @@ void DispatchTable::AddRange(CharacterRange full_range, int value) {
ASSERT_RESULT(tree()->Insert(current.from(), &ins));
ins.set_value(Entry(current.from(),
current.to(),
- empty()->Extend(value)));
+ empty()->Extend(value, zone)));
break;
}
}
@@ -4928,213 +5666,112 @@ void Analysis::VisitBackReference(BackReferenceNode* that) {
void Analysis::VisitAssertion(AssertionNode* that) {
EnsureAnalyzed(that->on_success());
- AssertionNode::AssertionNodeType type = that->type();
- if (type == AssertionNode::AT_BOUNDARY ||
- type == AssertionNode::AT_NON_BOUNDARY) {
- // Check if the following character is known to be a word character
- // or known to not be a word character.
- ZoneList<CharacterRange>* following_chars = that->FirstCharacterSet();
-
- CharacterRange::Canonicalize(following_chars);
-
- SetRelation word_relation =
- CharacterRange::WordCharacterRelation(following_chars);
- if (word_relation.Disjoint()) {
- // Includes the case where following_chars is empty (e.g., end-of-input).
- // Following character is definitely *not* a word character.
- type = (type == AssertionNode::AT_BOUNDARY) ?
- AssertionNode::AFTER_WORD_CHARACTER :
- AssertionNode::AFTER_NONWORD_CHARACTER;
- that->set_type(type);
- } else if (word_relation.ContainedIn()) {
- // Following character is definitely a word character.
- type = (type == AssertionNode::AT_BOUNDARY) ?
- AssertionNode::AFTER_NONWORD_CHARACTER :
- AssertionNode::AFTER_WORD_CHARACTER;
- that->set_type(type);
- }
- }
}
-ZoneList<CharacterRange>* RegExpNode::FirstCharacterSet() {
- if (first_character_set_ == NULL) {
- if (ComputeFirstCharacterSet(kFirstCharBudget) < 0) {
- // If we can't find an exact solution within the budget, we
- // set the value to the set of every character, i.e., all characters
- // are possible.
- ZoneList<CharacterRange>* all_set = new ZoneList<CharacterRange>(1);
- all_set->Add(CharacterRange::Everything());
- first_character_set_ = all_set;
- }
- }
- return first_character_set_;
-}
-
-
-int RegExpNode::ComputeFirstCharacterSet(int budget) {
- // Default behavior is to not be able to determine the first character.
- return kComputeFirstCharacterSetFail;
-}
-
-
-int LoopChoiceNode::ComputeFirstCharacterSet(int budget) {
- budget--;
- if (budget >= 0) {
- // Find loop min-iteration. It's the value of the guarded choice node
- // with a GEQ guard, if any.
- int min_repetition = 0;
-
- for (int i = 0; i <= 1; i++) {
- GuardedAlternative alternative = alternatives()->at(i);
- ZoneList<Guard*>* guards = alternative.guards();
- if (guards != NULL && guards->length() > 0) {
- Guard* guard = guards->at(0);
- if (guard->op() == Guard::GEQ) {
- min_repetition = guard->value();
- break;
- }
- }
- }
-
- budget = loop_node()->ComputeFirstCharacterSet(budget);
- if (budget >= 0) {
- ZoneList<CharacterRange>* character_set =
- loop_node()->first_character_set();
- if (body_can_be_zero_length() || min_repetition == 0) {
- budget = continue_node()->ComputeFirstCharacterSet(budget);
- if (budget < 0) return budget;
- ZoneList<CharacterRange>* body_set =
- continue_node()->first_character_set();
- ZoneList<CharacterRange>* union_set =
- new ZoneList<CharacterRange>(Max(character_set->length(),
- body_set->length()));
- CharacterRange::Merge(character_set,
- body_set,
- union_set,
- union_set,
- union_set);
- character_set = union_set;
- }
- set_first_character_set(character_set);
- }
- }
- return budget;
-}
-
-
-int NegativeLookaheadChoiceNode::ComputeFirstCharacterSet(int budget) {
- budget--;
- if (budget >= 0) {
- GuardedAlternative successor = this->alternatives()->at(1);
- RegExpNode* successor_node = successor.node();
- budget = successor_node->ComputeFirstCharacterSet(budget);
- if (budget >= 0) {
- set_first_character_set(successor_node->first_character_set());
- }
- }
- return budget;
+void BackReferenceNode::FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start) {
+ // Working out the set of characters that a backreference can match is too
+ // hard, so we just say that any character can match.
+ bm->SetRest(offset);
+ SaveBMInfo(bm, not_at_start, offset);
}
-// The first character set of an EndNode is unknowable. Just use the
-// default implementation that fails and returns all characters as possible.
+STATIC_ASSERT(BoyerMoorePositionInfo::kMapSize ==
+ RegExpMacroAssembler::kTableSize);
-int AssertionNode::ComputeFirstCharacterSet(int budget) {
- budget -= 1;
- if (budget >= 0) {
- switch (type_) {
- case AT_END: {
- set_first_character_set(new ZoneList<CharacterRange>(0));
- break;
- }
- case AT_START:
- case AT_BOUNDARY:
- case AT_NON_BOUNDARY:
- case AFTER_NEWLINE:
- case AFTER_NONWORD_CHARACTER:
- case AFTER_WORD_CHARACTER: {
- ASSERT_NOT_NULL(on_success());
- budget = on_success()->ComputeFirstCharacterSet(budget);
- if (budget >= 0) {
- set_first_character_set(on_success()->first_character_set());
- }
- break;
- }
+void ChoiceNode::FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start) {
+ ZoneList<GuardedAlternative>* alts = alternatives();
+ budget = (budget - 1) / alts->length();
+ for (int i = 0; i < alts->length(); i++) {
+ GuardedAlternative& alt = alts->at(i);
+ if (alt.guards() != NULL && alt.guards()->length() != 0) {
+ bm->SetRest(offset); // Give up trying to fill in info.
+ SaveBMInfo(bm, not_at_start, offset);
+ return;
}
+ alt.node()->FillInBMInfo(
+ offset, recursion_depth + 1, budget, bm, not_at_start);
}
- return budget;
+ SaveBMInfo(bm, not_at_start, offset);
}
-int ActionNode::ComputeFirstCharacterSet(int budget) {
- if (type_ == POSITIVE_SUBMATCH_SUCCESS) return kComputeFirstCharacterSetFail;
- budget--;
- if (budget >= 0) {
- ASSERT_NOT_NULL(on_success());
- budget = on_success()->ComputeFirstCharacterSet(budget);
- if (budget >= 0) {
- set_first_character_set(on_success()->first_character_set());
+void TextNode::FillInBMInfo(int initial_offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start) {
+ if (initial_offset >= bm->length()) return;
+ int offset = initial_offset;
+ int max_char = bm->max_char();
+ for (int i = 0; i < elements()->length(); i++) {
+ if (offset >= bm->length()) {
+ if (initial_offset == 0) set_bm_info(not_at_start, bm);
+ return;
}
- }
- return budget;
-}
-
-
-int BackReferenceNode::ComputeFirstCharacterSet(int budget) {
- // We don't know anything about the first character of a backreference
- // at this point.
- // The potential first characters are the first characters of the capture,
- // and the first characters of the on_success node, depending on whether the
- // capture can be empty and whether it is known to be participating or known
- // not to be.
- return kComputeFirstCharacterSetFail;
-}
-
-
-int TextNode::ComputeFirstCharacterSet(int budget) {
- budget--;
- if (budget >= 0) {
- ASSERT_NE(0, elements()->length());
- TextElement text = elements()->at(0);
+ TextElement text = elements()->at(i);
if (text.type == TextElement::ATOM) {
RegExpAtom* atom = text.data.u_atom;
- ASSERT_NE(0, atom->length());
- uc16 first_char = atom->data()[0];
- ZoneList<CharacterRange>* range = new ZoneList<CharacterRange>(1);
- range->Add(CharacterRange(first_char, first_char));
- set_first_character_set(range);
+ for (int j = 0; j < atom->length(); j++, offset++) {
+ if (offset >= bm->length()) {
+ if (initial_offset == 0) set_bm_info(not_at_start, bm);
+ return;
+ }
+ uc16 character = atom->data()[j];
+ if (bm->compiler()->ignore_case()) {
+ unibrow::uchar chars[unibrow::Ecma262UnCanonicalize::kMaxWidth];
+ int length = GetCaseIndependentLetters(
+ ISOLATE,
+ character,
+ bm->max_char() == String::kMaxAsciiCharCode,
+ chars);
+ for (int j = 0; j < length; j++) {
+ bm->Set(offset, chars[j]);
+ }
+ } else {
+ if (character <= max_char) bm->Set(offset, character);
+ }
+ }
} else {
ASSERT(text.type == TextElement::CHAR_CLASS);
RegExpCharacterClass* char_class = text.data.u_char_class;
- ZoneList<CharacterRange>* ranges = char_class->ranges();
- // TODO(lrn): Canonicalize ranges when they are created
- // instead of waiting until now.
- CharacterRange::Canonicalize(ranges);
+ ZoneList<CharacterRange>* ranges = char_class->ranges(zone());
if (char_class->is_negated()) {
- int length = ranges->length();
- int new_length = length + 1;
- if (length > 0) {
- if (ranges->at(0).from() == 0) new_length--;
- if (ranges->at(length - 1).to() == String::kMaxUtf16CodeUnit) {
- new_length--;
- }
- }
- ZoneList<CharacterRange>* negated_ranges =
- new ZoneList<CharacterRange>(new_length);
- CharacterRange::Negate(ranges, negated_ranges);
- set_first_character_set(negated_ranges);
+ bm->SetAll(offset);
} else {
- set_first_character_set(ranges);
+ for (int k = 0; k < ranges->length(); k++) {
+ CharacterRange& range = ranges->at(k);
+ if (range.from() > max_char) continue;
+ int to = Min(max_char, static_cast<int>(range.to()));
+ bm->SetInterval(offset, Interval(range.from(), to));
+ }
}
+ offset++;
}
}
- return budget;
+ if (offset >= bm->length()) {
+ if (initial_offset == 0) set_bm_info(not_at_start, bm);
+ return;
+ }
+ on_success()->FillInBMInfo(offset,
+ recursion_depth + 1,
+ budget - 1,
+ bm,
+ true); // Not at start after a text node.
+ if (initial_offset == 0) set_bm_info(not_at_start, bm);
}
-
// -------------------------------------------------------------------
// Dispatch table construction
@@ -5228,7 +5865,7 @@ void DispatchTableConstructor::VisitText(TextNode* that) {
}
case TextElement::CHAR_CLASS: {
RegExpCharacterClass* tree = elm.data.u_char_class;
- ZoneList<CharacterRange>* ranges = tree->ranges();
+ ZoneList<CharacterRange>* ranges = tree->ranges(that->zone());
if (tree->is_negated()) {
AddInverse(ranges);
} else {
@@ -5250,15 +5887,32 @@ void DispatchTableConstructor::VisitAction(ActionNode* that) {
}
-RegExpEngine::CompilationResult RegExpEngine::Compile(RegExpCompileData* data,
- bool ignore_case,
- bool is_multiline,
- Handle<String> pattern,
- bool is_ascii) {
+RegExpEngine::CompilationResult RegExpEngine::Compile(
+ RegExpCompileData* data,
+ bool ignore_case,
+ bool is_global,
+ bool is_multiline,
+ Handle<String> pattern,
+ Handle<String> sample_subject,
+ bool is_ascii,
+ Zone* zone) {
if ((data->capture_count + 1) * 2 - 1 > RegExpMacroAssembler::kMaxRegister) {
return IrregexpRegExpTooBig();
}
- RegExpCompiler compiler(data->capture_count, ignore_case, is_ascii);
+ RegExpCompiler compiler(data->capture_count, ignore_case, is_ascii, zone);
+
+ // Sample some characters from the middle of the string.
+ static const int kSampleSize = 128;
+
+ FlattenString(sample_subject);
+ int chars_sampled = 0;
+ int half_way = (sample_subject->length() - kSampleSize) / 2;
+ for (int i = Max(0, half_way);
+ i < sample_subject->length() && chars_sampled < kSampleSize;
+ i++, chars_sampled++) {
+ compiler.frequency_collator()->CountCharacter(sample_subject->Get(i));
+ }
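For example (illustrative numbers), a 1024-character subject gives half_way = (1024 - 128) / 2 = 448, so the 128 characters at indices 448..575 are counted; FindBestInterval later uses these frequencies to estimate how likely a Boyer-Moore skip is to pay off.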
+
// Wrap the body of the regexp in capture #0.
RegExpNode* captured_body = RegExpCapture::ToNode(data->tree,
0,
@@ -5275,7 +5929,7 @@ RegExpEngine::CompilationResult RegExpEngine::Compile(RegExpCompileData* data,
RegExpQuantifier::ToNode(0,
RegExpTree::kInfinity,
false,
- new RegExpCharacterClass('*'),
+ new(zone) RegExpCharacterClass('*'),
&compiler,
captured_body,
data->contains_anchor);
@@ -5283,15 +5937,23 @@ RegExpEngine::CompilationResult RegExpEngine::Compile(RegExpCompileData* data,
if (data->contains_anchor) {
// Unroll loop once, to take care of the case that might start
// at the start of input.
- ChoiceNode* first_step_node = new ChoiceNode(2);
+ ChoiceNode* first_step_node = new(zone) ChoiceNode(2, zone);
first_step_node->AddAlternative(GuardedAlternative(captured_body));
first_step_node->AddAlternative(GuardedAlternative(
- new TextNode(new RegExpCharacterClass('*'), loop_node)));
+ new(zone) TextNode(new(zone) RegExpCharacterClass('*'), loop_node)));
node = first_step_node;
} else {
node = loop_node;
}
}
+ if (is_ascii) {
+ node = node->FilterASCII(RegExpCompiler::kMaxRecursion);
+    // Do it again to propagate the new nodes to places where they were not
+    // inserted the first time round because they had not yet been calculated.
+ if (node != NULL) node = node->FilterASCII(RegExpCompiler::kMaxRecursion);
+ }
+
+ if (node == NULL) node = new(zone) EndNode(EndNode::BACKTRACK, zone);
data->node = node;
Analysis analysis(ignore_case, is_ascii);
analysis.EnsureAnalyzed(node);
@@ -5309,13 +5971,17 @@ RegExpEngine::CompilationResult RegExpEngine::Compile(RegExpCompileData* data,
: NativeRegExpMacroAssembler::UC16;
#if V8_TARGET_ARCH_IA32
- RegExpMacroAssemblerIA32 macro_assembler(mode, (data->capture_count + 1) * 2);
+ RegExpMacroAssemblerIA32 macro_assembler(mode, (data->capture_count + 1) * 2,
+ zone);
#elif V8_TARGET_ARCH_X64
- RegExpMacroAssemblerX64 macro_assembler(mode, (data->capture_count + 1) * 2);
+ RegExpMacroAssemblerX64 macro_assembler(mode, (data->capture_count + 1) * 2,
+ zone);
#elif V8_TARGET_ARCH_ARM
- RegExpMacroAssemblerARM macro_assembler(mode, (data->capture_count + 1) * 2);
+ RegExpMacroAssemblerARM macro_assembler(mode, (data->capture_count + 1) * 2,
+ zone);
#elif V8_TARGET_ARCH_MIPS
- RegExpMacroAssemblerMIPS macro_assembler(mode, (data->capture_count + 1) * 2);
+ RegExpMacroAssemblerMIPS macro_assembler(mode, (data->capture_count + 1) * 2,
+ zone);
#endif
#else // V8_INTERPRETED_REGEXP
@@ -5333,6 +5999,13 @@ RegExpEngine::CompilationResult RegExpEngine::Compile(RegExpCompileData* data,
macro_assembler.SetCurrentPositionFromEnd(max_length);
}
+ if (is_global) {
+ macro_assembler.set_global_mode(
+ (data->tree->min_match() > 0)
+ ? RegExpMacroAssembler::GLOBAL_NO_ZERO_LENGTH_CHECK
+ : RegExpMacroAssembler::GLOBAL);
+ }
+
return compiler.Assemble(&macro_assembler,
node,
data->capture_count,
diff --git a/deps/v8/src/jsregexp.h b/deps/v8/src/jsregexp.h
index 8875de9eb2..782c5b0b20 100644
--- a/deps/v8/src/jsregexp.h
+++ b/deps/v8/src/jsregexp.h
@@ -40,6 +40,7 @@ class RegExpCompiler;
class RegExpMacroAssembler;
class RegExpNode;
class RegExpTree;
+class BoyerMooreLookahead;
class RegExpImpl {
public:
@@ -77,7 +78,8 @@ class RegExpImpl {
static Handle<Object> Exec(Handle<JSRegExp> regexp,
Handle<String> subject,
int index,
- Handle<JSArray> lastMatchInfo);
+ Handle<JSArray> lastMatchInfo,
+ Zone* zone);
// Prepares a JSRegExp object with Irregexp-specific data.
static void IrregexpInitialize(Handle<JSRegExp> re,
@@ -106,18 +108,26 @@ class RegExpImpl {
// as its "registers" argument. If the regexp cannot be compiled,
// an exception is set as pending, and this function returns negative.
static int IrregexpPrepare(Handle<JSRegExp> regexp,
- Handle<String> subject);
-
- // Execute a regular expression once on the subject, starting from
- // character "index".
- // If successful, returns RE_SUCCESS and set the capture positions
- // in the first registers.
+ Handle<String> subject,
+ Zone* zone);
+
+  // Calculate the size of the offsets vector for the case of a global regexp
+  // and the number of matches this vector is able to store.
+ static int GlobalOffsetsVectorSize(Handle<JSRegExp> regexp,
+ int registers_per_match,
+ int* max_matches);
+
+ // Execute a regular expression on the subject, starting from index.
+ // If matching succeeds, return the number of matches. This can be larger
+ // than one in the case of global regular expressions.
+ // The captures and subcaptures are stored into the registers vector.
// If matching fails, returns RE_FAILURE.
// If execution fails, sets a pending exception and returns RE_EXCEPTION.
- static IrregexpResult IrregexpExecOnce(Handle<JSRegExp> regexp,
- Handle<String> subject,
- int index,
- Vector<int> registers);
+ static int IrregexpExecRaw(Handle<JSRegExp> regexp,
+ Handle<String> subject,
+ int index,
+ Vector<int> registers,
+ Zone* zone);
// Execute an Irregexp bytecode pattern.
// On a successful match, the result is a JSArray containing
@@ -126,7 +136,8 @@ class RegExpImpl {
static Handle<Object> IrregexpExec(Handle<JSRegExp> regexp,
Handle<String> subject,
int index,
- Handle<JSArray> lastMatchInfo);
+ Handle<JSArray> lastMatchInfo,
+ Zone* zone);
// Array index in the lastMatchInfo array.
static const int kLastCaptureCount = 0;
@@ -190,8 +201,12 @@ class RegExpImpl {
static String* last_ascii_string_;
static String* two_byte_cached_string_;
- static bool CompileIrregexp(Handle<JSRegExp> re, bool is_ascii);
- static inline bool EnsureCompiledIrregexp(Handle<JSRegExp> re, bool is_ascii);
+ static bool CompileIrregexp(
+ Handle<JSRegExp> re, Handle<String> sample_subject, bool is_ascii,
+ Zone* zone);
+ static inline bool EnsureCompiledIrregexp(
+ Handle<JSRegExp> re, Handle<String> sample_subject, bool is_ascii,
+ Zone* zone);
// Set the subject cache. The previous string buffer is not deleted, so the
@@ -222,56 +237,17 @@ enum ElementInSetsRelation {
};
-// Represents the relation of two sets.
-// Sets can be either disjoint, partially or fully overlapping, or equal.
-class SetRelation BASE_EMBEDDED {
- public:
- // Relation is represented by a bit saying whether there are elements in
- // one set that is not in the other, and a bit saying that there are elements
- // that are in both sets.
-
- // Location of an element. Corresponds to the internal areas of
- // a Venn diagram.
- enum {
- kInFirst = 1 << kInsideFirst,
- kInSecond = 1 << kInsideSecond,
- kInBoth = 1 << kInsideBoth
- };
- SetRelation() : bits_(0) {}
- ~SetRelation() {}
- // Add the existence of objects in a particular
- void SetElementsInFirstSet() { bits_ |= kInFirst; }
- void SetElementsInSecondSet() { bits_ |= kInSecond; }
- void SetElementsInBothSets() { bits_ |= kInBoth; }
- // Check the currently known relation of the sets (common functions only,
- // for other combinations, use value() to get the bits and check them
- // manually).
- // Sets are completely disjoint.
- bool Disjoint() { return (bits_ & kInBoth) == 0; }
- // Sets are equal.
- bool Equals() { return (bits_ & (kInFirst | kInSecond)) == 0; }
- // First set contains second.
- bool Contains() { return (bits_ & kInSecond) == 0; }
- // Second set contains first.
- bool ContainedIn() { return (bits_ & kInFirst) == 0; }
- bool NonTrivialIntersection() {
- return (bits_ == (kInFirst | kInSecond | kInBoth));
- }
- int value() { return bits_; }
-
- private:
- int bits_;
-};
-
-
+// Represents code units in the range from from_ to to_; both ends are
+// inclusive.
class CharacterRange {
public:
CharacterRange() : from_(0), to_(0) { }
// For compatibility with the CHECK_OK macro
CharacterRange(void* null) { ASSERT_EQ(NULL, null); } //NOLINT
CharacterRange(uc16 from, uc16 to) : from_(from), to_(to) { }
- static void AddClassEscape(uc16 type, ZoneList<CharacterRange>* ranges);
- static Vector<const uc16> GetWordBounds();
+ static void AddClassEscape(uc16 type, ZoneList<CharacterRange>* ranges,
+ Zone* zone);
+ static Vector<const int> GetWordBounds();
static inline CharacterRange Singleton(uc16 value) {
return CharacterRange(value, value);
}
@@ -290,11 +266,13 @@ class CharacterRange {
bool is_valid() { return from_ <= to_; }
bool IsEverything(uc16 max) { return from_ == 0 && to_ >= max; }
bool IsSingleton() { return (from_ == to_); }
- void AddCaseEquivalents(ZoneList<CharacterRange>* ranges, bool is_ascii);
+ void AddCaseEquivalents(ZoneList<CharacterRange>* ranges, bool is_ascii,
+ Zone* zone);
static void Split(ZoneList<CharacterRange>* base,
- Vector<const uc16> overlay,
+ Vector<const int> overlay,
ZoneList<CharacterRange>** included,
- ZoneList<CharacterRange>** excluded);
+ ZoneList<CharacterRange>** excluded,
+ Zone* zone);
// Whether a range list is in canonical form: Ranges ordered by from value,
// and ranges non-overlapping and non-adjacent.
static bool IsCanonical(ZoneList<CharacterRange>* ranges);
@@ -303,31 +281,10 @@ class CharacterRange {
// adjacent ranges are merged. The resulting list may be shorter than the
// original, but cannot be longer.
static void Canonicalize(ZoneList<CharacterRange>* ranges);
- // Check how the set of characters defined by a CharacterRange list relates
- // to the set of word characters. List must be in canonical form.
- static SetRelation WordCharacterRelation(ZoneList<CharacterRange>* ranges);
- // Takes two character range lists (representing character sets) in canonical
- // form and merges them.
- // The characters that are only covered by the first set are added to
- // first_set_only_out. the characters that are only in the second set are
- // added to second_set_only_out, and the characters that are in both are
- // added to both_sets_out.
- // The pointers to first_set_only_out, second_set_only_out and both_sets_out
- // should be to empty lists, but they need not be distinct, and may be NULL.
- // If NULL, the characters are dropped, and if two arguments are the same
- // pointer, the result is the union of the two sets that would be created
- // if the pointers had been distinct.
- // This way, the Merge function can compute all the usual set operations:
- // union (all three out-sets are equal), intersection (only both_sets_out is
- // non-NULL), and set difference (only first_set is non-NULL).
- static void Merge(ZoneList<CharacterRange>* first_set,
- ZoneList<CharacterRange>* second_set,
- ZoneList<CharacterRange>* first_set_only_out,
- ZoneList<CharacterRange>* second_set_only_out,
- ZoneList<CharacterRange>* both_sets_out);
// Negate the contents of a character range in canonical form.
static void Negate(ZoneList<CharacterRange>* src,
- ZoneList<CharacterRange>* dst);
+ ZoneList<CharacterRange>* dst,
+ Zone* zone);
static const int kStartMarker = (1 << 24);
static const int kPayloadMask = (1 << 24) - 1;
@@ -342,7 +299,7 @@ class CharacterRange {
class OutSet: public ZoneObject {
public:
OutSet() : first_(0), remaining_(NULL), successors_(NULL) { }
- OutSet* Extend(unsigned value);
+ OutSet* Extend(unsigned value, Zone* zone);
bool Get(unsigned value);
static const unsigned kFirstLimit = 32;
@@ -350,12 +307,12 @@ class OutSet: public ZoneObject {
// Destructively set a value in this set. In most cases you want
// to use Extend instead to ensure that only one instance exists
// that contains the same values.
- void Set(unsigned value);
+ void Set(unsigned value, Zone* zone);
// The successors are a list of sets that contain the same values
// as this set and the one more value that is not present in this
// set.
- ZoneList<OutSet*>* successors() { return successors_; }
+ ZoneList<OutSet*>* successors(Zone* zone) { return successors_; }
OutSet(uint32_t first, ZoneList<unsigned>* remaining)
: first_(first), remaining_(remaining), successors_(NULL) { }
@@ -370,6 +327,8 @@ class OutSet: public ZoneObject {
// Used for mapping character ranges to choices.
class DispatchTable : public ZoneObject {
public:
+ explicit DispatchTable(Zone* zone) : tree_(zone) { }
+
class Entry {
public:
Entry() : from_(0), to_(0), out_set_(NULL) { }
@@ -378,7 +337,9 @@ class DispatchTable : public ZoneObject {
uc16 from() { return from_; }
uc16 to() { return to_; }
void set_to(uc16 value) { to_ = value; }
- void AddValue(int value) { out_set_ = out_set_->Extend(value); }
+ void AddValue(int value, Zone* zone) {
+ out_set_ = out_set_->Extend(value, zone);
+ }
OutSet* out_set() { return out_set_; }
private:
uc16 from_;
@@ -402,12 +363,14 @@ class DispatchTable : public ZoneObject {
}
};
- void AddRange(CharacterRange range, int value);
+ void AddRange(CharacterRange range, int value, Zone* zone);
OutSet* Get(uc16 value);
void Dump();
template <typename Callback>
- void ForEach(Callback* callback) { return tree()->ForEach(callback); }
+ void ForEach(Callback* callback) {
+ return tree()->ForEach(callback);
+ }
private:
// There can't be a static empty set since it allocates its
@@ -475,7 +438,8 @@ struct NodeInfo {
follows_newline_interest(false),
follows_start_interest(false),
at_end(false),
- visited(false) { }
+ visited(false),
+ replacement_calculated(false) { }
// Returns true if the interests and assumptions of this node
// matches the given one.
@@ -525,25 +489,7 @@ struct NodeInfo {
bool at_end: 1;
bool visited: 1;
-};
-
-
-class SiblingList {
- public:
- SiblingList() : list_(NULL) { }
- int length() {
- return list_ == NULL ? 0 : list_->length();
- }
- void Ensure(RegExpNode* parent) {
- if (list_ == NULL) {
- list_ = new ZoneList<RegExpNode*>(2);
- list_->Add(parent);
- }
- }
- void Add(RegExpNode* node) { list_->Add(node); }
- RegExpNode* Get(int index) { return list_->at(index); }
- private:
- ZoneList<RegExpNode*>* list_;
+ bool replacement_calculated: 1;
};
@@ -599,9 +545,15 @@ class QuickCheckDetails {
};
+extern int kUninitializedRegExpNodePlaceHolder;
+
+
class RegExpNode: public ZoneObject {
public:
- RegExpNode() : first_character_set_(NULL), trace_count_(0) { }
+ explicit RegExpNode(Zone* zone)
+ : replacement_(NULL), trace_count_(0), zone_(zone) {
+ bm_info_[0] = bm_info_[1] = NULL;
+ }
virtual ~RegExpNode();
virtual void Accept(NodeVisitor* visitor) = 0;
// Generates a goto to this node or actually generates the code at this point.
@@ -635,6 +587,50 @@ class RegExpNode: public ZoneObject {
bool not_at_start) = 0;
static const int kNodeIsTooComplexForGreedyLoops = -1;
virtual int GreedyLoopTextLength() { return kNodeIsTooComplexForGreedyLoops; }
+ // Only returns the successor for a text node of length 1 that matches any
+ // character and that has no guards on it.
+ virtual RegExpNode* GetSuccessorOfOmnivorousTextNode(
+ RegExpCompiler* compiler) {
+ return NULL;
+ }
+
+ // Collects information on the possible code units (mod 128) that can match if
+ // we look forward. This is used for a Boyer-Moore-like string searching
+ // implementation. TODO(erikcorry): This should share more code with
+ // EatsAtLeast, GetQuickCheckDetails. The budget argument is used to limit
+ // the number of nodes we are willing to look at in order to create this data.
+ static const int kFillInBMBudget = 200;
+ virtual void FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start) {
+ UNREACHABLE();
+ }
+
+ // If we know that the input is ASCII then there are some nodes that can
+ // never match. This method returns a node that can be substituted for
+ // itself, or NULL if the node can never match.
+ virtual RegExpNode* FilterASCII(int depth) { return this; }
+ // Helper for FilterASCII.
+ RegExpNode* replacement() {
+ ASSERT(info()->replacement_calculated);
+ return replacement_;
+ }
+ RegExpNode* set_replacement(RegExpNode* replacement) {
+ info()->replacement_calculated = true;
+ replacement_ = replacement;
+ return replacement; // For convenience.
+ }
+
+ // We want to avoid recalculating the lookahead info, so we store it on the
+ // node. Only info that is for this node is stored. We can tell that the
+ // info is for this node when offset == 0, so the information is calculated
+ // relative to this node.
+ void SaveBMInfo(BoyerMooreLookahead* bm, bool not_at_start, int offset) {
+ if (offset == 0) set_bm_info(not_at_start, bm);
+ }
+
Label* label() { return &label_; }
// If non-generic code is generated for a node (i.e. the node is not at the
// start of the trace) then it cannot be reused. This variable sets a limit
@@ -645,72 +641,35 @@ class RegExpNode: public ZoneObject {
NodeInfo* info() { return &info_; }
- void AddSibling(RegExpNode* node) { siblings_.Add(node); }
-
- // Static version of EnsureSibling that expresses the fact that the
- // result has the same type as the input.
- template <class C>
- static C* EnsureSibling(C* node, NodeInfo* info, bool* cloned) {
- return static_cast<C*>(node->EnsureSibling(info, cloned));
+ BoyerMooreLookahead* bm_info(bool not_at_start) {
+ return bm_info_[not_at_start ? 1 : 0];
}
- SiblingList* siblings() { return &siblings_; }
- void set_siblings(SiblingList* other) { siblings_ = *other; }
-
- // Return the set of possible next characters recognized by the regexp
- // (or a safe subset, potentially the set of all characters).
- ZoneList<CharacterRange>* FirstCharacterSet();
-
- // Compute (if possible within the budget of traversed nodes) the
- // possible first characters of the input matched by this node and
- // its continuation. Returns the remaining budget after the computation.
- // If the budget is spent, the result is negative, and the cached
- // first_character_set_ value isn't set.
- virtual int ComputeFirstCharacterSet(int budget);
-
- // Get and set the cached first character set value.
- ZoneList<CharacterRange>* first_character_set() {
- return first_character_set_;
- }
- void set_first_character_set(ZoneList<CharacterRange>* character_set) {
- first_character_set_ = character_set;
- }
+ Zone* zone() const { return zone_; }
protected:
enum LimitResult { DONE, CONTINUE };
- static const int kComputeFirstCharacterSetFail = -1;
+ RegExpNode* replacement_;
LimitResult LimitVersions(RegExpCompiler* compiler, Trace* trace);
- // Returns a sibling of this node whose interests and assumptions
- // match the ones in the given node info. If no sibling exists NULL
- // is returned.
- RegExpNode* TryGetSibling(NodeInfo* info);
-
- // Returns a sibling of this node whose interests match the ones in
- // the given node info. The info must not contain any assertions.
- // If no node exists a new one will be created by cloning the current
- // node. The result will always be an instance of the same concrete
- // class as this node.
- RegExpNode* EnsureSibling(NodeInfo* info, bool* cloned);
-
- // Returns a clone of this node initialized using the copy constructor
- // of its concrete class. Note that the node may have to be pre-
- // processed before it is on a usable state.
- virtual RegExpNode* Clone() = 0;
+ void set_bm_info(bool not_at_start, BoyerMooreLookahead* bm) {
+ bm_info_[not_at_start ? 1 : 0] = bm;
+ }
private:
static const int kFirstCharBudget = 10;
Label label_;
NodeInfo info_;
- SiblingList siblings_;
- ZoneList<CharacterRange>* first_character_set_;
// This variable keeps track of how many times code has been generated for
// this node (in different traces). We don't keep track of where the
// generated code is located unless the code is generated at the start of
// a trace, in which case it is generic and can be reused by flushing the
// deferred operations in the current trace and generating a goto.
int trace_count_;
+ BoyerMooreLookahead* bm_info_[2];
+
+ Zone* zone_;
};
@@ -731,8 +690,8 @@ class Interval {
return (from_ <= value) && (value <= to_);
}
bool is_empty() { return from_ == kNone; }
- int from() { return from_; }
- int to() { return to_; }
+ int from() const { return from_; }
+ int to() const { return to_; }
static Interval Empty() { return Interval(); }
static const int kNone = -1;
private:
@@ -744,9 +703,23 @@ class Interval {
class SeqRegExpNode: public RegExpNode {
public:
explicit SeqRegExpNode(RegExpNode* on_success)
- : on_success_(on_success) { }
+ : RegExpNode(on_success->zone()), on_success_(on_success) { }
RegExpNode* on_success() { return on_success_; }
void set_on_success(RegExpNode* node) { on_success_ = node; }
+ virtual RegExpNode* FilterASCII(int depth);
+ virtual void FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start) {
+ on_success_->FillInBMInfo(
+ offset, recursion_depth + 1, budget - 1, bm, not_at_start);
+ if (offset == 0) set_bm_info(not_at_start, bm);
+ }
+
+ protected:
+ RegExpNode* FilterSuccessor(int depth);
+
private:
RegExpNode* on_success_;
};
@@ -793,11 +766,14 @@ class ActionNode: public SeqRegExpNode {
return on_success()->GetQuickCheckDetails(
details, compiler, filled_in, not_at_start);
}
+ virtual void FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start);
Type type() { return type_; }
// TODO(erikcorry): We should allow some action nodes in greedy loops.
virtual int GreedyLoopTextLength() { return kNodeIsTooComplexForGreedyLoops; }
- virtual ActionNode* Clone() { return new ActionNode(*this); }
- virtual int ComputeFirstCharacterSet(int budget);
private:
union {
@@ -845,8 +821,8 @@ class TextNode: public SeqRegExpNode {
TextNode(RegExpCharacterClass* that,
RegExpNode* on_success)
: SeqRegExpNode(on_success),
- elms_(new ZoneList<TextElement>(1)) {
- elms_->Add(TextElement::CharClass(that));
+ elms_(new(zone()) ZoneList<TextElement>(1, zone())) {
+ elms_->Add(TextElement::CharClass(that), zone());
}
virtual void Accept(NodeVisitor* visitor);
virtual void Emit(RegExpCompiler* compiler, Trace* trace);
@@ -860,13 +836,15 @@ class TextNode: public SeqRegExpNode {
ZoneList<TextElement>* elements() { return elms_; }
void MakeCaseIndependent(bool is_ascii);
virtual int GreedyLoopTextLength();
- virtual TextNode* Clone() {
- TextNode* result = new TextNode(*this);
- result->CalculateOffsets();
- return result;
- }
+ virtual RegExpNode* GetSuccessorOfOmnivorousTextNode(
+ RegExpCompiler* compiler);
+ virtual void FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start);
void CalculateOffsets();
- virtual int ComputeFirstCharacterSet(int budget);
+ virtual RegExpNode* FilterASCII(int depth);
private:
enum TextEmitPassType {
@@ -897,27 +875,22 @@ class AssertionNode: public SeqRegExpNode {
AT_START,
AT_BOUNDARY,
AT_NON_BOUNDARY,
- AFTER_NEWLINE,
- // Types not directly expressible in regexp syntax.
- // Used for modifying a boundary node if its following character is
- // known to be word and/or non-word.
- AFTER_NONWORD_CHARACTER,
- AFTER_WORD_CHARACTER
+ AFTER_NEWLINE
};
static AssertionNode* AtEnd(RegExpNode* on_success) {
- return new AssertionNode(AT_END, on_success);
+ return new(on_success->zone()) AssertionNode(AT_END, on_success);
}
static AssertionNode* AtStart(RegExpNode* on_success) {
- return new AssertionNode(AT_START, on_success);
+ return new(on_success->zone()) AssertionNode(AT_START, on_success);
}
static AssertionNode* AtBoundary(RegExpNode* on_success) {
- return new AssertionNode(AT_BOUNDARY, on_success);
+ return new(on_success->zone()) AssertionNode(AT_BOUNDARY, on_success);
}
static AssertionNode* AtNonBoundary(RegExpNode* on_success) {
- return new AssertionNode(AT_NON_BOUNDARY, on_success);
+ return new(on_success->zone()) AssertionNode(AT_NON_BOUNDARY, on_success);
}
static AssertionNode* AfterNewline(RegExpNode* on_success) {
- return new AssertionNode(AFTER_NEWLINE, on_success);
+ return new(on_success->zone()) AssertionNode(AFTER_NEWLINE, on_success);
}
virtual void Accept(NodeVisitor* visitor);
virtual void Emit(RegExpCompiler* compiler, Trace* trace);
@@ -928,12 +901,20 @@ class AssertionNode: public SeqRegExpNode {
RegExpCompiler* compiler,
int filled_in,
bool not_at_start);
- virtual int ComputeFirstCharacterSet(int budget);
- virtual AssertionNode* Clone() { return new AssertionNode(*this); }
+ virtual void FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start);
AssertionNodeType type() { return type_; }
void set_type(AssertionNodeType type) { type_ = type; }
private:
+ void EmitBoundaryCheck(RegExpCompiler* compiler, Trace* trace);
+ enum IfPrevious { kIsNonWord, kIsWord };
+ void BacktrackIfPrevious(RegExpCompiler* compiler,
+ Trace* trace,
+ IfPrevious backtrack_if_previous);
AssertionNode(AssertionNodeType t, RegExpNode* on_success)
: SeqRegExpNode(on_success), type_(t) { }
AssertionNodeType type_;
@@ -961,8 +942,11 @@ class BackReferenceNode: public SeqRegExpNode {
bool not_at_start) {
return;
}
- virtual BackReferenceNode* Clone() { return new BackReferenceNode(*this); }
- virtual int ComputeFirstCharacterSet(int budget);
+ virtual void FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start);
private:
int start_reg_;
@@ -973,7 +957,8 @@ class BackReferenceNode: public SeqRegExpNode {
class EndNode: public RegExpNode {
public:
enum Action { ACCEPT, BACKTRACK, NEGATIVE_SUBMATCH_SUCCESS };
- explicit EndNode(Action action) : action_(action) { }
+ explicit EndNode(Action action, Zone* zone)
+ : RegExpNode(zone), action_(action) { }
virtual void Accept(NodeVisitor* visitor);
virtual void Emit(RegExpCompiler* compiler, Trace* trace);
virtual int EatsAtLeast(int still_to_find,
@@ -986,7 +971,15 @@ class EndNode: public RegExpNode {
// Returning 0 from EatsAtLeast should ensure we never get here.
UNREACHABLE();
}
- virtual EndNode* Clone() { return new EndNode(*this); }
+ virtual void FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start) {
+ // Returning 0 from EatsAtLeast should ensure we never get here.
+ UNREACHABLE();
+ }
+
private:
Action action_;
};
@@ -997,8 +990,9 @@ class NegativeSubmatchSuccess: public EndNode {
NegativeSubmatchSuccess(int stack_pointer_reg,
int position_reg,
int clear_capture_count,
- int clear_capture_start)
- : EndNode(NEGATIVE_SUBMATCH_SUCCESS),
+ int clear_capture_start,
+ Zone* zone)
+ : EndNode(NEGATIVE_SUBMATCH_SUCCESS, zone),
stack_pointer_register_(stack_pointer_reg),
current_position_register_(position_reg),
clear_capture_count_(clear_capture_count),
@@ -1034,7 +1028,7 @@ class Guard: public ZoneObject {
class GuardedAlternative {
public:
explicit GuardedAlternative(RegExpNode* node) : node_(node), guards_(NULL) { }
- void AddGuard(Guard* guard);
+ void AddGuard(Guard* guard, Zone* zone);
RegExpNode* node() { return node_; }
void set_node(RegExpNode* node) { node_ = node; }
ZoneList<Guard*>* guards() { return guards_; }
@@ -1050,13 +1044,17 @@ class AlternativeGeneration;
class ChoiceNode: public RegExpNode {
public:
- explicit ChoiceNode(int expected_size)
- : alternatives_(new ZoneList<GuardedAlternative>(expected_size)),
+ explicit ChoiceNode(int expected_size, Zone* zone)
+ : RegExpNode(zone),
+ alternatives_(new(zone)
+ ZoneList<GuardedAlternative>(expected_size, zone)),
table_(NULL),
not_at_start_(false),
being_calculated_(false) { }
virtual void Accept(NodeVisitor* visitor);
- void AddAlternative(GuardedAlternative node) { alternatives()->Add(node); }
+ void AddAlternative(GuardedAlternative node) {
+ alternatives()->Add(node, zone());
+ }
ZoneList<GuardedAlternative>* alternatives() { return alternatives_; }
DispatchTable* GetTable(bool ignore_case);
virtual void Emit(RegExpCompiler* compiler, Trace* trace);
@@ -1071,13 +1069,18 @@ class ChoiceNode: public RegExpNode {
RegExpCompiler* compiler,
int characters_filled_in,
bool not_at_start);
- virtual ChoiceNode* Clone() { return new ChoiceNode(*this); }
+ virtual void FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start);
bool being_calculated() { return being_calculated_; }
bool not_at_start() { return not_at_start_; }
void set_not_at_start() { not_at_start_ = true; }
void set_being_calculated(bool b) { being_calculated_ = b; }
virtual bool try_to_emit_quick_check_for_alternative(int i) { return true; }
+ virtual RegExpNode* FilterASCII(int depth);
protected:
int GreedyLoopTextLengthForAlternative(GuardedAlternative* alternative);
@@ -1089,7 +1092,7 @@ class ChoiceNode: public RegExpNode {
void GenerateGuard(RegExpMacroAssembler* macro_assembler,
Guard* guard,
Trace* trace);
- int CalculatePreloadCharacters(RegExpCompiler* compiler, bool not_at_start);
+ int CalculatePreloadCharacters(RegExpCompiler* compiler, int eats_at_least);
void EmitOutOfLineContinuation(RegExpCompiler* compiler,
Trace* trace,
GuardedAlternative alternative,
@@ -1107,8 +1110,9 @@ class ChoiceNode: public RegExpNode {
class NegativeLookaheadChoiceNode: public ChoiceNode {
public:
explicit NegativeLookaheadChoiceNode(GuardedAlternative this_must_fail,
- GuardedAlternative then_do_this)
- : ChoiceNode(2) {
+ GuardedAlternative then_do_this,
+ Zone* zone)
+ : ChoiceNode(2, zone) {
AddAlternative(this_must_fail);
AddAlternative(then_do_this);
}
@@ -1119,20 +1123,29 @@ class NegativeLookaheadChoiceNode: public ChoiceNode {
RegExpCompiler* compiler,
int characters_filled_in,
bool not_at_start);
+ virtual void FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start) {
+ alternatives_->at(1).node()->FillInBMInfo(
+ offset, recursion_depth + 1, budget - 1, bm, not_at_start);
+ if (offset == 0) set_bm_info(not_at_start, bm);
+ }
// For a negative lookahead we don't emit the quick check for the
// alternative that is expected to fail. This is because quick check code
// starts by loading enough characters for the alternative that takes fewest
// characters, but on a negative lookahead the negative branch did not take
// part in that calculation (EatsAtLeast) so the assumptions don't hold.
virtual bool try_to_emit_quick_check_for_alternative(int i) { return i != 0; }
- virtual int ComputeFirstCharacterSet(int budget);
+ virtual RegExpNode* FilterASCII(int depth);
};
class LoopChoiceNode: public ChoiceNode {
public:
- explicit LoopChoiceNode(bool body_can_be_zero_length)
- : ChoiceNode(2),
+ explicit LoopChoiceNode(bool body_can_be_zero_length, Zone* zone)
+ : ChoiceNode(2, zone),
loop_node_(NULL),
continue_node_(NULL),
body_can_be_zero_length_(body_can_be_zero_length) { }
@@ -1146,12 +1159,16 @@ class LoopChoiceNode: public ChoiceNode {
RegExpCompiler* compiler,
int characters_filled_in,
bool not_at_start);
- virtual int ComputeFirstCharacterSet(int budget);
- virtual LoopChoiceNode* Clone() { return new LoopChoiceNode(*this); }
+ virtual void FillInBMInfo(int offset,
+ int recursion_depth,
+ int budget,
+ BoyerMooreLookahead* bm,
+ bool not_at_start);
RegExpNode* loop_node() { return loop_node_; }
RegExpNode* continue_node() { return continue_node_; }
bool body_can_be_zero_length() { return body_can_be_zero_length_; }
virtual void Accept(NodeVisitor* visitor);
+ virtual RegExpNode* FilterASCII(int depth);
private:
// AddAlternative is made private for loop nodes because alternatives
@@ -1167,6 +1184,146 @@ class LoopChoiceNode: public ChoiceNode {
};
+// Improve the speed at which we scan for an initial point where a non-anchored
+// regexp can match by using a Boyer-Moore-like table. This is done by
+// identifying non-greedy non-capturing loops in the nodes that eat any
+// character one at a time. For example in the middle of the regexp
+// /foo[\s\S]*?bar/ we find such a loop. There is also such a loop implicitly
+// inserted at the start of any non-anchored regexp.
+//
+// When we have found such a loop we look ahead in the nodes to find the set of
+// characters that can come at given distances. For example for the regexp
+// /.?foo/ we know that there are at least 3 characters ahead of us, and the
+// sets of characters that can occur are [any, [f, o], [o]]. We find a range in
+// the lookahead info where the set of characters is reasonably constrained. In
+// our example this is from index 1 to 2 (0 is not constrained). We can now
+// look 3 characters ahead and if we don't find one of [f, o] (the union of
+// [f, o] and [o]) then we can skip forwards by the range size (in this case 2).
+//
+// For Unicode input strings we do the same, but modulo 128.
+//
+// We also look at the first string fed to the regexp and use that to get a hint
+// of the character frequencies in the inputs. This affects the assessment of
+// whether the set of characters is 'reasonably constrained'.
+//
+// We also have another lookahead mechanism (called quick check in the code),
+// which uses a wide load of multiple characters followed by a mask and compare
+// to determine whether a match is possible at this point.
+enum ContainedInLattice {
+ kNotYet = 0,
+ kLatticeIn = 1,
+ kLatticeOut = 2,
+ kLatticeUnknown = 3 // Can also mean both in and out.
+};
+
+
+inline ContainedInLattice Combine(ContainedInLattice a, ContainedInLattice b) {
+ return static_cast<ContainedInLattice>(a | b);
+}
+
+
+ContainedInLattice AddRange(ContainedInLattice a,
+ const int* ranges,
+ int ranges_size,
+ Interval new_range);
+
+
+class BoyerMoorePositionInfo : public ZoneObject {
+ public:
+ explicit BoyerMoorePositionInfo(Zone* zone)
+ : map_(new(zone) ZoneList<bool>(kMapSize, zone)),
+ map_count_(0),
+ w_(kNotYet),
+ s_(kNotYet),
+ d_(kNotYet),
+ surrogate_(kNotYet) {
+ for (int i = 0; i < kMapSize; i++) {
+ map_->Add(false, zone);
+ }
+ }
+
+ bool& at(int i) { return map_->at(i); }
+
+ static const int kMapSize = 128;
+ static const int kMask = kMapSize - 1;
+
+ int map_count() const { return map_count_; }
+
+ void Set(int character);
+ void SetInterval(const Interval& interval);
+ void SetAll();
+ bool is_non_word() { return w_ == kLatticeOut; }
+ bool is_word() { return w_ == kLatticeIn; }
+
+ private:
+ ZoneList<bool>* map_;
+ int map_count_; // Number of set bits in the map.
+ ContainedInLattice w_; // The \w character class.
+ ContainedInLattice s_; // The \s character class.
+ ContainedInLattice d_; // The \d character class.
+ ContainedInLattice surrogate_; // Surrogate UTF-16 code units.
+};
+
+
+class BoyerMooreLookahead : public ZoneObject {
+ public:
+ BoyerMooreLookahead(int length, RegExpCompiler* compiler, Zone* zone);
+
+ int length() { return length_; }
+ int max_char() { return max_char_; }
+ RegExpCompiler* compiler() { return compiler_; }
+
+ int Count(int map_number) {
+ return bitmaps_->at(map_number)->map_count();
+ }
+
+ BoyerMoorePositionInfo* at(int i) { return bitmaps_->at(i); }
+
+ void Set(int map_number, int character) {
+ if (character > max_char_) return;
+ BoyerMoorePositionInfo* info = bitmaps_->at(map_number);
+ info->Set(character);
+ }
+
+ void SetInterval(int map_number, const Interval& interval) {
+ if (interval.from() > max_char_) return;
+ BoyerMoorePositionInfo* info = bitmaps_->at(map_number);
+ if (interval.to() > max_char_) {
+ info->SetInterval(Interval(interval.from(), max_char_));
+ } else {
+ info->SetInterval(interval);
+ }
+ }
+
+ void SetAll(int map_number) {
+ bitmaps_->at(map_number)->SetAll();
+ }
+
+ void SetRest(int from_map) {
+ for (int i = from_map; i < length_; i++) SetAll(i);
+ }
+ bool EmitSkipInstructions(RegExpMacroAssembler* masm);
+
+ private:
+ // This is the value obtained by EatsAtLeast. If we do not have at least this
+ // many characters left in the sample string then the match is bound to fail.
+ // Therefore it is OK to read a character this far ahead of the current match
+ // point.
+ int length_;
+ RegExpCompiler* compiler_;
+ // 0x7f for ASCII, 0xffff for UTF-16.
+ int max_char_;
+ ZoneList<BoyerMoorePositionInfo*>* bitmaps_;
+
+ int GetSkipTable(int min_lookahead,
+ int max_lookahead,
+ Handle<ByteArray> boolean_skip_table);
+ bool FindWorthwhileInterval(int* from, int* to);
+ int FindBestInterval(
+ int max_number_of_chars, int old_biggest_points, int* from, int* to);
+};
+
+
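The comment block above already walks through /.?foo/; the sketch below spells the same skip loop out as ordinary C++ so the arithmetic is easier to follow. FindCandidate, the offsets and the table are invented for illustration; the real implementation generates the equivalent as machine code through EmitSkipInstructions() and GetSkipTable() on the class declared above.

#include <string>

// boolean_skip_table[c & 0x7f] is true iff code unit c can occur somewhere in
// the constrained range of lookahead positions (for /.?foo/: 'f' and 'o').
static bool boolean_skip_table[128];

int FindCandidate(const std::string& subject, int start_position) {
  const int kProbeOffset = 2;  // last index of the constrained range
  const int kSkip = 2;         // width of the constrained range (indices 1..2)
  int pos = start_position;
  while (pos + kProbeOffset < static_cast<int>(subject.size())) {
    unsigned char c = static_cast<unsigned char>(subject[pos + kProbeOffset]);
    if (boolean_skip_table[c & 0x7f]) {
      return pos;  // cannot rule this position out; run the real matcher here
    }
    pos += kSkip;  // neither 'f' nor 'o' at the probe point: skip both starts
  }
  return pos;      // near the end of the subject; fall back to normal matching
}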
// There are many ways to generate code for a node. This class encapsulates
// the current way we should be generating. In other words it encapsulates
// the current state of the code generator. The effect of this is that we
@@ -1312,12 +1469,13 @@ class Trace {
void AdvanceCurrentPositionInTrace(int by, RegExpCompiler* compiler);
private:
- int FindAffectedRegisters(OutSet* affected_registers);
+ int FindAffectedRegisters(OutSet* affected_registers, Zone* zone);
void PerformDeferredActions(RegExpMacroAssembler* macro,
- int max_register,
- OutSet& affected_registers,
- OutSet* registers_to_pop,
- OutSet* registers_to_clear);
+ int max_register,
+ OutSet& affected_registers,
+ OutSet* registers_to_pop,
+ OutSet* registers_to_clear,
+ Zone* zone);
void RestoreAffectedRegisters(RegExpMacroAssembler* macro,
int max_register,
OutSet& registers_to_pop,
@@ -1350,15 +1508,17 @@ FOR_EACH_NODE_TYPE(DECLARE_VISIT)
// dispatch table of a choice node.
class DispatchTableConstructor: public NodeVisitor {
public:
- DispatchTableConstructor(DispatchTable* table, bool ignore_case)
+ DispatchTableConstructor(DispatchTable* table, bool ignore_case,
+ Zone* zone)
: table_(table),
choice_index_(-1),
- ignore_case_(ignore_case) { }
+ ignore_case_(ignore_case),
+ zone_(zone) { }
void BuildTable(ChoiceNode* node);
void AddRange(CharacterRange range) {
- table()->AddRange(range, choice_index_);
+ table()->AddRange(range, choice_index_, zone_);
}
void AddInverse(ZoneList<CharacterRange>* ranges);
@@ -1375,6 +1535,7 @@ FOR_EACH_NODE_TYPE(DECLARE_VISIT)
DispatchTable* table_;
int choice_index_;
bool ignore_case_;
+ Zone* zone_;
};
@@ -1456,9 +1617,11 @@ class RegExpEngine: public AllStatic {
static CompilationResult Compile(RegExpCompileData* input,
bool ignore_case,
+ bool global,
bool multiline,
Handle<String> pattern,
- bool is_ascii);
+ Handle<String> sample_subject,
+ bool is_ascii, Zone* zone);
static void DotPrint(const char* label, RegExpNode* node, bool ignore_case);
};
@@ -1483,7 +1646,8 @@ class OffsetsVector {
inline int* vector() { return vector_; }
inline int length() { return offsets_vector_length_; }
- static const int kStaticOffsetsVectorSize = 50;
+ static const int kStaticOffsetsVectorSize =
+ Isolate::kJSRegexpStaticOffsetsVectorSize;
private:
static Address static_offsets_vector_address(Isolate* isolate) {
diff --git a/deps/v8/src/lazy-instance.h b/deps/v8/src/lazy-instance.h
index d0893e5604..9d68b8cacc 100644
--- a/deps/v8/src/lazy-instance.h
+++ b/deps/v8/src/lazy-instance.h
@@ -65,8 +65,12 @@
// static LazyInstance<MyClass, MyCreateTrait>::type my_instance =
// LAZY_INSTANCE_INITIALIZER;
//
-// WARNING: This implementation of LazyInstance is NOT thread-safe by default.
-// See ThreadSafeInitOnceTrait declared below for that.
+// WARNINGS:
+// - This implementation of LazyInstance is NOT THREAD-SAFE by default. See
+// ThreadSafeInitOnceTrait declared below for that.
+// - Lazy initialization comes with a cost. Make sure that you don't use it on
+//   a critical path. Consider adding your initialization code to a function
+// which is explicitly called once.
//
// Notes for advanced users:
// LazyInstance can actually be used in two different ways:
@@ -246,7 +250,7 @@ struct LazyInstance {
template <typename T,
- typename CreateTrait = DefaultConstructTrait<T>,
+ typename CreateTrait = DefaultCreateTrait<T>,
typename InitOnceTrait = SingleThreadInitOnceTrait,
typename DestroyTrait = LeakyInstanceTrait<T> >
struct LazyDynamicInstance {
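A short usage sketch to go with the warnings above, assuming the same template-parameter order that LazyDynamicInstance shows in this hunk (T, CreateTrait, InitOnceTrait, DestroyTrait); MyService, the g_* names and the Pointer() accessor are illustrative assumptions, not quotes from this header.

#include "lazy-instance.h"

using v8::internal::DefaultConstructTrait;
using v8::internal::LazyInstance;
using v8::internal::ThreadSafeInitOnceTrait;

class MyService {
 public:
  void Ping() {}
};

// Default: construction happens on the first access with no locking, so that
// first access must not race with another thread.
static LazyInstance<MyService>::type g_service = LAZY_INSTANCE_INITIALIZER;

// When the first access can come from several threads, swap in the
// thread-safe init-once trait and keep the default construct trait.
static LazyInstance<MyService,
                    DefaultConstructTrait<MyService>,
                    ThreadSafeInitOnceTrait>::type g_shared_service =
    LAZY_INSTANCE_INITIALIZER;

void Use() {
  g_service.Pointer()->Ping();        // constructed here on the first call
  g_shared_service.Pointer()->Ping();
}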
diff --git a/deps/v8/src/list-inl.h b/deps/v8/src/list-inl.h
index 7c2c83f0f7..60a033df04 100644
--- a/deps/v8/src/list-inl.h
+++ b/deps/v8/src/list-inl.h
@@ -35,25 +35,25 @@ namespace internal {
template<typename T, class P>
-void List<T, P>::Add(const T& element) {
+void List<T, P>::Add(const T& element, P alloc) {
if (length_ < capacity_) {
data_[length_++] = element;
} else {
- List<T, P>::ResizeAdd(element);
+ List<T, P>::ResizeAdd(element, alloc);
}
}
template<typename T, class P>
-void List<T, P>::AddAll(const List<T, P>& other) {
- AddAll(other.ToVector());
+void List<T, P>::AddAll(const List<T, P>& other, P alloc) {
+ AddAll(other.ToVector(), alloc);
}
template<typename T, class P>
-void List<T, P>::AddAll(const Vector<T>& other) {
+void List<T, P>::AddAll(const Vector<T>& other, P alloc) {
int result_length = length_ + other.length();
- if (capacity_ < result_length) Resize(result_length);
+ if (capacity_ < result_length) Resize(result_length, alloc);
for (int i = 0; i < other.length(); i++) {
data_[length_ + i] = other.at(i);
}
@@ -64,13 +64,13 @@ void List<T, P>::AddAll(const Vector<T>& other) {
// Use two layers of inlining so that the non-inlined function can
// use the same implementation as the inlined version.
template<typename T, class P>
-void List<T, P>::ResizeAdd(const T& element) {
- ResizeAddInternal(element);
+void List<T, P>::ResizeAdd(const T& element, P alloc) {
+ ResizeAddInternal(element, alloc);
}
template<typename T, class P>
-void List<T, P>::ResizeAddInternal(const T& element) {
+void List<T, P>::ResizeAddInternal(const T& element, P alloc) {
ASSERT(length_ >= capacity_);
// Grow the list capacity by 100%, but make sure to let it grow
// even when the capacity is zero (possible initial case).
@@ -78,14 +78,14 @@ void List<T, P>::ResizeAddInternal(const T& element) {
// Since the element reference could be an element of the list, copy
// it out of the old backing storage before resizing.
T temp = element;
- Resize(new_capacity);
+ Resize(new_capacity, alloc);
data_[length_++] = temp;
}
template<typename T, class P>
-void List<T, P>::Resize(int new_capacity) {
- T* new_data = List<T, P>::NewData(new_capacity);
+void List<T, P>::Resize(int new_capacity, P alloc) {
+ T* new_data = NewData(new_capacity, alloc);
memcpy(new_data, data_, capacity_ * sizeof(T));
List<T, P>::DeleteData(data_);
data_ = new_data;
@@ -94,17 +94,17 @@ void List<T, P>::Resize(int new_capacity) {
template<typename T, class P>
-Vector<T> List<T, P>::AddBlock(T value, int count) {
+Vector<T> List<T, P>::AddBlock(T value, int count, P alloc) {
int start = length_;
- for (int i = 0; i < count; i++) Add(value);
+ for (int i = 0; i < count; i++) Add(value, alloc);
return Vector<T>(&data_[start], count);
}
template<typename T, class P>
-void List<T, P>::InsertAt(int index, const T& elm) {
+void List<T, P>::InsertAt(int index, const T& elm, P alloc) {
ASSERT(index >= 0 && index <= length_);
- Add(elm);
+ Add(elm, alloc);
for (int i = length_ - 1; i > index; --i) {
data_[i] = data_[i - 1];
}
@@ -137,9 +137,21 @@ bool List<T, P>::RemoveElement(const T& elm) {
template<typename T, class P>
+void List<T, P>::Allocate(int length, P allocator) {
+ DeleteData(data_);
+ Initialize(length, allocator);
+ length_ = length;
+}
+
+
+template<typename T, class P>
void List<T, P>::Clear() {
DeleteData(data_);
- Initialize(0);
+ // We don't call Initialize(0) since that requires passing a Zone,
+ // which we don't really need.
+ data_ = NULL;
+ capacity_ = 0;
+ length_ = 0;
}
@@ -199,28 +211,27 @@ void List<T, P>::Sort() {
template<typename T, class P>
-void List<T, P>::Initialize(int capacity) {
+void List<T, P>::Initialize(int capacity, P allocator) {
ASSERT(capacity >= 0);
- data_ = (capacity > 0) ? NewData(capacity) : NULL;
+ data_ = (capacity > 0) ? NewData(capacity, allocator) : NULL;
capacity_ = capacity;
length_ = 0;
}
-template <typename T>
-int SortedListBSearch(
- const List<T>& list, T elem, int (*cmp)(const T* x, const T* y)) {
+template <typename T, typename P>
+int SortedListBSearch(const List<T>& list, P cmp) {
int low = 0;
int high = list.length() - 1;
while (low <= high) {
int mid = (low + high) / 2;
T mid_elem = list[mid];
- if (cmp(&mid_elem, &elem) > 0) {
+ if (cmp(&mid_elem) > 0) {
high = mid - 1;
continue;
}
- if (cmp(&mid_elem, &elem) < 0) {
+ if (cmp(&mid_elem) < 0) {
low = mid + 1;
continue;
}
@@ -231,9 +242,21 @@ int SortedListBSearch(
}
+template<typename T>
+class ElementCmp {
+ public:
+ explicit ElementCmp(T e) : elem_(e) {}
+ int operator()(const T* other) {
+ return PointerValueCompare(other, &elem_);
+ }
+ private:
+ T elem_;
+};
+
+
template <typename T>
int SortedListBSearch(const List<T>& list, T elem) {
- return SortedListBSearch<T>(list, elem, PointerValueCompare<T>);
+ return SortedListBSearch<T, ElementCmp<T> > (list, ElementCmp<T>(elem));
}
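With the hunk above, SortedListBSearch takes an arbitrary predicate object instead of an (element, comparator-function) pair; ElementCmp is the adapter that keeps the old exact-match overload working. A hypothetical custom predicate could look like this; Sample, IdCmp and the sorted-by-id list are invented for illustration.

struct Sample {
  int id;
  const char* payload;
};

// Same contract as ElementCmp: return > 0 if *other sorts after the key,
// < 0 if it sorts before, and 0 on a hit.
class IdCmp {
 public:
  explicit IdCmp(int id) : id_(id) {}
  int operator()(const Sample* other) { return other->id - id_; }
 private:
  int id_;
};

// Usage, assuming `samples` is a List<Sample> already sorted by id:
//   int index = SortedListBSearch<Sample, IdCmp>(samples, IdCmp(17));
//   if (index >= 0) { /* samples[index].payload ... */ }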
diff --git a/deps/v8/src/list.h b/deps/v8/src/list.h
index adddea41f0..3ca4a3fba8 100644
--- a/deps/v8/src/list.h
+++ b/deps/v8/src/list.h
@@ -45,12 +45,18 @@ namespace internal {
// the C free store or the zone; see zone.h.
// Forward defined as
-// template <typename T, class P = FreeStoreAllocationPolicy> class List;
-template <typename T, class P>
+// template <typename T,
+// class AllocationPolicy = FreeStoreAllocationPolicy> class List;
+template <typename T, class AllocationPolicy>
class List {
public:
- List() { Initialize(0); }
- INLINE(explicit List(int capacity)) { Initialize(capacity); }
+ explicit List(AllocationPolicy allocator = AllocationPolicy()) {
+ Initialize(0, allocator);
+ }
+ INLINE(explicit List(int capacity,
+ AllocationPolicy allocator = AllocationPolicy())) {
+ Initialize(capacity, allocator);
+ }
INLINE(~List()) { DeleteData(data_); }
// Deallocates memory used by the list and leaves the list in a consistent
@@ -60,10 +66,13 @@ class List {
Initialize(0);
}
- INLINE(void* operator new(size_t size)) {
- return P::New(static_cast<int>(size));
+ INLINE(void* operator new(size_t size,
+ AllocationPolicy allocator = AllocationPolicy())) {
+ return allocator.New(static_cast<int>(size));
+ }
+ INLINE(void operator delete(void* p)) {
+ AllocationPolicy::Delete(p);
}
- INLINE(void operator delete(void* p, size_t)) { return P::Delete(p); }
// Returns a reference to the element at index i. This reference is
// not safe to use after operations that can change the list's
@@ -87,21 +96,25 @@ class List {
// Adds a copy of the given 'element' to the end of the list,
// expanding the list if necessary.
- void Add(const T& element);
+ void Add(const T& element, AllocationPolicy allocator = AllocationPolicy());
// Add all the elements from the argument list to this list.
- void AddAll(const List<T, P>& other);
+ void AddAll(const List<T, AllocationPolicy>& other,
+ AllocationPolicy allocator = AllocationPolicy());
// Add all the elements from the vector to this list.
- void AddAll(const Vector<T>& other);
+ void AddAll(const Vector<T>& other,
+ AllocationPolicy allocator = AllocationPolicy());
// Inserts the element at the specific index.
- void InsertAt(int index, const T& element);
+ void InsertAt(int index, const T& element,
+ AllocationPolicy allocator = AllocationPolicy());
// Adds 'count' elements with the value 'value' and returns a
// vector that allows access to the elements. The vector is valid
// until the next change is made to this list.
- Vector<T> AddBlock(T value, int count);
+ Vector<T> AddBlock(T value, int count,
+ AllocationPolicy allocator = AllocationPolicy());
// Removes the i'th element without deleting it even if T is a
// pointer type; moves all elements above i "down". Returns the
@@ -117,6 +130,10 @@ class List {
// pointer type. Returns the removed element.
INLINE(T RemoveLast()) { return Remove(length_ - 1); }
+ // Deletes current list contents and allocates space for 'length' elements.
+ INLINE(void Allocate(int length,
+ AllocationPolicy allocator = AllocationPolicy()));
+
// Clears the list by setting the length to zero. Even if T is a
// pointer type, clearing the list doesn't delete the entries.
INLINE(void Clear());
@@ -139,26 +156,31 @@ class List {
void Sort(int (*cmp)(const T* x, const T* y));
void Sort();
- INLINE(void Initialize(int capacity));
+ INLINE(void Initialize(int capacity,
+ AllocationPolicy allocator = AllocationPolicy()));
private:
T* data_;
int capacity_;
int length_;
- INLINE(T* NewData(int n)) { return static_cast<T*>(P::New(n * sizeof(T))); }
- INLINE(void DeleteData(T* data)) { P::Delete(data); }
+ INLINE(T* NewData(int n, AllocationPolicy allocator)) {
+ return static_cast<T*>(allocator.New(n * sizeof(T)));
+ }
+ INLINE(void DeleteData(T* data)) {
+ AllocationPolicy::Delete(data);
+ }
// Increase the capacity of a full list, and add an element.
// List must be full already.
- void ResizeAdd(const T& element);
+ void ResizeAdd(const T& element, AllocationPolicy allocator);
// Inlined implementation of ResizeAdd, shared by inlined and
// non-inlined versions of ResizeAdd.
- void ResizeAddInternal(const T& element);
+ void ResizeAddInternal(const T& element, AllocationPolicy allocator);
// Resize the list.
- void Resize(int new_capacity);
+ void Resize(int new_capacity, AllocationPolicy allocator);
DISALLOW_COPY_AND_ASSIGN(List);
};
@@ -173,9 +195,11 @@ typedef List<Handle<Code> > CodeHandleList;
// Perform binary search for an element in an already sorted
// list. Returns the index of the element or -1 if it was not found.
-template <typename T>
-int SortedListBSearch(
- const List<T>& list, T elem, int (*cmp)(const T* x, const T* y));
+// |cmp| is a predicate that takes a pointer to an element of the List
+// and returns +1 if it is greater than, or -1 if it is less than, the
+// element being searched for.
+template <typename T, class P>
+int SortedListBSearch(const List<T>& list, P cmp);
template <typename T>
int SortedListBSearch(const List<T>& list, T elem);
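The new List<T, AllocationPolicy> surface is easiest to see in a stripped-down re-implementation. The sketch below is not the V8 class; it only reproduces the two ideas introduced here: the policy is now an object, so a zone-backed policy can carry its Zone, and every growing operation takes a defaulted allocator argument so heap-backed call sites keep compiling unchanged. SimpleList and MallocPolicy are invented names.

#include <cstdlib>
#include <cstring>

// Plays the role of FreeStoreAllocationPolicy: stateless, so the defaulted
// arguments below cost the caller nothing.
struct MallocPolicy {
  void* New(size_t size) { return malloc(size); }
  static void Delete(void* p) { free(p); }
};

template <typename T, class AllocationPolicy = MallocPolicy>
class SimpleList {
 public:
  explicit SimpleList(int capacity = 0,
                      AllocationPolicy allocator = AllocationPolicy())
      : data_(NULL), capacity_(0), length_(0) {
    if (capacity > 0) Resize(capacity, allocator);
  }
  ~SimpleList() { AllocationPolicy::Delete(data_); }

  // A stateful policy (for example one wrapping a Zone) has no useful default
  // constructor, so such callers pass the allocator on every growing call,
  // just as the lithium-allocator.cc hunks below now do with zone().
  void Add(const T& element, AllocationPolicy allocator = AllocationPolicy()) {
    if (length_ == capacity_) {
      Resize(capacity_ == 0 ? 4 : 2 * capacity_, allocator);
    }
    data_[length_++] = element;
  }

  T& at(int i) { return data_[i]; }
  int length() const { return length_; }

 private:
  void Resize(int new_capacity, AllocationPolicy allocator) {
    // memcpy mirrors the real List<T>::Resize; only safe for POD-like T.
    T* new_data = static_cast<T*>(allocator.New(new_capacity * sizeof(T)));
    if (data_ != NULL) memcpy(new_data, data_, length_ * sizeof(T));
    AllocationPolicy::Delete(data_);
    data_ = new_data;
    capacity_ = new_capacity;
  }

  T* data_;
  int capacity_;
  int length_;
};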
diff --git a/deps/v8/src/lithium-allocator.cc b/deps/v8/src/lithium-allocator.cc
index 4396c7354c..bbc405ba0b 100644
--- a/deps/v8/src/lithium-allocator.cc
+++ b/deps/v8/src/lithium-allocator.cc
@@ -230,9 +230,9 @@ LOperand* LiveRange::CreateAssignedOperand(Zone* zone) {
if (HasRegisterAssigned()) {
ASSERT(!IsSpilled());
if (IsDouble()) {
- op = LDoubleRegister::Create(assigned_register());
+ op = LDoubleRegister::Create(assigned_register(), zone);
} else {
- op = LRegister::Create(assigned_register());
+ op = LRegister::Create(assigned_register(), zone);
}
} else if (IsSpilled()) {
ASSERT(!HasRegisterAssigned());
@@ -533,14 +533,14 @@ LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
LAllocator::LAllocator(int num_values, HGraph* graph)
: zone_(graph->zone()),
chunk_(NULL),
- live_in_sets_(graph->blocks()->length()),
- live_ranges_(num_values * 2),
+ live_in_sets_(graph->blocks()->length(), zone_),
+ live_ranges_(num_values * 2, zone_),
fixed_live_ranges_(NULL),
fixed_double_live_ranges_(NULL),
- unhandled_live_ranges_(num_values * 2),
- active_live_ranges_(8),
- inactive_live_ranges_(8),
- reusable_slots_(8),
+ unhandled_live_ranges_(num_values * 2, zone_),
+ active_live_ranges_(8, zone_),
+ inactive_live_ranges_(8, zone_),
+ reusable_slots_(8, zone_),
next_virtual_register_(num_values),
first_artificial_register_(num_values),
mode_(GENERAL_REGISTERS),
@@ -553,8 +553,8 @@ LAllocator::LAllocator(int num_values, HGraph* graph)
void LAllocator::InitializeLivenessAnalysis() {
// Initialize the live_in sets for each block to NULL.
int block_count = graph_->blocks()->length();
- live_in_sets_.Initialize(block_count);
- live_in_sets_.AddBlock(NULL, block_count);
+ live_in_sets_.Initialize(block_count, zone());
+ live_in_sets_.AddBlock(NULL, block_count, zone());
}
@@ -630,7 +630,7 @@ LOperand* LAllocator::AllocateFixed(LUnallocated* operand,
TraceAlloc("Fixed reg is tagged at %d\n", pos);
LInstruction* instr = InstructionAt(pos);
if (instr->HasPointerMap()) {
- instr->pointer_map()->RecordPointer(operand);
+ instr->pointer_map()->RecordPointer(operand, zone());
}
}
return operand;
@@ -665,7 +665,7 @@ LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
LiveRange* LAllocator::LiveRangeFor(int index) {
if (index >= live_ranges_.length()) {
- live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1);
+ live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1, zone());
}
LiveRange* result = live_ranges_[index];
if (result == NULL) {
@@ -746,7 +746,7 @@ void LAllocator::AddConstraintsGapMove(int index,
LOperand* from,
LOperand* to) {
LGap* gap = GapAt(index);
- LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
+ LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START, zone());
if (from->IsUnallocated()) {
const ZoneList<LMoveOperands>* move_operands = move->move_operands();
for (int i = 0; i < move_operands->length(); ++i) {
@@ -755,13 +755,13 @@ void LAllocator::AddConstraintsGapMove(int index,
if (cur_to->IsUnallocated()) {
if (LUnallocated::cast(cur_to)->virtual_register() ==
LUnallocated::cast(from)->virtual_register()) {
- move->AddMove(cur.source(), to);
+ move->AddMove(cur.source(), to, zone());
return;
}
}
}
}
- move->AddMove(from, to);
+ move->AddMove(from, to, zone());
}
@@ -800,7 +800,7 @@ void LAllocator::MeetConstraintsBetween(LInstruction* first,
LiveRange* range = LiveRangeFor(first_output->virtual_register());
bool assigned = false;
if (first_output->HasFixedPolicy()) {
- LUnallocated* output_copy = first_output->CopyUnconstrained();
+ LUnallocated* output_copy = first_output->CopyUnconstrained(zone());
bool is_tagged = HasTaggedValue(first_output->virtual_register());
AllocateFixed(first_output, gap_index, is_tagged);
@@ -821,8 +821,8 @@ void LAllocator::MeetConstraintsBetween(LInstruction* first,
// Thus it should be inserted to a lifetime position corresponding to
// the instruction end.
LGap* gap = GapAt(gap_index);
- LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE);
- move->AddMove(first_output, range->GetSpillOperand());
+ LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE, zone());
+ move->AddMove(first_output, range->GetSpillOperand(), zone());
}
}
@@ -831,7 +831,7 @@ void LAllocator::MeetConstraintsBetween(LInstruction* first,
for (UseIterator it(second); !it.Done(); it.Advance()) {
LUnallocated* cur_input = LUnallocated::cast(it.Current());
if (cur_input->HasFixedPolicy()) {
- LUnallocated* input_copy = cur_input->CopyUnconstrained();
+ LUnallocated* input_copy = cur_input->CopyUnconstrained(zone());
bool is_tagged = HasTaggedValue(cur_input->virtual_register());
AllocateFixed(cur_input, gap_index + 1, is_tagged);
AddConstraintsGapMove(gap_index, input_copy, cur_input);
@@ -840,7 +840,7 @@ void LAllocator::MeetConstraintsBetween(LInstruction* first,
// of the instruction.
ASSERT(!cur_input->IsUsedAtStart());
- LUnallocated* input_copy = cur_input->CopyUnconstrained();
+ LUnallocated* input_copy = cur_input->CopyUnconstrained(zone());
cur_input->set_virtual_register(GetVirtualRegister());
if (!AllocationOk()) return;
@@ -864,7 +864,7 @@ void LAllocator::MeetConstraintsBetween(LInstruction* first,
int output_vreg = second_output->virtual_register();
int input_vreg = cur_input->virtual_register();
- LUnallocated* input_copy = cur_input->CopyUnconstrained();
+ LUnallocated* input_copy = cur_input->CopyUnconstrained(zone());
cur_input->set_virtual_register(second_output->virtual_register());
AddConstraintsGapMove(gap_index, input_copy, cur_input);
@@ -872,7 +872,7 @@ void LAllocator::MeetConstraintsBetween(LInstruction* first,
int index = gap_index + 1;
LInstruction* instr = InstructionAt(index);
if (instr->HasPointerMap()) {
- instr->pointer_map()->RecordPointer(input_copy);
+ instr->pointer_map()->RecordPointer(input_copy, zone());
}
} else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
// The input is assumed to immediately have a tagged representation,
@@ -901,7 +901,7 @@ void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
if (IsGapAt(index)) {
// We have a gap at this position.
LGap* gap = GapAt(index);
- LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
+ LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START, zone());
const ZoneList<LMoveOperands>* move_operands = move->move_operands();
for (int i = 0; i < move_operands->length(); ++i) {
LMoveOperands* cur = &move_operands->at(i);
@@ -958,7 +958,7 @@ void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
}
}
- if (instr->IsMarkedAsCall() || instr->IsMarkedAsSaveDoubles()) {
+ if (instr->IsMarkedAsCall()) {
for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; ++i) {
if (output == NULL || !output->IsDoubleRegister() ||
output->index() != i) {
@@ -1046,17 +1046,17 @@ void LAllocator::ResolvePhis(HBasicBlock* block) {
InstructionAt(cur_block->last_instruction_index());
if (branch->HasPointerMap()) {
if (phi->representation().IsTagged()) {
- branch->pointer_map()->RecordPointer(phi_operand);
+ branch->pointer_map()->RecordPointer(phi_operand, zone());
} else if (!phi->representation().IsDouble()) {
- branch->pointer_map()->RecordUntagged(phi_operand);
+ branch->pointer_map()->RecordUntagged(phi_operand, zone());
}
}
}
LiveRange* live_range = LiveRangeFor(phi->id());
LLabel* label = chunk_->GetLabel(phi->block()->block_id());
- label->GetOrCreateParallelMove(LGap::START)->
- AddMove(phi_operand, live_range->GetSpillOperand());
+ label->GetOrCreateParallelMove(LGap::START, zone())->
+ AddMove(phi_operand, live_range->GetSpillOperand(), zone());
live_range->SetSpillStartIndex(phi->block()->first_instruction_index());
}
}
@@ -1151,14 +1151,15 @@ void LAllocator::ResolveControlFlow(LiveRange* range,
LInstruction* branch = InstructionAt(pred->last_instruction_index());
if (branch->HasPointerMap()) {
if (HasTaggedValue(range->id())) {
- branch->pointer_map()->RecordPointer(cur_op);
+ branch->pointer_map()->RecordPointer(cur_op, zone());
} else if (!cur_op->IsDoubleStackSlot() &&
!cur_op->IsDoubleRegister()) {
branch->pointer_map()->RemovePointer(cur_op);
}
}
}
- gap->GetOrCreateParallelMove(LGap::START)->AddMove(pred_op, cur_op);
+ gap->GetOrCreateParallelMove(
+ LGap::START, zone())->AddMove(pred_op, cur_op, zone());
}
}
}
@@ -1169,11 +1170,11 @@ LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
if (IsGapAt(index)) {
LGap* gap = GapAt(index);
return gap->GetOrCreateParallelMove(
- pos.IsInstructionStart() ? LGap::START : LGap::END);
+ pos.IsInstructionStart() ? LGap::START : LGap::END, zone());
}
int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
return GapAt(gap_pos)->GetOrCreateParallelMove(
- (gap_pos < index) ? LGap::AFTER : LGap::BEFORE);
+ (gap_pos < index) ? LGap::AFTER : LGap::BEFORE, zone());
}
@@ -1205,7 +1206,7 @@ void LAllocator::ConnectRanges() {
LParallelMove* move = GetConnectingParallelMove(pos);
LOperand* prev_operand = first_range->CreateAssignedOperand(zone_);
LOperand* cur_operand = second_range->CreateAssignedOperand(zone_);
- move->AddMove(prev_operand, cur_operand);
+ move->AddMove(prev_operand, cur_operand, zone());
}
}
}
@@ -1270,7 +1271,7 @@ void LAllocator::BuildLiveRanges() {
LOperand* hint = NULL;
LOperand* phi_operand = NULL;
LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
- LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
+ LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START, zone());
for (int j = 0; j < move->move_operands()->length(); ++j) {
LOperand* to = move->move_operands()->at(j).destination();
if (to->IsUnallocated() &&
@@ -1421,7 +1422,7 @@ void LAllocator::PopulatePointerMaps() {
safe_point >= range->spill_start_index()) {
TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
range->id(), range->spill_start_index(), safe_point);
- map->RecordPointer(range->GetSpillOperand());
+ map->RecordPointer(range->GetSpillOperand(), zone());
}
if (!cur->IsSpilled()) {
@@ -1430,7 +1431,7 @@ void LAllocator::PopulatePointerMaps() {
cur->id(), cur->Start().Value(), safe_point);
LOperand* operand = cur->CreateAssignedOperand(zone_);
ASSERT(!operand->IsStackSlot());
- map->RecordPointer(operand);
+ map->RecordPointer(operand, zone());
}
}
}
@@ -1632,13 +1633,13 @@ RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
void LAllocator::AddToActive(LiveRange* range) {
TraceAlloc("Add live range %d to active\n", range->id());
- active_live_ranges_.Add(range);
+ active_live_ranges_.Add(range, zone());
}
void LAllocator::AddToInactive(LiveRange* range) {
TraceAlloc("Add live range %d to inactive\n", range->id());
- inactive_live_ranges_.Add(range);
+ inactive_live_ranges_.Add(range, zone());
}
@@ -1649,13 +1650,13 @@ void LAllocator::AddToUnhandledSorted(LiveRange* range) {
LiveRange* cur_range = unhandled_live_ranges_.at(i);
if (range->ShouldBeAllocatedBefore(cur_range)) {
TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
- unhandled_live_ranges_.InsertAt(i + 1, range);
+ unhandled_live_ranges_.InsertAt(i + 1, range, zone());
ASSERT(UnhandledIsSorted());
return;
}
}
TraceAlloc("Add live range %d to unhandled at start\n", range->id());
- unhandled_live_ranges_.InsertAt(0, range);
+ unhandled_live_ranges_.InsertAt(0, range, zone());
ASSERT(UnhandledIsSorted());
}
@@ -1664,7 +1665,7 @@ void LAllocator::AddToUnhandledUnsorted(LiveRange* range) {
if (range == NULL || range->IsEmpty()) return;
ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
- unhandled_live_ranges_.Add(range);
+ unhandled_live_ranges_.Add(range, zone());
}
@@ -1705,7 +1706,7 @@ void LAllocator::FreeSpillSlot(LiveRange* range) {
int index = range->TopLevel()->GetSpillOperand()->index();
if (index >= 0) {
- reusable_slots_.Add(range);
+ reusable_slots_.Add(range, zone());
}
}
@@ -1733,7 +1734,7 @@ void LAllocator::ActiveToHandled(LiveRange* range) {
void LAllocator::ActiveToInactive(LiveRange* range) {
ASSERT(active_live_ranges_.Contains(range));
active_live_ranges_.RemoveElement(range);
- inactive_live_ranges_.Add(range);
+ inactive_live_ranges_.Add(range, zone());
TraceAlloc("Moving live range %d from active to inactive\n", range->id());
}
@@ -1749,7 +1750,7 @@ void LAllocator::InactiveToHandled(LiveRange* range) {
void LAllocator::InactiveToActive(LiveRange* range) {
ASSERT(inactive_live_ranges_.Contains(range));
inactive_live_ranges_.RemoveElement(range);
- active_live_ranges_.Add(range);
+ active_live_ranges_.Add(range, zone());
TraceAlloc("Moving live range %d from inactive to active\n", range->id());
}
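
The allocator changes above all follow one mechanical pattern: zone-backed lists no longer pick up an implicit isolate zone, and the caller threads the owning LAllocator's zone() into every Add()/InsertAt(). The standalone sketch below only illustrates the shape of that API change; Zone, IntList and Allocator here are simplified stand-ins, not the V8 classes.

    #include <cstddef>
    #include <vector>

    // Simplified stand-in for a bump-allocating zone (a real zone frees
    // everything at once when it dies).
    class Zone {
     public:
      ~Zone() { for (char* b : blocks_) delete[] b; }
      void* Allocate(size_t size) {
        char* b = new char[size];
        blocks_.push_back(b);
        return b;
      }
     private:
      std::vector<char*> blocks_;
    };

    // Stand-in list whose mutating operations take the zone explicitly,
    // mirroring ZoneList<T>::Add(value, zone) after this change.
    class IntList {
     public:
      void Add(int value, Zone* zone) {
        int* slot = static_cast<int*>(zone->Allocate(sizeof(int)));
        *slot = value;
        items_.push_back(slot);
      }
      int length() const { return static_cast<int>(items_.size()); }
     private:
      std::vector<int*> items_;
    };

    // The owner keeps the zone and threads it through, like LAllocator::zone().
    class Allocator {
     public:
      explicit Allocator(Zone* zone) : zone_(zone) {}
      Zone* zone() const { return zone_; }
      void AddToActive(int range_id) { active_.Add(range_id, zone()); }
     private:
      Zone* zone_;
      IntList active_;
    };

    int main() {
      Zone zone;
      Allocator allocator(&zone);
      allocator.AddToActive(1);
      return 0;
    }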
diff --git a/deps/v8/src/lithium-allocator.h b/deps/v8/src/lithium-allocator.h
index f5ab055ab3..d47e33595a 100644
--- a/deps/v8/src/lithium-allocator.h
+++ b/deps/v8/src/lithium-allocator.h
@@ -457,6 +457,7 @@ class LAllocator BASE_EMBEDDED {
LChunk* chunk() const { return chunk_; }
HGraph* graph() const { return graph_; }
+ Zone* zone() const { return zone_; }
int GetVirtualRegister() {
if (next_virtual_register_ > LUnallocated::kMaxVirtualRegisters) {
diff --git a/deps/v8/src/lithium.cc b/deps/v8/src/lithium.cc
index aefd8b6492..fd8b7965f1 100644
--- a/deps/v8/src/lithium.cc
+++ b/deps/v8/src/lithium.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -95,31 +95,37 @@ void LOperand::PrintTo(StringStream* stream) {
}
#define DEFINE_OPERAND_CACHE(name, type) \
- name* name::cache = NULL; \
- void name::SetUpCache() { \
+ L##name* L##name::cache = NULL; \
+ \
+ void L##name::SetUpCache() { \
if (cache) return; \
- cache = new name[kNumCachedOperands]; \
+ cache = new L##name[kNumCachedOperands]; \
for (int i = 0; i < kNumCachedOperands; i++) { \
cache[i].ConvertTo(type, i); \
} \
} \
+ \
+ void L##name::TearDownCache() { \
+ delete[] cache; \
+ }
-DEFINE_OPERAND_CACHE(LConstantOperand, CONSTANT_OPERAND)
-DEFINE_OPERAND_CACHE(LStackSlot, STACK_SLOT)
-DEFINE_OPERAND_CACHE(LDoubleStackSlot, DOUBLE_STACK_SLOT)
-DEFINE_OPERAND_CACHE(LRegister, REGISTER)
-DEFINE_OPERAND_CACHE(LDoubleRegister, DOUBLE_REGISTER)
-
+LITHIUM_OPERAND_LIST(DEFINE_OPERAND_CACHE)
#undef DEFINE_OPERAND_CACHE
void LOperand::SetUpCaches() {
- LConstantOperand::SetUpCache();
- LStackSlot::SetUpCache();
- LDoubleStackSlot::SetUpCache();
- LRegister::SetUpCache();
- LDoubleRegister::SetUpCache();
+#define LITHIUM_OPERAND_SETUP(name, type) L##name::SetUpCache();
+ LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_SETUP)
+#undef LITHIUM_OPERAND_SETUP
+}
+
+
+void LOperand::TearDownCaches() {
+#define LITHIUM_OPERAND_TEARDOWN(name, type) L##name::TearDownCache();
+ LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_TEARDOWN)
+#undef LITHIUM_OPERAND_TEARDOWN
}
+
bool LParallelMove::IsRedundant() const {
for (int i = 0; i < move_operands_.length(); ++i) {
if (!move_operands_[i].IsRedundant()) return false;
@@ -165,11 +171,11 @@ void LEnvironment::PrintTo(StringStream* stream) {
}
-void LPointerMap::RecordPointer(LOperand* op) {
+void LPointerMap::RecordPointer(LOperand* op, Zone* zone) {
// Do not record arguments as pointers.
if (op->IsStackSlot() && op->index() < 0) return;
ASSERT(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
- pointer_operands_.Add(op);
+ pointer_operands_.Add(op, zone);
}
@@ -186,11 +192,11 @@ void LPointerMap::RemovePointer(LOperand* op) {
}
-void LPointerMap::RecordUntagged(LOperand* op) {
+void LPointerMap::RecordUntagged(LOperand* op, Zone* zone) {
// Do not record arguments as pointers.
if (op->IsStackSlot() && op->index() < 0) return;
ASSERT(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
- untagged_operands_.Add(op);
+ untagged_operands_.Add(op, zone);
}
@@ -219,9 +225,12 @@ int ElementsKindToShiftSize(ElementsKind elements_kind) {
return 2;
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
return 3;
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
return kPointerSizeLog2;
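
The DEFINE_OPERAND_CACHE rewrite above is a classic X-macro: LITHIUM_OPERAND_LIST enumerates the operand kinds once, and cache setup, the new teardown, and (in lithium.h) the Is##name() predicates all expand from that single list. Below is a minimal self-contained sketch of the idiom with made-up names, not the V8 macros themselves.

    #include <cstdio>

    #define OPERAND_LIST(V) \
      V(StackSlot)          \
      V(Register)

    #define DEFINE_CACHE(name)                                      \
      static int name##_cache_size = 0;                             \
      static void SetUp##name##Cache() { name##_cache_size = 8; }   \
      static void TearDown##name##Cache() { name##_cache_size = 0; }
    OPERAND_LIST(DEFINE_CACHE)
    #undef DEFINE_CACHE

    static void SetUpCaches() {
    #define SETUP(name) SetUp##name##Cache();
      OPERAND_LIST(SETUP)
    #undef SETUP
    }

    static void TearDownCaches() {
    #define TEARDOWN(name) TearDown##name##Cache();
      OPERAND_LIST(TEARDOWN)
    #undef TEARDOWN
    }

    int main() {
      SetUpCaches();
      std::printf("%d %d\n", StackSlot_cache_size, Register_cache_size);
      TearDownCaches();
      return 0;
    }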
diff --git a/deps/v8/src/lithium.h b/deps/v8/src/lithium.h
index d1e2e3cdef..1f42b686a7 100644
--- a/deps/v8/src/lithium.h
+++ b/deps/v8/src/lithium.h
@@ -35,6 +35,14 @@
namespace v8 {
namespace internal {
+#define LITHIUM_OPERAND_LIST(V) \
+ V(ConstantOperand, CONSTANT_OPERAND) \
+ V(StackSlot, STACK_SLOT) \
+ V(DoubleStackSlot, DOUBLE_STACK_SLOT) \
+ V(Register, REGISTER) \
+ V(DoubleRegister, DOUBLE_REGISTER)
+
+
class LOperand: public ZoneObject {
public:
enum Kind {
@@ -52,14 +60,13 @@ class LOperand: public ZoneObject {
Kind kind() const { return KindField::decode(value_); }
int index() const { return static_cast<int>(value_) >> kKindFieldWidth; }
- bool IsConstantOperand() const { return kind() == CONSTANT_OPERAND; }
- bool IsStackSlot() const { return kind() == STACK_SLOT; }
- bool IsDoubleStackSlot() const { return kind() == DOUBLE_STACK_SLOT; }
- bool IsRegister() const { return kind() == REGISTER; }
- bool IsDoubleRegister() const { return kind() == DOUBLE_REGISTER; }
- bool IsArgument() const { return kind() == ARGUMENT; }
- bool IsUnallocated() const { return kind() == UNALLOCATED; }
- bool IsIgnored() const { return kind() == INVALID; }
+#define LITHIUM_OPERAND_PREDICATE(name, type) \
+ bool Is##name() const { return kind() == type; }
+ LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_PREDICATE)
+ LITHIUM_OPERAND_PREDICATE(Argument, ARGUMENT)
+ LITHIUM_OPERAND_PREDICATE(Unallocated, UNALLOCATED)
+ LITHIUM_OPERAND_PREDICATE(Ignored, INVALID)
+#undef LITHIUM_OPERAND_PREDICATE
bool Equals(LOperand* other) const { return value_ == other->value_; }
void PrintTo(StringStream* stream);
@@ -69,9 +76,9 @@ class LOperand: public ZoneObject {
ASSERT(this->index() == index);
}
- // Calls SetUpCache() for each subclass. Don't forget to update this method
- // if you add a new LOperand subclass.
+ // Calls SetUpCache()/TearDownCache() for each subclass.
static void SetUpCaches();
+ static void TearDownCaches();
protected:
static const int kKindFieldWidth = 3;
@@ -180,8 +187,8 @@ class LUnallocated: public LOperand {
value_ = VirtualRegisterField::update(value_, id);
}
- LUnallocated* CopyUnconstrained() {
- LUnallocated* result = new LUnallocated(ANY);
+ LUnallocated* CopyUnconstrained(Zone* zone) {
+ LUnallocated* result = new(zone) LUnallocated(ANY);
result->set_virtual_register(virtual_register());
return result;
}
@@ -253,10 +260,10 @@ class LMoveOperands BASE_EMBEDDED {
class LConstantOperand: public LOperand {
public:
- static LConstantOperand* Create(int index) {
+ static LConstantOperand* Create(int index, Zone* zone) {
ASSERT(index >= 0);
if (index < kNumCachedOperands) return &cache[index];
- return new LConstantOperand(index);
+ return new(zone) LConstantOperand(index);
}
static LConstantOperand* cast(LOperand* op) {
@@ -265,6 +272,7 @@ class LConstantOperand: public LOperand {
}
static void SetUpCache();
+ static void TearDownCache();
private:
static const int kNumCachedOperands = 128;
@@ -288,10 +296,10 @@ class LArgument: public LOperand {
class LStackSlot: public LOperand {
public:
- static LStackSlot* Create(int index) {
+ static LStackSlot* Create(int index, Zone* zone) {
ASSERT(index >= 0);
if (index < kNumCachedOperands) return &cache[index];
- return new LStackSlot(index);
+ return new(zone) LStackSlot(index);
}
static LStackSlot* cast(LOperand* op) {
@@ -300,6 +308,7 @@ class LStackSlot: public LOperand {
}
static void SetUpCache();
+ static void TearDownCache();
private:
static const int kNumCachedOperands = 128;
@@ -312,10 +321,10 @@ class LStackSlot: public LOperand {
class LDoubleStackSlot: public LOperand {
public:
- static LDoubleStackSlot* Create(int index) {
+ static LDoubleStackSlot* Create(int index, Zone* zone) {
ASSERT(index >= 0);
if (index < kNumCachedOperands) return &cache[index];
- return new LDoubleStackSlot(index);
+ return new(zone) LDoubleStackSlot(index);
}
static LDoubleStackSlot* cast(LOperand* op) {
@@ -324,6 +333,7 @@ class LDoubleStackSlot: public LOperand {
}
static void SetUpCache();
+ static void TearDownCache();
private:
static const int kNumCachedOperands = 128;
@@ -336,10 +346,10 @@ class LDoubleStackSlot: public LOperand {
class LRegister: public LOperand {
public:
- static LRegister* Create(int index) {
+ static LRegister* Create(int index, Zone* zone) {
ASSERT(index >= 0);
if (index < kNumCachedOperands) return &cache[index];
- return new LRegister(index);
+ return new(zone) LRegister(index);
}
static LRegister* cast(LOperand* op) {
@@ -348,6 +358,7 @@ class LRegister: public LOperand {
}
static void SetUpCache();
+ static void TearDownCache();
private:
static const int kNumCachedOperands = 16;
@@ -360,10 +371,10 @@ class LRegister: public LOperand {
class LDoubleRegister: public LOperand {
public:
- static LDoubleRegister* Create(int index) {
+ static LDoubleRegister* Create(int index, Zone* zone) {
ASSERT(index >= 0);
if (index < kNumCachedOperands) return &cache[index];
- return new LDoubleRegister(index);
+ return new(zone) LDoubleRegister(index);
}
static LDoubleRegister* cast(LOperand* op) {
@@ -372,6 +383,7 @@ class LDoubleRegister: public LOperand {
}
static void SetUpCache();
+ static void TearDownCache();
private:
static const int kNumCachedOperands = 16;
@@ -384,10 +396,10 @@ class LDoubleRegister: public LOperand {
class LParallelMove : public ZoneObject {
public:
- LParallelMove() : move_operands_(4) { }
+ explicit LParallelMove(Zone* zone) : move_operands_(4, zone) { }
- void AddMove(LOperand* from, LOperand* to) {
- move_operands_.Add(LMoveOperands(from, to));
+ void AddMove(LOperand* from, LOperand* to, Zone* zone) {
+ move_operands_.Add(LMoveOperands(from, to), zone);
}
bool IsRedundant() const;
@@ -405,9 +417,9 @@ class LParallelMove : public ZoneObject {
class LPointerMap: public ZoneObject {
public:
- explicit LPointerMap(int position)
- : pointer_operands_(8),
- untagged_operands_(0),
+ explicit LPointerMap(int position, Zone* zone)
+ : pointer_operands_(8, zone),
+ untagged_operands_(0, zone),
position_(position),
lithium_position_(-1) { }
@@ -426,9 +438,9 @@ class LPointerMap: public ZoneObject {
lithium_position_ = pos;
}
- void RecordPointer(LOperand* op);
+ void RecordPointer(LOperand* op, Zone* zone);
void RemovePointer(LOperand* op);
- void RecordUntagged(LOperand* op);
+ void RecordUntagged(LOperand* op, Zone* zone);
void PrintTo(StringStream* stream);
private:
@@ -447,7 +459,8 @@ class LEnvironment: public ZoneObject {
int parameter_count,
int argument_count,
int value_count,
- LEnvironment* outer)
+ LEnvironment* outer,
+ Zone* zone)
: closure_(closure),
frame_type_(frame_type),
arguments_stack_height_(argument_count),
@@ -456,11 +469,12 @@ class LEnvironment: public ZoneObject {
ast_id_(ast_id),
parameter_count_(parameter_count),
pc_offset_(-1),
- values_(value_count),
+ values_(value_count, zone),
is_tagged_(value_count, closure->GetHeap()->isolate()->zone()),
spilled_registers_(NULL),
spilled_double_registers_(NULL),
- outer_(outer) { }
+ outer_(outer),
+ zone_(zone) { }
Handle<JSFunction> closure() const { return closure_; }
FrameType frame_type() const { return frame_type_; }
@@ -478,7 +492,7 @@ class LEnvironment: public ZoneObject {
LEnvironment* outer() const { return outer_; }
void AddValue(LOperand* operand, Representation representation) {
- values_.Add(operand);
+ values_.Add(operand, zone());
if (representation.IsTagged()) {
is_tagged_.Add(values_.length() - 1);
}
@@ -508,6 +522,8 @@ class LEnvironment: public ZoneObject {
void PrintTo(StringStream* stream);
+ Zone* zone() const { return zone_; }
+
private:
Handle<JSFunction> closure_;
FrameType frame_type_;
@@ -527,6 +543,8 @@ class LEnvironment: public ZoneObject {
LOperand** spilled_double_registers_;
LEnvironment* outer_;
+
+ Zone* zone_;
};
diff --git a/deps/v8/src/liveedit-debugger.js b/deps/v8/src/liveedit-debugger.js
index abfb0f69c6..4463c93e2a 100644
--- a/deps/v8/src/liveedit-debugger.js
+++ b/deps/v8/src/liveedit-debugger.js
@@ -159,6 +159,11 @@ Debug.LiveEdit = new function() {
preview_description.stack_modified = dropped_functions_number != 0;
+ // Our current implementation requires the client to manually issue a
+ // "step in" command to get a correct stack state.
+ preview_description.stack_update_needs_step_in =
+ preview_description.stack_modified;
+
// Start with breakpoints. Convert their line/column positions and
// temporary remove.
var break_points_restorer = TemporaryRemoveBreakPoints(script, change_log);
diff --git a/deps/v8/src/liveedit.cc b/deps/v8/src/liveedit.cc
index 9c5294a26d..e670b442b6 100644
--- a/deps/v8/src/liveedit.cc
+++ b/deps/v8/src/liveedit.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -30,6 +30,7 @@
#include "liveedit.h"
+#include "code-stubs.h"
#include "compilation-cache.h"
#include "compiler.h"
#include "debug.h"
@@ -824,7 +825,8 @@ class FunctionInfoListener {
// Saves full information about a function: its code, its scope info
// and a SharedFunctionInfo object.
- void FunctionInfo(Handle<SharedFunctionInfo> shared, Scope* scope) {
+ void FunctionInfo(Handle<SharedFunctionInfo> shared, Scope* scope,
+ Zone* zone) {
if (!shared->IsSharedFunctionInfo()) {
return;
}
@@ -835,14 +837,14 @@ class FunctionInfoListener {
Handle<Object>(shared->scope_info()));
info.SetSharedFunctionInfo(shared);
- Handle<Object> scope_info_list(SerializeFunctionScope(scope));
+ Handle<Object> scope_info_list(SerializeFunctionScope(scope, zone));
info.SetOuterScopeInfo(scope_info_list);
}
Handle<JSArray> GetResult() { return result_; }
private:
- Object* SerializeFunctionScope(Scope* scope) {
+ Object* SerializeFunctionScope(Scope* scope, Zone* zone) {
HandleScope handle_scope;
Handle<JSArray> scope_info_list = FACTORY->NewJSArray(10);
@@ -856,8 +858,8 @@ class FunctionInfoListener {
return HEAP->undefined_value();
}
do {
- ZoneList<Variable*> stack_list(outer_scope->StackLocalCount());
- ZoneList<Variable*> context_list(outer_scope->ContextLocalCount());
+ ZoneList<Variable*> stack_list(outer_scope->StackLocalCount(), zone);
+ ZoneList<Variable*> context_list(outer_scope->ContextLocalCount(), zone);
outer_scope->CollectStackAndContextLocals(&stack_list, &context_list);
context_list.Sort(&Variable::CompareIndex);
@@ -926,28 +928,32 @@ void LiveEdit::WrapSharedFunctionInfos(Handle<JSArray> array) {
// It works in context of ZoneScope.
class ReferenceCollectorVisitor : public ObjectVisitor {
public:
- explicit ReferenceCollectorVisitor(Code* original)
- : original_(original), rvalues_(10), reloc_infos_(10), code_entries_(10) {
+ ReferenceCollectorVisitor(Code* original, Zone* zone)
+ : original_(original),
+ rvalues_(10, zone),
+ reloc_infos_(10, zone),
+ code_entries_(10, zone),
+ zone_(zone) {
}
virtual void VisitPointers(Object** start, Object** end) {
for (Object** p = start; p < end; p++) {
if (*p == original_) {
- rvalues_.Add(p);
+ rvalues_.Add(p, zone_);
}
}
}
virtual void VisitCodeEntry(Address entry) {
if (Code::GetObjectFromEntryAddress(entry) == original_) {
- code_entries_.Add(entry);
+ code_entries_.Add(entry, zone_);
}
}
virtual void VisitCodeTarget(RelocInfo* rinfo) {
if (RelocInfo::IsCodeTarget(rinfo->rmode()) &&
Code::GetCodeFromTargetAddress(rinfo->target_address()) == original_) {
- reloc_infos_.Add(*rinfo);
+ reloc_infos_.Add(*rinfo, zone_);
}
}
@@ -976,6 +982,7 @@ class ReferenceCollectorVisitor : public ObjectVisitor {
ZoneList<Object**> rvalues_;
ZoneList<RelocInfo> reloc_infos_;
ZoneList<Address> code_entries_;
+ Zone* zone_;
};
@@ -989,7 +996,7 @@ static void ReplaceCodeObject(Code* original, Code* substitution) {
// A zone scope for ReferenceCollectorVisitor.
ZoneScope scope(Isolate::Current(), DELETE_ON_EXIT);
- ReferenceCollectorVisitor visitor(original);
+ ReferenceCollectorVisitor visitor(original, Isolate::Current()->zone());
// Iterate over all roots. Stack frames may have pointers into the original
// code, so temporarily replace the pointers with offset numbers
@@ -1475,26 +1482,36 @@ static const char* DropFrames(Vector<StackFrame*> frames,
// Check the nature of the top frame.
Isolate* isolate = Isolate::Current();
Code* pre_top_frame_code = pre_top_frame->LookupCode();
+ bool frame_has_padding;
if (pre_top_frame_code->is_inline_cache_stub() &&
pre_top_frame_code->ic_state() == DEBUG_BREAK) {
// OK, we can drop inline cache calls.
*mode = Debug::FRAME_DROPPED_IN_IC_CALL;
+ frame_has_padding = Debug::FramePaddingLayout::kIsSupported;
} else if (pre_top_frame_code ==
isolate->debug()->debug_break_slot()) {
// OK, we can drop debug break slot.
*mode = Debug::FRAME_DROPPED_IN_DEBUG_SLOT_CALL;
+ frame_has_padding = Debug::FramePaddingLayout::kIsSupported;
} else if (pre_top_frame_code ==
isolate->builtins()->builtin(
Builtins::kFrameDropper_LiveEdit)) {
// OK, we can drop our own code.
*mode = Debug::FRAME_DROPPED_IN_DIRECT_CALL;
+ frame_has_padding = false;
} else if (pre_top_frame_code ==
isolate->builtins()->builtin(Builtins::kReturn_DebugBreak)) {
*mode = Debug::FRAME_DROPPED_IN_RETURN_CALL;
+ frame_has_padding = Debug::FramePaddingLayout::kIsSupported;
} else if (pre_top_frame_code->kind() == Code::STUB &&
- pre_top_frame_code->major_key()) {
- // Entry from our unit tests, it's fine, we support this case.
+ pre_top_frame_code->major_key() == CodeStub::CEntry) {
+ // Entry from our unit tests on 'debugger' statement.
+ // It's fine, we support this case.
*mode = Debug::FRAME_DROPPED_IN_DIRECT_CALL;
+ // We don't have padding from a 'debugger' statement call.
+ // Here the stub is CEntry, it's not debug-only and can't be padded.
+ // If anyone would complain, a proxy padded stub could be added.
+ frame_has_padding = false;
} else {
return "Unknown structure of stack above changing function";
}
@@ -1504,8 +1521,49 @@ static const char* DropFrames(Vector<StackFrame*> frames,
- Debug::kFrameDropperFrameSize * kPointerSize // Size of the new frame.
+ kPointerSize; // Bigger address end is exclusive.
+ Address* top_frame_pc_address = top_frame->pc_address();
+
+ // top_frame may be damaged below this point. Do not use it.
+ ASSERT(!(top_frame = NULL));
+
if (unused_stack_top > unused_stack_bottom) {
- return "Not enough space for frame dropper frame";
+ if (frame_has_padding) {
+ int shortage_bytes =
+ static_cast<int>(unused_stack_top - unused_stack_bottom);
+
+ Address padding_start = pre_top_frame->fp() -
+ Debug::FramePaddingLayout::kFrameBaseSize * kPointerSize;
+
+ Address padding_pointer = padding_start;
+ Smi* padding_object =
+ Smi::FromInt(Debug::FramePaddingLayout::kPaddingValue);
+ while (Memory::Object_at(padding_pointer) == padding_object) {
+ padding_pointer -= kPointerSize;
+ }
+ int padding_counter =
+ Smi::cast(Memory::Object_at(padding_pointer))->value();
+ if (padding_counter * kPointerSize < shortage_bytes) {
+ return "Not enough space for frame dropper frame "
+ "(even with padding frame)";
+ }
+ Memory::Object_at(padding_pointer) =
+ Smi::FromInt(padding_counter - shortage_bytes / kPointerSize);
+
+ StackFrame* pre_pre_frame = frames[top_frame_index - 2];
+
+ memmove(padding_start + kPointerSize - shortage_bytes,
+ padding_start + kPointerSize,
+ Debug::FramePaddingLayout::kFrameBaseSize * kPointerSize);
+
+ pre_top_frame->UpdateFp(pre_top_frame->fp() - shortage_bytes);
+ pre_pre_frame->SetCallerFp(pre_top_frame->fp());
+ unused_stack_top -= shortage_bytes;
+
+ STATIC_ASSERT(sizeof(Address) == kPointerSize);
+ top_frame_pc_address -= shortage_bytes / kPointerSize;
+ } else {
+ return "Not enough space for frame dropper frame";
+ }
}
// Committing now. After this point we should return only NULL value.
@@ -1515,7 +1573,7 @@ static const char* DropFrames(Vector<StackFrame*> frames,
ASSERT(!FixTryCatchHandler(pre_top_frame, bottom_js_frame));
Handle<Code> code = Isolate::Current()->builtins()->FrameDropper_LiveEdit();
- top_frame->set_pc(code->entry());
+ *top_frame_pc_address = code->entry();
pre_top_frame->SetCallerFp(bottom_js_frame->fp());
*restarter_frame_function_pointer =
@@ -1540,11 +1598,12 @@ static bool IsDropableFrame(StackFrame* frame) {
// Fills result array with statuses of functions. Modifies the stack
// removing all listed functions if possible and if do_drop is true.
static const char* DropActivationsInActiveThread(
- Handle<JSArray> shared_info_array, Handle<JSArray> result, bool do_drop) {
+ Handle<JSArray> shared_info_array, Handle<JSArray> result, bool do_drop,
+ Zone* zone) {
Isolate* isolate = Isolate::Current();
Debug* debug = isolate->debug();
ZoneScope scope(isolate, DELETE_ON_EXIT);
- Vector<StackFrame*> frames = CreateStackMap();
+ Vector<StackFrame*> frames = CreateStackMap(zone);
int array_len = Smi::cast(shared_info_array->length())->value();
@@ -1671,7 +1730,7 @@ class InactiveThreadActivationsChecker : public ThreadVisitor {
Handle<JSArray> LiveEdit::CheckAndDropActivations(
- Handle<JSArray> shared_info_array, bool do_drop) {
+ Handle<JSArray> shared_info_array, bool do_drop, Zone* zone) {
int len = Smi::cast(shared_info_array->length())->value();
Handle<JSArray> result = FACTORY->NewJSArray(len);
@@ -1696,7 +1755,7 @@ Handle<JSArray> LiveEdit::CheckAndDropActivations(
// Try to drop activations from the current stack.
const char* error_message =
- DropActivationsInActiveThread(shared_info_array, result, do_drop);
+ DropActivationsInActiveThread(shared_info_array, result, do_drop, zone);
if (error_message != NULL) {
// Add error message as an array extra element.
Vector<const char> vector_message(error_message, StrLength(error_message));
@@ -1724,9 +1783,11 @@ LiveEditFunctionTracker::~LiveEditFunctionTracker() {
void LiveEditFunctionTracker::RecordFunctionInfo(
- Handle<SharedFunctionInfo> info, FunctionLiteral* lit) {
+ Handle<SharedFunctionInfo> info, FunctionLiteral* lit,
+ Zone* zone) {
if (isolate_->active_function_info_listener() != NULL) {
- isolate_->active_function_info_listener()->FunctionInfo(info, lit->scope());
+ isolate_->active_function_info_listener()->FunctionInfo(info, lit->scope(),
+ zone);
}
}
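
The padding branch added to DropFrames above only succeeds when the debug-break padding pushed below the pre-top frame can absorb the missing space. The following tiny model walks through that check with made-up numbers; the constants are illustrative only, not V8's layout.

    #include <cassert>

    int main() {
      const int kPointerSize = 4;
      int shortage_bytes = 24;     // unused_stack_top - unused_stack_bottom
      int padding_counter = 10;    // spare padding slots below the pre-top fp
      // Same test as the diff: fail unless the padding can cover the shortage.
      assert(padding_counter * kPointerSize >= shortage_bytes);
      int remaining = padding_counter - shortage_bytes / kPointerSize;
      assert(remaining == 4);      // fp and the saved pc then slide down 24 bytes
      return 0;
    }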
diff --git a/deps/v8/src/liveedit.h b/deps/v8/src/liveedit.h
index 4ee4466126..424c24e351 100644
--- a/deps/v8/src/liveedit.h
+++ b/deps/v8/src/liveedit.h
@@ -69,7 +69,7 @@ class LiveEditFunctionTracker {
explicit LiveEditFunctionTracker(Isolate* isolate, FunctionLiteral* fun);
~LiveEditFunctionTracker();
void RecordFunctionInfo(Handle<SharedFunctionInfo> info,
- FunctionLiteral* lit);
+ FunctionLiteral* lit, Zone* zone);
void RecordRootFunctionInfo(Handle<Code> code);
static bool IsActive(Isolate* isolate);
@@ -121,7 +121,7 @@ class LiveEdit : AllStatic {
// has restart the lowest found frames and drops all other frames above
// if possible and if do_drop is true.
static Handle<JSArray> CheckAndDropActivations(
- Handle<JSArray> shared_info_array, bool do_drop);
+ Handle<JSArray> shared_info_array, bool do_drop, Zone* zone);
// A copy of this is in liveedit-debugger.js.
enum FunctionPatchabilityStatus {
diff --git a/deps/v8/src/log.cc b/deps/v8/src/log.cc
index 21d64df21c..d93a9d82b1 100644
--- a/deps/v8/src/log.cc
+++ b/deps/v8/src/log.cc
@@ -1730,13 +1730,20 @@ void Logger::EnableSlidingStateWindow() {
}
// Protects the state below.
-static LazyMutex active_samplers_mutex = LAZY_MUTEX_INITIALIZER;
+static Mutex* active_samplers_mutex = NULL;
List<Sampler*>* SamplerRegistry::active_samplers_ = NULL;
+void SamplerRegistry::SetUp() {
+ if (!active_samplers_mutex) {
+ active_samplers_mutex = OS::CreateMutex();
+ }
+}
+
+
bool SamplerRegistry::IterateActiveSamplers(VisitSampler func, void* param) {
- ScopedLock lock(active_samplers_mutex.Pointer());
+ ScopedLock lock(active_samplers_mutex);
for (int i = 0;
ActiveSamplersExist() && i < active_samplers_->length();
++i) {
@@ -1763,7 +1770,7 @@ SamplerRegistry::State SamplerRegistry::GetState() {
void SamplerRegistry::AddActiveSampler(Sampler* sampler) {
ASSERT(sampler->IsActive());
- ScopedLock lock(active_samplers_mutex.Pointer());
+ ScopedLock lock(active_samplers_mutex);
if (active_samplers_ == NULL) {
active_samplers_ = new List<Sampler*>;
} else {
@@ -1775,7 +1782,7 @@ void SamplerRegistry::AddActiveSampler(Sampler* sampler) {
void SamplerRegistry::RemoveActiveSampler(Sampler* sampler) {
ASSERT(sampler->IsActive());
- ScopedLock lock(active_samplers_mutex.Pointer());
+ ScopedLock lock(active_samplers_mutex);
ASSERT(active_samplers_ != NULL);
bool removed = active_samplers_->RemoveElement(sampler);
ASSERT(removed);
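
The logging change replaces a lazily-initialized static mutex with one created in an explicit SamplerRegistry::SetUp() hook, so the initialization point is under V8's control rather than the first caller's. A rough stand-in sketch of that idiom, using std::mutex instead of the OS::CreateMutex() call V8 uses:

    #include <mutex>

    static std::mutex* registry_mutex = nullptr;

    // Explicit one-time initialization, mirroring SamplerRegistry::SetUp().
    void SetUpRegistry() {
      if (registry_mutex == nullptr) registry_mutex = new std::mutex;
    }

    int main() {
      SetUpRegistry();
      std::lock_guard<std::mutex> lock(*registry_mutex);  // ScopedLock equivalent
      return 0;
    }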
diff --git a/deps/v8/src/log.h b/deps/v8/src/log.h
index 129738757e..03c7b3b670 100644
--- a/deps/v8/src/log.h
+++ b/deps/v8/src/log.h
@@ -437,6 +437,8 @@ class SamplerRegistry : public AllStatic {
HAS_CPU_PROFILING_SAMPLERS
};
+ static void SetUp();
+
typedef void (*VisitSampler)(Sampler*, void*);
static State GetState();
diff --git a/deps/v8/src/macros.py b/deps/v8/src/macros.py
index 93287ae3d4..08fa82e686 100644
--- a/deps/v8/src/macros.py
+++ b/deps/v8/src/macros.py
@@ -196,6 +196,7 @@ macro SET_UTC_DATE_VALUE(arg, value) = (%DateSetValue(arg, value, 1));
macro SET_LOCAL_DATE_VALUE(arg, value) = (%DateSetValue(arg, value, 0));
# Last input and last subject of regexp matches.
+const LAST_SUBJECT_INDEX = 1;
macro LAST_SUBJECT(array) = ((array)[1]);
macro LAST_INPUT(array) = ((array)[2]);
@@ -204,6 +205,15 @@ macro CAPTURE(index) = (3 + (index));
const CAPTURE0 = 3;
const CAPTURE1 = 4;
+# For the regexp capture override array. This has the same
+# format as the arguments to a function called from
+# String.prototype.replace.
+macro OVERRIDE_MATCH(override) = ((override)[0]);
+macro OVERRIDE_POS(override) = ((override)[(override).length - 2]);
+macro OVERRIDE_SUBJECT(override) = ((override)[(override).length - 1]);
+# 1-based, so an index of 1 returns the first capture.
+macro OVERRIDE_CAPTURE(override, index) = ((override)[(index)]);
+
# PropertyDescriptor return value indices - must match
# PropertyDescriptorIndices in runtime.cc.
const IS_ACCESSOR_INDEX = 0;
diff --git a/deps/v8/src/mark-compact-inl.h b/deps/v8/src/mark-compact-inl.h
index 43f6b8986f..2f7e31fea5 100644
--- a/deps/v8/src/mark-compact-inl.h
+++ b/deps/v8/src/mark-compact-inl.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -52,6 +52,15 @@ void MarkCompactCollector::SetFlags(int flags) {
}
+bool MarkCompactCollector::MarkObjectAndPush(HeapObject* obj) {
+ if (MarkObjectWithoutPush(obj)) {
+ marking_deque_.PushBlack(obj);
+ return true;
+ }
+ return false;
+}
+
+
void MarkCompactCollector::MarkObject(HeapObject* obj, MarkBit mark_bit) {
ASSERT(Marking::MarkBitFrom(obj) == mark_bit);
if (!mark_bit.Get()) {
@@ -62,16 +71,13 @@ void MarkCompactCollector::MarkObject(HeapObject* obj, MarkBit mark_bit) {
}
-bool MarkCompactCollector::MarkObjectWithoutPush(HeapObject* object) {
- MarkBit mark = Marking::MarkBitFrom(object);
- bool old_mark = mark.Get();
- if (!old_mark) SetMark(object, mark);
- return old_mark;
-}
-
-
-void MarkCompactCollector::MarkObjectAndPush(HeapObject* object) {
- if (!MarkObjectWithoutPush(object)) marking_deque_.PushBlack(object);
+bool MarkCompactCollector::MarkObjectWithoutPush(HeapObject* obj) {
+ MarkBit mark_bit = Marking::MarkBitFrom(obj);
+ if (!mark_bit.Get()) {
+ SetMark(obj, mark_bit);
+ return true;
+ }
+ return false;
}
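
The reshuffled inline functions above give both predicates a boolean result: MarkObjectWithoutPush() now reports whether it actually marked the object, and MarkObjectAndPush() enqueues the object only on a fresh mark. A standalone sketch of that contract with stand-in types (no mark bitmaps or deque-overflow handling here):

    #include <cassert>
    #include <deque>

    struct Obj { bool marked = false; };

    static std::deque<Obj*> marking_deque;

    // Marks the object black; returns true only if it was not marked before.
    bool MarkObjectWithoutPush(Obj* obj) {
      if (obj->marked) return false;
      obj->marked = true;
      return true;
    }

    // Marks and enqueues for scanning, but only on a fresh mark.
    bool MarkObjectAndPush(Obj* obj) {
      if (!MarkObjectWithoutPush(obj)) return false;
      marking_deque.push_back(obj);
      return true;
    }

    int main() {
      Obj o;
      assert(MarkObjectAndPush(&o));    // first visit: marked and pushed
      assert(!MarkObjectAndPush(&o));   // second visit: already black
      assert(marking_deque.size() == 1);
      return 0;
    }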
diff --git a/deps/v8/src/mark-compact.cc b/deps/v8/src/mark-compact.cc
index b4f488bd95..878c97413b 100644
--- a/deps/v8/src/mark-compact.cc
+++ b/deps/v8/src/mark-compact.cc
@@ -64,13 +64,13 @@ MarkCompactCollector::MarkCompactCollector() : // NOLINT
abort_incremental_marking_(false),
compacting_(false),
was_marked_incrementally_(false),
- collect_maps_(FLAG_collect_maps),
flush_monomorphic_ics_(false),
tracer_(NULL),
migration_slots_buffer_(NULL),
heap_(NULL),
code_flusher_(NULL),
- encountered_weak_maps_(NULL) { }
+ encountered_weak_maps_(NULL),
+ marker_(this, this) { }
#ifdef DEBUG
@@ -282,7 +282,7 @@ void MarkCompactCollector::CollectGarbage() {
MarkLiveObjects();
ASSERT(heap_->incremental_marking()->IsStopped());
- if (collect_maps_) ClearNonLiveTransitions();
+ if (FLAG_collect_maps) ClearNonLiveTransitions();
ClearWeakMaps();
@@ -294,9 +294,7 @@ void MarkCompactCollector::CollectGarbage() {
SweepSpaces();
- if (!collect_maps_) ReattachInitialMaps();
-
- heap_->isolate()->inner_pointer_to_code_cache()->Flush();
+ if (!FLAG_collect_maps) ReattachInitialMaps();
Finish();
@@ -337,6 +335,7 @@ void MarkCompactCollector::VerifyMarkbitsAreClean() {
for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
MarkBit mark_bit = Marking::MarkBitFrom(obj);
ASSERT(Marking::IsWhite(mark_bit));
+ ASSERT_EQ(0, Page::FromAddress(obj->address())->LiveBytes());
}
}
#endif
@@ -373,6 +372,7 @@ void MarkCompactCollector::ClearMarkbits() {
MarkBit mark_bit = Marking::MarkBitFrom(obj);
mark_bit.Clear();
mark_bit.Next().Clear();
+ Page::FromAddress(obj->address())->ResetLiveBytes();
}
}
@@ -658,11 +658,6 @@ void MarkCompactCollector::AbortCompaction() {
void MarkCompactCollector::Prepare(GCTracer* tracer) {
was_marked_incrementally_ = heap()->incremental_marking()->IsMarking();
- // Disable collection of maps if incremental marking is enabled.
- // Map collection algorithm relies on a special map transition tree traversal
- // order which is not implemented for incremental marking.
- collect_maps_ = FLAG_collect_maps && !was_marked_incrementally_;
-
// Monomorphic ICs are preserved when possible, but need to be flushed
// when they might be keeping a Context alive, or when the heap is about
// to be serialized.
@@ -680,7 +675,6 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) {
ASSERT(!FLAG_never_compact || !FLAG_always_compact);
- if (collect_maps_) CreateBackPointers();
#ifdef ENABLE_GDB_JIT_INTERFACE
if (FLAG_gdbjit) {
// If GDBJIT interface is active disable compaction.
@@ -1150,9 +1144,10 @@ class StaticMarkingVisitor : public StaticVisitorBase {
JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(object);
// Enqueue weak map in linked list of encountered weak maps.
- ASSERT(weak_map->next() == Smi::FromInt(0));
- weak_map->set_next(collector->encountered_weak_maps());
- collector->set_encountered_weak_maps(weak_map);
+ if (weak_map->next() == Smi::FromInt(0)) {
+ weak_map->set_next(collector->encountered_weak_maps());
+ collector->set_encountered_weak_maps(weak_map);
+ }
// Skip visiting the backing hash table containing the mappings.
int object_size = JSWeakMap::BodyDescriptor::SizeOf(map, object);
@@ -1168,9 +1163,15 @@ class StaticMarkingVisitor : public StaticVisitorBase {
object_size);
// Mark the backing hash table without pushing it on the marking stack.
- ObjectHashTable* table = ObjectHashTable::cast(weak_map->table());
- ASSERT(!MarkCompactCollector::IsMarked(table));
- collector->SetMark(table, Marking::MarkBitFrom(table));
+ Object* table_object = weak_map->table();
+ if (!table_object->IsHashTable()) return;
+ ObjectHashTable* table = ObjectHashTable::cast(table_object);
+ Object** table_slot =
+ HeapObject::RawField(weak_map, JSWeakMap::kTableOffset);
+ MarkBit table_mark = Marking::MarkBitFrom(table);
+ collector->RecordSlot(table_slot, table_slot, table);
+ if (!table_mark.Get()) collector->SetMark(table, table_mark);
+ // Recording the map slot can be skipped, because maps are not compacted.
collector->MarkObject(table->map(), Marking::MarkBitFrom(table->map()));
ASSERT(MarkCompactCollector::IsMarked(table->map()));
}
@@ -1179,16 +1180,7 @@ class StaticMarkingVisitor : public StaticVisitorBase {
Heap* heap = map->GetHeap();
Code* code = reinterpret_cast<Code*>(object);
if (FLAG_cleanup_code_caches_at_gc) {
- Object* raw_info = code->type_feedback_info();
- if (raw_info->IsTypeFeedbackInfo()) {
- TypeFeedbackCells* type_feedback_cells =
- TypeFeedbackInfo::cast(raw_info)->type_feedback_cells();
- for (int i = 0; i < type_feedback_cells->CellCount(); i++) {
- ASSERT(type_feedback_cells->AstId(i)->IsSmi());
- JSGlobalPropertyCell* cell = type_feedback_cells->Cell(i);
- cell->set_value(TypeFeedbackCells::RawUninitializedSentinel(heap));
- }
- }
+ code->ClearTypeFeedbackCells(heap);
}
code->CodeIterateBody<StaticMarkingVisitor>(heap);
}
@@ -1390,6 +1382,12 @@ class StaticMarkingVisitor : public StaticVisitorBase {
static void VisitSharedFunctionInfoAndFlushCode(Map* map,
HeapObject* object) {
+ Heap* heap = map->GetHeap();
+ SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);
+ if (shared->ic_age() != heap->global_ic_age()) {
+ shared->ResetForNewContext(heap->global_ic_age());
+ }
+
MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
if (!collector->is_code_flushing_enabled()) {
VisitSharedFunctionInfoGeneric(map, object);
@@ -1406,10 +1404,6 @@ class StaticMarkingVisitor : public StaticVisitorBase {
if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap();
- if (shared->ic_age() != heap->global_ic_age()) {
- shared->ResetForNewContext(heap->global_ic_age());
- }
-
if (!known_flush_code_candidate) {
known_flush_code_candidate = IsFlushable(heap, shared);
if (known_flush_code_candidate) {
@@ -1523,12 +1517,6 @@ class StaticMarkingVisitor : public StaticVisitorBase {
JSFunction::kCodeEntryOffset + kPointerSize),
HeapObject::RawField(object,
JSFunction::kNonWeakFieldsEndOffset));
-
- // Don't visit the next function list field as it is a weak reference.
- Object** next_function =
- HeapObject::RawField(object, JSFunction::kNextFunctionLinkOffset);
- heap->mark_compact_collector()->RecordSlot(
- next_function, next_function, *next_function);
}
static inline void VisitJSRegExpFields(Map* map,
@@ -1805,11 +1793,11 @@ void MarkCompactCollector::ProcessNewlyMarkedObject(HeapObject* object) {
heap_->ClearCacheOnMap(map);
// When map collection is enabled we have to mark through map's transitions
- // in a special way to make transition links weak.
- // Only maps for subclasses of JSReceiver can have transitions.
+ // in a special way to make transition links weak. Only maps for subclasses
+ // of JSReceiver can have transitions.
STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
- if (collect_maps_ && map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
- MarkMapContents(map);
+ if (FLAG_collect_maps && map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
+ marker_.MarkMapContents(map);
} else {
marking_deque_.PushBlack(map);
}
@@ -1819,79 +1807,113 @@ void MarkCompactCollector::ProcessNewlyMarkedObject(HeapObject* object) {
}
-void MarkCompactCollector::MarkMapContents(Map* map) {
+// Force instantiation of template instances.
+template void Marker<IncrementalMarking>::MarkMapContents(Map* map);
+template void Marker<MarkCompactCollector>::MarkMapContents(Map* map);
+
+
+template <class T>
+void Marker<T>::MarkMapContents(Map* map) {
// Mark prototype transitions array but don't push it into marking stack.
// This will make references from it weak. We will clean dead prototype
// transitions in ClearNonLiveTransitions.
- FixedArray* prototype_transitions = map->prototype_transitions();
- MarkBit mark = Marking::MarkBitFrom(prototype_transitions);
- if (!mark.Get()) {
- mark.Set();
- MemoryChunk::IncrementLiveBytesFromGC(prototype_transitions->address(),
- prototype_transitions->Size());
+ Object** proto_trans_slot =
+ HeapObject::RawField(map, Map::kPrototypeTransitionsOrBackPointerOffset);
+ HeapObject* prototype_transitions = HeapObject::cast(*proto_trans_slot);
+ if (prototype_transitions->IsFixedArray()) {
+ mark_compact_collector()->RecordSlot(proto_trans_slot,
+ proto_trans_slot,
+ prototype_transitions);
+ MarkBit mark = Marking::MarkBitFrom(prototype_transitions);
+ if (!mark.Get()) {
+ mark.Set();
+ MemoryChunk::IncrementLiveBytesFromGC(prototype_transitions->address(),
+ prototype_transitions->Size());
+ }
}
- Object** raw_descriptor_array_slot =
+ // Make sure that the back pointer stored either in the map itself or inside
+ // its prototype transitions array is marked. Treat pointers in the descriptor
+ // array as weak and also mark that array to prevent visiting it later.
+ base_marker()->MarkObjectAndPush(HeapObject::cast(map->GetBackPointer()));
+
+ Object** descriptor_array_slot =
HeapObject::RawField(map, Map::kInstanceDescriptorsOrBitField3Offset);
- Object* raw_descriptor_array = *raw_descriptor_array_slot;
- if (!raw_descriptor_array->IsSmi()) {
- MarkDescriptorArray(
- reinterpret_cast<DescriptorArray*>(raw_descriptor_array));
+ Object* descriptor_array = *descriptor_array_slot;
+ if (!descriptor_array->IsSmi()) {
+ MarkDescriptorArray(reinterpret_cast<DescriptorArray*>(descriptor_array));
+ }
+
+ // Mark the Object* fields of the Map. Since the descriptor array has been
+ // marked already, it is fine that one of these fields contains a pointer
+ // to it. But make sure to skip back pointer and prototype transitions.
+ STATIC_ASSERT(Map::kPointerFieldsEndOffset ==
+ Map::kPrototypeTransitionsOrBackPointerOffset + kPointerSize);
+ Object** start_slot = HeapObject::RawField(
+ map, Map::kPointerFieldsBeginOffset);
+ Object** end_slot = HeapObject::RawField(
+ map, Map::kPrototypeTransitionsOrBackPointerOffset);
+ for (Object** slot = start_slot; slot < end_slot; slot++) {
+ Object* obj = *slot;
+ if (!obj->NonFailureIsHeapObject()) continue;
+ mark_compact_collector()->RecordSlot(start_slot, slot, obj);
+ base_marker()->MarkObjectAndPush(reinterpret_cast<HeapObject*>(obj));
}
-
- // Mark the Object* fields of the Map.
- // Since the descriptor array has been marked already, it is fine
- // that one of these fields contains a pointer to it.
- Object** start_slot = HeapObject::RawField(map,
- Map::kPointerFieldsBeginOffset);
-
- Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset);
-
- StaticMarkingVisitor::VisitPointers(map->GetHeap(), start_slot, end_slot);
}
-void MarkCompactCollector::MarkAccessorPairSlot(HeapObject* accessors,
- int offset) {
- Object** slot = HeapObject::RawField(accessors, offset);
- HeapObject* accessor = HeapObject::cast(*slot);
- if (accessor->IsMap()) return;
- RecordSlot(slot, slot, accessor);
- MarkObjectAndPush(accessor);
-}
+template <class T>
+void Marker<T>::MarkDescriptorArray(DescriptorArray* descriptors) {
+ // Empty descriptor array is marked as a root before any maps are marked.
+ ASSERT(descriptors != descriptors->GetHeap()->empty_descriptor_array());
+
+ if (!base_marker()->MarkObjectWithoutPush(descriptors)) return;
+ Object** descriptor_start = descriptors->data_start();
+
+ // Since the descriptor array itself is not pushed for scanning, all fields
+ // that point to objects have to be manually pushed, marked, and their slots
+ // recorded.
+ if (descriptors->HasEnumCache()) {
+ Object** enum_cache_slot = descriptors->GetEnumCacheSlot();
+ Object* enum_cache = *enum_cache_slot;
+ base_marker()->MarkObjectAndPush(
+ reinterpret_cast<HeapObject*>(enum_cache));
+ mark_compact_collector()->RecordSlot(descriptor_start,
+ enum_cache_slot,
+ enum_cache);
+ }
+
+ // TODO(verwaest) Make sure we free unused transitions.
+ if (descriptors->elements_transition_map() != NULL) {
+ Object** transitions_slot = descriptors->GetTransitionsSlot();
+ Object* transitions = *transitions_slot;
+ base_marker()->MarkObjectAndPush(
+ reinterpret_cast<HeapObject*>(transitions));
+ mark_compact_collector()->RecordSlot(descriptor_start,
+ transitions_slot,
+ transitions);
+ }
+
+ // If the descriptor contains a transition (value is a Map), we don't mark the
+ // value as live. It might be set to the NULL_DESCRIPTOR in
+ // ClearNonLiveTransitions later.
+ for (int i = 0; i < descriptors->number_of_descriptors(); ++i) {
+ Object** key_slot = descriptors->GetKeySlot(i);
+ Object* key = *key_slot;
+ if (key->IsHeapObject()) {
+ base_marker()->MarkObjectAndPush(reinterpret_cast<HeapObject*>(key));
+ mark_compact_collector()->RecordSlot(descriptor_start, key_slot, key);
+ }
+ Object** value_slot = descriptors->GetValueSlot(i);
+ if (!(*value_slot)->IsHeapObject()) continue;
+ HeapObject* value = HeapObject::cast(*value_slot);
-void MarkCompactCollector::MarkDescriptorArray(
- DescriptorArray* descriptors) {
- MarkBit descriptors_mark = Marking::MarkBitFrom(descriptors);
- if (descriptors_mark.Get()) return;
- // Empty descriptor array is marked as a root before any maps are marked.
- ASSERT(descriptors != heap()->empty_descriptor_array());
- SetMark(descriptors, descriptors_mark);
-
- FixedArray* contents = reinterpret_cast<FixedArray*>(
- descriptors->get(DescriptorArray::kContentArrayIndex));
- ASSERT(contents->IsHeapObject());
- ASSERT(!IsMarked(contents));
- ASSERT(contents->IsFixedArray());
- ASSERT(contents->length() >= 2);
- MarkBit contents_mark = Marking::MarkBitFrom(contents);
- SetMark(contents, contents_mark);
- // Contents contains (value, details) pairs. If the details say that the type
- // of descriptor is MAP_TRANSITION, CONSTANT_TRANSITION,
- // EXTERNAL_ARRAY_TRANSITION or NULL_DESCRIPTOR, we don't mark the value as
- // live. Only for MAP_TRANSITION, EXTERNAL_ARRAY_TRANSITION and
- // CONSTANT_TRANSITION is the value an Object* (a Map*).
- for (int i = 0; i < contents->length(); i += 2) {
- // If the pair (value, details) at index i, i+1 is not
- // a transition or null descriptor, mark the value.
- PropertyDetails details(Smi::cast(contents->get(i + 1)));
-
- Object** slot = contents->data_start() + i;
- if (!(*slot)->IsHeapObject()) continue;
- HeapObject* value = HeapObject::cast(*slot);
-
- RecordSlot(slot, slot, *slot);
+ mark_compact_collector()->RecordSlot(descriptor_start,
+ value_slot,
+ value);
+
+ PropertyDetails details(descriptors->GetDetails(i));
switch (details.type()) {
case NORMAL:
@@ -1899,48 +1921,33 @@ void MarkCompactCollector::MarkDescriptorArray(
case CONSTANT_FUNCTION:
case HANDLER:
case INTERCEPTOR:
- MarkObjectAndPush(value);
+ base_marker()->MarkObjectAndPush(value);
break;
case CALLBACKS:
if (!value->IsAccessorPair()) {
- MarkObjectAndPush(value);
- } else if (!MarkObjectWithoutPush(value)) {
- MarkAccessorPairSlot(value, AccessorPair::kGetterOffset);
- MarkAccessorPairSlot(value, AccessorPair::kSetterOffset);
+ base_marker()->MarkObjectAndPush(value);
+ } else if (base_marker()->MarkObjectWithoutPush(value)) {
+ AccessorPair* accessors = AccessorPair::cast(value);
+ MarkAccessorPairSlot(accessors, AccessorPair::kGetterOffset);
+ MarkAccessorPairSlot(accessors, AccessorPair::kSetterOffset);
}
break;
- case ELEMENTS_TRANSITION:
- // For maps with multiple elements transitions, the transition maps are
- // stored in a FixedArray. Keep the fixed array alive but not the maps
- // that it refers to.
- if (value->IsFixedArray()) MarkObjectWithoutPush(value);
- break;
case MAP_TRANSITION:
case CONSTANT_TRANSITION:
case NULL_DESCRIPTOR:
break;
}
}
- // The DescriptorArray descriptors contains a pointer to its contents array,
- // but the contents array is already marked.
- marking_deque_.PushBlack(descriptors);
}
-void MarkCompactCollector::CreateBackPointers() {
- HeapObjectIterator iterator(heap()->map_space());
- for (HeapObject* next_object = iterator.Next();
- next_object != NULL; next_object = iterator.Next()) {
- if (next_object->IsMap()) { // Could also be FreeSpace object on free list.
- Map* map = Map::cast(next_object);
- STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
- if (map->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
- map->CreateBackPointers();
- } else {
- ASSERT(map->instance_descriptors() == heap()->empty_descriptor_array());
- }
- }
- }
+template <class T>
+void Marker<T>::MarkAccessorPairSlot(AccessorPair* accessors, int offset) {
+ Object** slot = HeapObject::RawField(accessors, offset);
+ HeapObject* accessor = HeapObject::cast(*slot);
+ if (accessor->IsMap()) return;
+ mark_compact_collector()->RecordSlot(slot, slot, accessor);
+ base_marker()->MarkObjectAndPush(accessor);
}
@@ -1974,6 +1981,7 @@ static inline int MarkWordToObjectStarts(uint32_t mark_bits, int* starts);
static void DiscoverGreyObjectsOnPage(MarkingDeque* marking_deque, Page* p) {
+ ASSERT(!marking_deque->IsFull());
ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0);
ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0);
@@ -2466,15 +2474,8 @@ void MarkCompactCollector::ReattachInitialMaps() {
void MarkCompactCollector::ClearNonLiveTransitions() {
HeapObjectIterator map_iterator(heap()->map_space());
// Iterate over the map space, setting map transitions that go from
- // a marked map to an unmarked map to null transitions. At the same time,
- // set all the prototype fields of maps back to their original value,
- // dropping the back pointers temporarily stored in the prototype field.
- // Setting the prototype field requires following the linked list of
- // back pointers, reversing them all at once. This allows us to find
- // those maps with map transitions that need to be nulled, and only
- // scan the descriptor arrays of those maps, not all maps.
- // All of these actions are carried out only on maps of JSObjects
- // and related subtypes.
+ // a marked map to an unmarked map to null transitions. This action
+ // is carried out only on maps of JSObjects and related subtypes.
for (HeapObject* obj = map_iterator.Next();
obj != NULL; obj = map_iterator.Next()) {
Map* map = reinterpret_cast<Map*>(obj);
@@ -2550,36 +2551,16 @@ void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) {
void MarkCompactCollector::ClearNonLiveMapTransitions(Map* map,
MarkBit map_mark) {
- // Follow the chain of back pointers to find the prototype.
- Object* real_prototype = map;
- while (real_prototype->IsMap()) {
- real_prototype = Map::cast(real_prototype)->prototype();
- ASSERT(real_prototype->IsHeapObject());
- }
+ Object* potential_parent = map->GetBackPointer();
+ if (!potential_parent->IsMap()) return;
+ Map* parent = Map::cast(potential_parent);
- // Follow back pointers, setting them to prototype, clearing map transitions
- // when necessary.
- Map* current = map;
+ // Follow the back pointer, check whether we are dealing with a map transition
+ // from a live map to a dead path, and if so clear the parent's transitions.
bool current_is_alive = map_mark.Get();
- bool on_dead_path = !current_is_alive;
- while (current->IsMap()) {
- Object* next = current->prototype();
- // There should never be a dead map above a live map.
- ASSERT(on_dead_path || current_is_alive);
-
- // A live map above a dead map indicates a dead transition. This test will
- // always be false on the first iteration.
- if (on_dead_path && current_is_alive) {
- on_dead_path = false;
- current->ClearNonLiveTransitions(heap(), real_prototype);
- }
-
- Object** slot = HeapObject::RawField(current, Map::kPrototypeOffset);
- *slot = real_prototype;
- if (current_is_alive) RecordSlot(slot, slot, real_prototype);
-
- current = reinterpret_cast<Map*>(next);
- current_is_alive = Marking::MarkBitFrom(current).Get();
+ bool parent_is_alive = Marking::MarkBitFrom(parent).Get();
+ if (!current_is_alive && parent_is_alive) {
+ parent->ClearNonLiveTransitions(heap());
}
}
@@ -2590,14 +2571,17 @@ void MarkCompactCollector::ProcessWeakMaps() {
ASSERT(MarkCompactCollector::IsMarked(HeapObject::cast(weak_map_obj)));
JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj);
ObjectHashTable* table = ObjectHashTable::cast(weak_map->table());
+ Object** anchor = reinterpret_cast<Object**>(table->address());
for (int i = 0; i < table->Capacity(); i++) {
if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) {
- Object* value = table->get(table->EntryToValueIndex(i));
- StaticMarkingVisitor::VisitPointer(heap(), &value);
- table->set_unchecked(heap(),
- table->EntryToValueIndex(i),
- value,
- UPDATE_WRITE_BARRIER);
+ Object** key_slot =
+ HeapObject::RawField(table, FixedArray::OffsetOfElementAt(
+ ObjectHashTable::EntryToIndex(i)));
+ RecordSlot(anchor, key_slot, *key_slot);
+ Object** value_slot =
+ HeapObject::RawField(table, FixedArray::OffsetOfElementAt(
+ ObjectHashTable::EntryToValueIndex(i)));
+ StaticMarkingVisitor::MarkObjectByPointer(this, anchor, value_slot);
}
}
weak_map_obj = weak_map->next();
@@ -2775,7 +2759,9 @@ static void UpdatePointer(HeapObject** p, HeapObject* object) {
// We have to zap this pointer, because the store buffer may overflow later,
// and then we have to scan the entire heap and we don't want to find
// spurious newspace pointers in the old space.
- *p = reinterpret_cast<HeapObject*>(Smi::FromInt(0));
+ // TODO(mstarzinger): This was changed to a sentinel value to track down
+ // rare crashes, change it back to Smi::FromInt(0) later.
+ *p = reinterpret_cast<HeapObject*>(Smi::FromInt(0x0f100d00 >> 1)); // flood
}
}
@@ -3417,6 +3403,8 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
// under it.
ProcessInvalidatedCode(&updating_visitor);
+ heap_->isolate()->inner_pointer_to_code_cache()->Flush();
+
#ifdef DEBUG
if (FLAG_verify_heap) {
VerifyEvacuation(heap_);
@@ -3829,7 +3817,7 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) {
bool lazy_sweeping_active = false;
bool unused_page_present = false;
- intptr_t old_space_size = heap()->PromotedSpaceSize();
+ intptr_t old_space_size = heap()->PromotedSpaceSizeOfObjects();
intptr_t space_left =
Min(heap()->OldGenPromotionLimit(old_space_size),
heap()->OldGenAllocationLimit(old_space_size)) - old_space_size;
diff --git a/deps/v8/src/mark-compact.h b/deps/v8/src/mark-compact.h
index 66ffd19535..dbc28697f0 100644
--- a/deps/v8/src/mark-compact.h
+++ b/deps/v8/src/mark-compact.h
@@ -42,6 +42,7 @@ typedef bool (*IsAliveFunction)(HeapObject* obj, int* size, int* offset);
// Forward declarations.
class CodeFlusher;
class GCTracer;
+class MarkCompactCollector;
class MarkingVisitor;
class RootMarkingVisitor;
@@ -166,7 +167,6 @@ class Marking {
// ----------------------------------------------------------------------------
// Marking deque for tracing live objects.
-
class MarkingDeque {
public:
MarkingDeque()
@@ -383,6 +383,34 @@ class SlotsBuffer {
};
+// -------------------------------------------------------------------------
+// Marker shared between incremental and non-incremental marking
+template<class BaseMarker> class Marker {
+ public:
+ Marker(BaseMarker* base_marker, MarkCompactCollector* mark_compact_collector)
+ : base_marker_(base_marker),
+ mark_compact_collector_(mark_compact_collector) {}
+
+ // Mark pointers in a Map and its DescriptorArray together, possibly
+ // treating transitions or back pointers as weak.
+ void MarkMapContents(Map* map);
+ void MarkDescriptorArray(DescriptorArray* descriptors);
+ void MarkAccessorPairSlot(AccessorPair* accessors, int offset);
+
+ private:
+ BaseMarker* base_marker() {
+ return base_marker_;
+ }
+
+ MarkCompactCollector* mark_compact_collector() {
+ return mark_compact_collector_;
+ }
+
+ BaseMarker* base_marker_;
+ MarkCompactCollector* mark_compact_collector_;
+};
+
+
// Defined in isolate.h.
class ThreadLocalTop;
@@ -544,6 +572,8 @@ class MarkCompactCollector {
void ClearMarkbits();
+ bool is_compacting() const { return compacting_; }
+
private:
MarkCompactCollector();
~MarkCompactCollector();
@@ -582,8 +612,6 @@ class MarkCompactCollector {
bool was_marked_incrementally_;
- bool collect_maps_;
-
bool flush_monomorphic_ics_;
// A pointer to the current stack-allocated GC tracer object during a full
@@ -606,12 +634,13 @@ class MarkCompactCollector {
//
// After: Live objects are marked and non-live objects are unmarked.
-
friend class RootMarkingVisitor;
friend class MarkingVisitor;
friend class StaticMarkingVisitor;
friend class CodeMarkingVisitor;
friend class SharedFunctionInfoMarkingVisitor;
+ friend class Marker<IncrementalMarking>;
+ friend class Marker<MarkCompactCollector>;
// Mark non-optimize code for functions inlined into the given optimized
// code. This will prevent it from being flushed.
@@ -629,29 +658,25 @@ class MarkCompactCollector {
void AfterMarking();
// Marks the object black and pushes it on the marking stack.
- // This is for non-incremental marking.
+ // Returns true if object needed marking and false otherwise.
+ // This is for non-incremental marking only.
+ INLINE(bool MarkObjectAndPush(HeapObject* obj));
+
+ // Marks the object black and pushes it on the marking stack.
+ // This is for non-incremental marking only.
INLINE(void MarkObject(HeapObject* obj, MarkBit mark_bit));
- INLINE(bool MarkObjectWithoutPush(HeapObject* object));
- INLINE(void MarkObjectAndPush(HeapObject* value));
+ // Marks the object black without pushing it on the marking stack.
+ // Returns true if object needed marking and false otherwise.
+ // This is for non-incremental marking only.
+ INLINE(bool MarkObjectWithoutPush(HeapObject* obj));
- // Marks the object black. This is for non-incremental marking.
+ // Marks the object black assuming that it is not yet marked.
+ // This is for non-incremental marking only.
INLINE(void SetMark(HeapObject* obj, MarkBit mark_bit));
void ProcessNewlyMarkedObject(HeapObject* obj);
- // Creates back pointers for all map transitions, stores them in
- // the prototype field. The original prototype pointers are restored
- // in ClearNonLiveTransitions(). All JSObject maps
- // connected by map transitions have the same prototype object, which
- // is why we can use this field temporarily for back pointers.
- void CreateBackPointers();
-
- // Mark a Map and its DescriptorArray together, skipping transitions.
- void MarkMapContents(Map* map);
- void MarkAccessorPairSlot(HeapObject* accessors, int offset);
- void MarkDescriptorArray(DescriptorArray* descriptors);
-
// Mark the heap roots and all objects reachable from them.
void MarkRoots(RootMarkingVisitor* visitor);
@@ -754,6 +779,7 @@ class MarkCompactCollector {
MarkingDeque marking_deque_;
CodeFlusher* code_flusher_;
Object* encountered_weak_maps_;
+ Marker<MarkCompactCollector> marker_;
List<Page*> evacuation_candidates_;
List<Code*> invalidated_code_;
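
The new Marker<BaseMarker> declared above factors the map and descriptor-array walking out of MarkCompactCollector so the same code can run on top of either the full collector or IncrementalMarking, which only have to supply the marking primitives. A stand-in sketch of that shape; the types are invented for illustration and are not the V8 classes.

    struct FakeObject { bool marked = false; };

    // One possible BaseMarker: provides the primitive the shared code needs.
    class FullMarker {
     public:
      bool MarkObjectAndPush(FakeObject* o) {
        if (o->marked) return false;
        o->marked = true;
        return true;
      }
    };

    // Shared walking code, parameterized over the marker that does the work.
    template <class BaseMarker>
    class Marker {
     public:
      explicit Marker(BaseMarker* base) : base_(base) {}
      void MarkMapContents(FakeObject* back_pointer) {
        base_->MarkObjectAndPush(back_pointer);  // back pointers stay strongly marked
      }
     private:
      BaseMarker* base_;
    };

    int main() {
      FullMarker full;
      Marker<FullMarker> marker(&full);
      FakeObject back;
      marker.MarkMapContents(&back);
      return back.marked ? 0 : 1;
    }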
diff --git a/deps/v8/src/math.js b/deps/v8/src/math.js
index 8e735c4a68..aee56af4f9 100644
--- a/deps/v8/src/math.js
+++ b/deps/v8/src/math.js
@@ -30,7 +30,6 @@
// has the added benefit that the code in this file is isolated from
// changes to these properties.
var $floor = MathFloor;
-var $random = MathRandom;
var $abs = MathAbs;
// Instance class name can only be set on functions. That is the only
diff --git a/deps/v8/src/messages.js b/deps/v8/src/messages.js
index a3adcf8634..2a00ba8469 100644
--- a/deps/v8/src/messages.js
+++ b/deps/v8/src/messages.js
@@ -61,18 +61,21 @@ function FormatString(format, message) {
// To check if something is a native error we need to check the
-// concrete native error types. It is not enough to check "obj
-// instanceof $Error" because user code can replace
-// NativeError.prototype.__proto__. User code cannot replace
-// NativeError.prototype though and therefore this is a safe test.
+// concrete native error types. It is not sufficient to use instanceof
+// since it is possible to create an object that has Error.prototype on
+// its prototype chain. This is the case for DOMException for example.
function IsNativeErrorObject(obj) {
- return (obj instanceof $Error) ||
- (obj instanceof $EvalError) ||
- (obj instanceof $RangeError) ||
- (obj instanceof $ReferenceError) ||
- (obj instanceof $SyntaxError) ||
- (obj instanceof $TypeError) ||
- (obj instanceof $URIError);
+ switch (%_ClassOf(obj)) {
+ case 'Error':
+ case 'EvalError':
+ case 'RangeError':
+ case 'ReferenceError':
+ case 'SyntaxError':
+ case 'TypeError':
+ case 'URIError':
+ return true;
+ }
+ return false;
}
@@ -745,7 +748,7 @@ function GetPositionInLine(message) {
function GetStackTraceLine(recv, fun, pos, isGlobal) {
- return FormatSourcePosition(new CallSite(recv, fun, pos));
+ return new CallSite(recv, fun, pos).toString();
}
// ----------------------------------------------------------------------------
@@ -785,15 +788,7 @@ function CallSiteGetThis() {
}
function CallSiteGetTypeName() {
- var constructor = this.receiver.constructor;
- if (!constructor) {
- return %_CallFunction(this.receiver, ObjectToString);
- }
- var constructorName = constructor.name;
- if (!constructorName) {
- return %_CallFunction(this.receiver, ObjectToString);
- }
- return constructorName;
+ return GetTypeName(this, false);
}
function CallSiteIsToplevel() {
@@ -827,8 +822,10 @@ function CallSiteGetFunctionName() {
var name = this.fun.name;
if (name) {
return name;
- } else {
- return %FunctionGetInferredName(this.fun);
+ }
+ name = %FunctionGetInferredName(this.fun);
+ if (name) {
+ return name;
}
// Maybe this is an evaluation?
var script = %FunctionGetScript(this.fun);
@@ -919,6 +916,69 @@ function CallSiteIsConstructor() {
return this.fun === constructor;
}
+function CallSiteToString() {
+ var fileName;
+ var fileLocation = "";
+ if (this.isNative()) {
+ fileLocation = "native";
+ } else if (this.isEval()) {
+ fileName = this.getScriptNameOrSourceURL();
+ if (!fileName) {
+ fileLocation = this.getEvalOrigin();
+ }
+ } else {
+ fileName = this.getFileName();
+ }
+
+ if (fileName) {
+ fileLocation += fileName;
+ var lineNumber = this.getLineNumber();
+ if (lineNumber != null) {
+ fileLocation += ":" + lineNumber;
+ var columnNumber = this.getColumnNumber();
+ if (columnNumber) {
+ fileLocation += ":" + columnNumber;
+ }
+ }
+ }
+
+ if (!fileLocation) {
+ fileLocation = "unknown source";
+ }
+ var line = "";
+ var functionName = this.getFunctionName();
+ var addSuffix = true;
+ var isConstructor = this.isConstructor();
+ var isMethodCall = !(this.isToplevel() || isConstructor);
+ if (isMethodCall) {
+ var typeName = GetTypeName(this, true);
+ var methodName = this.getMethodName();
+ if (functionName) {
+ if (typeName && functionName.indexOf(typeName) != 0) {
+ line += typeName + ".";
+ }
+ line += functionName;
+ if (methodName && functionName.lastIndexOf("." + methodName) !=
+ functionName.length - methodName.length - 1) {
+ line += " [as " + methodName + "]";
+ }
+ } else {
+ line += typeName + "." + (methodName || "<anonymous>");
+ }
+ } else if (isConstructor) {
+ line += "new " + (functionName || "<anonymous>");
+ } else if (functionName) {
+ line += functionName;
+ } else {
+ line += fileLocation;
+ addSuffix = false;
+ }
+ if (addSuffix) {
+ line += " (" + fileLocation + ")";
+ }
+ return line;
+}
+
SetUpLockedPrototype(CallSite, $Array("receiver", "fun", "pos"), $Array(
"getThis", CallSiteGetThis,
"getTypeName", CallSiteGetTypeName,
@@ -934,7 +994,8 @@ SetUpLockedPrototype(CallSite, $Array("receiver", "fun", "pos"), $Array(
"getColumnNumber", CallSiteGetColumnNumber,
"isNative", CallSiteIsNative,
"getPosition", CallSiteGetPosition,
- "isConstructor", CallSiteIsConstructor
+ "isConstructor", CallSiteIsConstructor,
+ "toString", CallSiteToString
));
@@ -976,65 +1037,6 @@ function FormatEvalOrigin(script) {
return eval_origin;
}
-function FormatSourcePosition(frame) {
- var fileName;
- var fileLocation = "";
- if (frame.isNative()) {
- fileLocation = "native";
- } else if (frame.isEval()) {
- fileName = frame.getScriptNameOrSourceURL();
- if (!fileName) {
- fileLocation = frame.getEvalOrigin();
- }
- } else {
- fileName = frame.getFileName();
- }
-
- if (fileName) {
- fileLocation += fileName;
- var lineNumber = frame.getLineNumber();
- if (lineNumber != null) {
- fileLocation += ":" + lineNumber;
- var columnNumber = frame.getColumnNumber();
- if (columnNumber) {
- fileLocation += ":" + columnNumber;
- }
- }
- }
-
- if (!fileLocation) {
- fileLocation = "unknown source";
- }
- var line = "";
- var functionName = frame.getFunction().name;
- var addPrefix = true;
- var isConstructor = frame.isConstructor();
- var isMethodCall = !(frame.isToplevel() || isConstructor);
- if (isMethodCall) {
- var methodName = frame.getMethodName();
- line += frame.getTypeName() + ".";
- if (functionName) {
- line += functionName;
- if (methodName && (methodName != functionName)) {
- line += " [as " + methodName + "]";
- }
- } else {
- line += methodName || "<anonymous>";
- }
- } else if (isConstructor) {
- line += "new " + (functionName || "<anonymous>");
- } else if (functionName) {
- line += functionName;
- } else {
- line += fileLocation;
- addPrefix = false;
- }
- if (addPrefix) {
- line += " (" + fileLocation + ")";
- }
- return line;
-}
-
function FormatStackTrace(error, frames) {
var lines = [];
try {
@@ -1050,7 +1052,7 @@ function FormatStackTrace(error, frames) {
var frame = frames[i];
var line;
try {
- line = FormatSourcePosition(frame);
+ line = frame.toString();
} catch (e) {
try {
line = "<error: " + e + ">";
@@ -1081,6 +1083,19 @@ function FormatRawStackTrace(error, raw_stack) {
}
}
+function GetTypeName(obj, requireConstructor) {
+ var constructor = obj.receiver.constructor;
+ if (!constructor) {
+ return requireConstructor ? null :
+ %_CallFunction(obj.receiver, ObjectToString);
+ }
+ var constructorName = constructor.name;
+ if (!constructorName) {
+ return requireConstructor ? null :
+ %_CallFunction(obj.receiver, ObjectToString);
+ }
+ return constructorName;
+}
function captureStackTrace(obj, cons_opt) {
var stackTraceLimit = $Error.stackTraceLimit;
@@ -1125,13 +1140,7 @@ function SetUpError() {
}
%FunctionSetInstanceClassName(f, 'Error');
%SetProperty(f.prototype, 'constructor', f, DONT_ENUM);
- // The name property on the prototype of error objects is not
- // specified as being read-one and dont-delete. However, allowing
- // overwriting allows leaks of error objects between script blocks
- // in the same context in a browser setting. Therefore we fix the
- // name.
- %SetProperty(f.prototype, "name", name,
- DONT_ENUM | DONT_DELETE | READ_ONLY) ;
+ %SetProperty(f.prototype, "name", name, DONT_ENUM);
%SetCode(f, function(m) {
if (%_IsConstructCall()) {
// Define all the expected properties directly on the error
@@ -1147,10 +1156,8 @@ function SetUpError() {
return FormatMessage(%NewMessageObject(obj.type, obj.arguments));
});
} else if (!IS_UNDEFINED(m)) {
- %IgnoreAttributesAndSetProperty(this,
- 'message',
- ToString(m),
- DONT_ENUM);
+ %IgnoreAttributesAndSetProperty(
+ this, 'message', ToString(m), DONT_ENUM);
}
captureStackTrace(this, f);
} else {
@@ -1180,16 +1187,41 @@ $Error.captureStackTrace = captureStackTrace;
var visited_errors = new InternalArray();
var cyclic_error_marker = new $Object();
+function GetPropertyWithoutInvokingMonkeyGetters(error, name) {
+ // Climb the prototype chain until we find the holder.
+ while (error && !%HasLocalProperty(error, name)) {
+ error = error.__proto__;
+ }
+ if (error === null) return void 0;
+ if (!IS_OBJECT(error)) return error[name];
+ // If the property is an accessor on one of the predefined errors that can be
+ // generated statically by the compiler, don't touch it. This is to address
+ // http://code.google.com/p/chromium/issues/detail?id=69187
+ var desc = %GetOwnProperty(error, name);
+ if (desc && desc[IS_ACCESSOR_INDEX]) {
+ var isName = name === "name";
+ if (error === $ReferenceError.prototype)
+ return isName ? "ReferenceError" : void 0;
+ if (error === $SyntaxError.prototype)
+ return isName ? "SyntaxError" : void 0;
+ if (error === $TypeError.prototype)
+ return isName ? "TypeError" : void 0;
+ }
+ // Otherwise, read normally.
+ return error[name];
+}
+
function ErrorToStringDetectCycle(error) {
if (!%PushIfAbsent(visited_errors, error)) throw cyclic_error_marker;
try {
- var type = error.type;
- var name = error.name;
+ var type = GetPropertyWithoutInvokingMonkeyGetters(error, "type");
+ var name = GetPropertyWithoutInvokingMonkeyGetters(error, "name");
name = IS_UNDEFINED(name) ? "Error" : TO_STRING_INLINE(name);
- var message = error.message;
+ var message = GetPropertyWithoutInvokingMonkeyGetters(error, "message");
var hasMessage = %_CallFunction(error, "message", ObjectHasOwnProperty);
if (type && !hasMessage) {
- message = FormatMessage(%NewMessageObject(type, error.arguments));
+ var args = GetPropertyWithoutInvokingMonkeyGetters(error, "arguments");
+ message = FormatMessage(%NewMessageObject(type, args));
}
message = IS_UNDEFINED(message) ? "" : TO_STRING_INLINE(message);
if (name === "") return message;
diff --git a/deps/v8/src/mips/builtins-mips.cc b/deps/v8/src/mips/builtins-mips.cc
index eeb84c3a94..5a2074e652 100644
--- a/deps/v8/src/mips/builtins-mips.cc
+++ b/deps/v8/src/mips/builtins-mips.cc
@@ -118,7 +118,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
Label* gc_required) {
const int initial_capacity = JSArray::kPreallocatedArrayElements;
STATIC_ASSERT(initial_capacity >= 0);
- __ LoadInitialArrayMap(array_function, scratch2, scratch1);
+ __ LoadInitialArrayMap(array_function, scratch2, scratch1, false);
// Allocate the JSArray object together with space for a fixed array with the
// requested elements.
@@ -214,7 +214,8 @@ static void AllocateJSArray(MacroAssembler* masm,
bool fill_with_hole,
Label* gc_required) {
// Load the initial map from the array function.
- __ LoadInitialArrayMap(array_function, scratch2, elements_array_storage);
+ __ LoadInitialArrayMap(array_function, scratch2,
+ elements_array_storage, fill_with_hole);
if (FLAG_debug_code) { // Assert that array size is not zero.
__ Assert(
@@ -449,10 +450,10 @@ static void ArrayNativeCode(MacroAssembler* masm,
__ Branch(call_generic_code);
__ bind(&not_double);
- // Transition FAST_SMI_ONLY_ELEMENTS to FAST_ELEMENTS.
+ // Transition FAST_SMI_ELEMENTS to FAST_ELEMENTS.
// a3: JSArray
__ lw(a2, FieldMemOperand(a3, HeapObject::kMapOffset));
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
FAST_ELEMENTS,
a2,
t5,
diff --git a/deps/v8/src/mips/code-stubs-mips.cc b/deps/v8/src/mips/code-stubs-mips.cc
index 5719d2cca3..a7c259732a 100644
--- a/deps/v8/src/mips/code-stubs-mips.cc
+++ b/deps/v8/src/mips/code-stubs-mips.cc
@@ -3130,7 +3130,7 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
} else {
// Tail call that writes the int32 in a2 to the heap number in v0, using
// a3 and a0 as scratch. v0 is preserved and returned.
- __ mov(a0, t1);
+ __ mov(v0, t1);
WriteInt32ToHeapNumberStub stub(a2, v0, a3, a0);
__ TailCallStub(&stub);
}
@@ -5043,7 +5043,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
1, a0, a2);
// Isolates: note we add an additional parameter here (isolate pointer).
- const int kRegExpExecuteArguments = 8;
+ const int kRegExpExecuteArguments = 9;
const int kParameterRegisters = 4;
__ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters);
@@ -5054,27 +5054,33 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// allocating space for the c argument slots, we don't need to calculate
// that into the argument positions on the stack. This is how the stack will
// look (sp meaning the value of sp at this moment):
+ // [sp + 5] - Argument 9
// [sp + 4] - Argument 8
// [sp + 3] - Argument 7
// [sp + 2] - Argument 6
// [sp + 1] - Argument 5
// [sp + 0] - saved ra
- // Argument 8: Pass current isolate address.
+ // Argument 9: Pass current isolate address.
// CFunctionArgumentOperand handles MIPS stack argument slots.
__ li(a0, Operand(ExternalReference::isolate_address()));
- __ sw(a0, MemOperand(sp, 4 * kPointerSize));
+ __ sw(a0, MemOperand(sp, 5 * kPointerSize));
- // Argument 7: Indicate that this is a direct call from JavaScript.
+ // Argument 8: Indicate that this is a direct call from JavaScript.
__ li(a0, Operand(1));
- __ sw(a0, MemOperand(sp, 3 * kPointerSize));
+ __ sw(a0, MemOperand(sp, 4 * kPointerSize));
- // Argument 6: Start (high end) of backtracking stack memory area.
+ // Argument 7: Start (high end) of backtracking stack memory area.
__ li(a0, Operand(address_of_regexp_stack_memory_address));
__ lw(a0, MemOperand(a0, 0));
__ li(a2, Operand(address_of_regexp_stack_memory_size));
__ lw(a2, MemOperand(a2, 0));
__ addu(a0, a0, a2);
+ __ sw(a0, MemOperand(sp, 3 * kPointerSize));
+
+ // Argument 6: Set the number of capture registers to zero to force global
+ // regexps to behave as non-global. This does not affect non-global regexps.
+ __ mov(a0, zero_reg);
__ sw(a0, MemOperand(sp, 2 * kPointerSize));
// Argument 5: static offsets vector buffer.
@@ -5125,7 +5131,9 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Check the result.
Label success;
- __ Branch(&success, eq, v0, Operand(NativeRegExpMacroAssembler::SUCCESS));
+ __ Branch(&success, eq, v0, Operand(1));
+ // We expect exactly one result since we force the called regexp to behave
+ // as non-global.
Label failure;
__ Branch(&failure, eq, v0, Operand(NativeRegExpMacroAssembler::FAILURE));
// If not exception it can only be retry. Handle that in the runtime system.
@@ -5400,9 +5408,9 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
__ LoadRoot(at, Heap::kTheHoleValueRootIndex);
__ Branch(&call, ne, t0, Operand(at));
// Patch the receiver on the stack with the global receiver object.
- __ lw(a2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
- __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset));
- __ sw(a2, MemOperand(sp, argc_ * kPointerSize));
+ __ lw(a3, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ __ lw(a3, FieldMemOperand(a3, GlobalObject::kGlobalReceiverOffset));
+ __ sw(a3, MemOperand(sp, argc_ * kPointerSize));
__ bind(&call);
}
@@ -5410,8 +5418,12 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
// a1: pushed function (to be verified)
__ JumpIfSmi(a1, &non_function);
// Get the map of the function object.
- __ GetObjectType(a1, a2, a2);
- __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE));
+ __ GetObjectType(a1, a3, a3);
+ __ Branch(&slow, ne, a3, Operand(JS_FUNCTION_TYPE));
+
+ if (RecordCallTarget()) {
+ GenerateRecordCallTarget(masm);
+ }
// Fast-case: Invoke the function now.
// a1: pushed function
@@ -5436,8 +5448,17 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
// Slow-case: Non-function called.
__ bind(&slow);
+ if (RecordCallTarget()) {
+ // If there is a call target cache, mark it megamorphic in the
+ // non-function case. MegamorphicSentinel is an immortal immovable
+ // object (undefined) so no write barrier is needed.
+ ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()),
+ masm->isolate()->heap()->undefined_value());
+ __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
+ __ sw(at, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
+ }
// Check for function proxy.
- __ Branch(&non_function, ne, a2, Operand(JS_FUNCTION_PROXY_TYPE));
+ __ Branch(&non_function, ne, a3, Operand(JS_FUNCTION_PROXY_TYPE));
__ push(a1); // Put proxy as additional argument.
__ li(a0, Operand(argc_ + 1, RelocInfo::NONE));
__ li(a2, Operand(0, RelocInfo::NONE));
@@ -6093,37 +6114,11 @@ void SubStringStub::Generate(MacroAssembler* masm) {
// a2: result string length
__ lw(t0, FieldMemOperand(v0, String::kLengthOffset));
__ sra(t0, t0, 1);
+ // Return original string.
__ Branch(&return_v0, eq, a2, Operand(t0));
-
-
- Label result_longer_than_two;
- // Check for special case of two character ASCII string, in which case
- // we do a lookup in the symbol table first.
- __ li(t0, 2);
- __ Branch(&result_longer_than_two, gt, a2, Operand(t0));
- __ Branch(&runtime, lt, a2, Operand(t0));
-
- __ JumpIfInstanceTypeIsNotSequentialAscii(a1, a1, &runtime);
-
- // Get the two characters forming the sub string.
- __ Addu(v0, v0, Operand(a3));
- __ lbu(a3, FieldMemOperand(v0, SeqAsciiString::kHeaderSize));
- __ lbu(t0, FieldMemOperand(v0, SeqAsciiString::kHeaderSize + 1));
-
- // Try to lookup two character string in symbol table.
- Label make_two_character_string;
- StringHelper::GenerateTwoCharacterSymbolTableProbe(
- masm, a3, t0, a1, t1, t2, t3, t4, &make_two_character_string);
- __ jmp(&return_v0);
-
- // a2: result string length.
- // a3: two characters combined into halfword in little endian byte order.
- __ bind(&make_two_character_string);
- __ AllocateAsciiString(v0, a2, t0, t1, t4, &runtime);
- __ sh(a3, FieldMemOperand(v0, SeqAsciiString::kHeaderSize));
- __ jmp(&return_v0);
-
- __ bind(&result_longer_than_two);
+ // Longer than original string's length or negative: unsafe arguments.
+ __ Branch(&runtime, hi, a2, Operand(t0));
+ // Shorter than original string's length: an actual substring.
// Deal with different string types: update the index if necessary
// and put the underlying string into t1.
@@ -7375,8 +7370,8 @@ static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
// KeyedStoreStubCompiler::GenerateStoreFastElement.
{ REG(a3), REG(a2), REG(t0), EMIT_REMEMBERED_SET },
{ REG(a2), REG(a3), REG(t0), EMIT_REMEMBERED_SET },
- // ElementsTransitionGenerator::GenerateSmiOnlyToObject
- // and ElementsTransitionGenerator::GenerateSmiOnlyToDouble
+ // ElementsTransitionGenerator::GenerateMapChangeElementTransition
+ // and ElementsTransitionGenerator::GenerateSmiToDouble
// and ElementsTransitionGenerator::GenerateDoubleToObject
{ REG(a2), REG(a3), REG(t5), EMIT_REMEMBERED_SET },
{ REG(a2), REG(a3), REG(t5), OMIT_REMEMBERED_SET },
@@ -7642,9 +7637,9 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
Label fast_elements;
__ CheckFastElements(a2, t1, &double_elements);
- // FAST_SMI_ONLY_ELEMENTS or FAST_ELEMENTS
+ // Check for FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS elements
__ JumpIfSmi(a0, &smi_element);
- __ CheckFastSmiOnlyElements(a2, t1, &fast_elements);
+ __ CheckFastSmiElements(a2, t1, &fast_elements);
// Store into the array literal requires an elements transition. Call into
// the runtime.
@@ -7656,7 +7651,7 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
__ Push(t1, t0);
__ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
- // Array literal has ElementsKind of FAST_ELEMENTS and value is an object.
+ // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
__ bind(&fast_elements);
__ lw(t1, FieldMemOperand(a1, JSObject::kElementsOffset));
__ sll(t2, a3, kPointerSizeLog2 - kSmiTagSize);
@@ -7669,8 +7664,8 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
__ Ret(USE_DELAY_SLOT);
__ mov(v0, a0);
- // Array literal has ElementsKind of FAST_SMI_ONLY_ELEMENTS or
- // FAST_ELEMENTS, and value is Smi.
+ // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS,
+ // and value is Smi.
__ bind(&smi_element);
__ lw(t1, FieldMemOperand(a1, JSObject::kElementsOffset));
__ sll(t2, a3, kPointerSizeLog2 - kSmiTagSize);
@@ -7679,7 +7674,7 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
__ Ret(USE_DELAY_SLOT);
__ mov(v0, a0);
- // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
+ // Array literal has ElementsKind of FAST_*_DOUBLE_ELEMENTS.
__ bind(&double_elements);
__ lw(t1, FieldMemOperand(a1, JSObject::kElementsOffset));
__ StoreNumberToDoubleElements(a0, a3, a1, t1, t2, t3, t5, a2,
diff --git a/deps/v8/src/mips/codegen-mips.cc b/deps/v8/src/mips/codegen-mips.cc
index 9acccdc2ca..44e0359e44 100644
--- a/deps/v8/src/mips/codegen-mips.cc
+++ b/deps/v8/src/mips/codegen-mips.cc
@@ -72,7 +72,7 @@ void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
// -------------------------------------------------------------------------
// Code generators
-void ElementsTransitionGenerator::GenerateSmiOnlyToObject(
+void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- a0 : value
@@ -95,7 +95,7 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToObject(
}
-void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
+void ElementsTransitionGenerator::GenerateSmiToDouble(
MacroAssembler* masm, Label* fail) {
// ----------- S t a t e -------------
// -- a0 : value
diff --git a/deps/v8/src/mips/constants-mips.h b/deps/v8/src/mips/constants-mips.h
index fd04722792..3d585717cb 100644
--- a/deps/v8/src/mips/constants-mips.h
+++ b/deps/v8/src/mips/constants-mips.h
@@ -788,11 +788,6 @@ const int kBArgsSlotsSize = 0 * Instruction::kInstrSize;
const int kBranchReturnOffset = 2 * Instruction::kInstrSize;
-const int kDoubleAlignmentBits = 3;
-const int kDoubleAlignment = (1 << kDoubleAlignmentBits);
-const int kDoubleAlignmentMask = kDoubleAlignment - 1;
-
-
} } // namespace v8::internal
#endif // #ifndef V8_MIPS_CONSTANTS_H_
diff --git a/deps/v8/src/mips/debug-mips.cc b/deps/v8/src/mips/debug-mips.cc
index 83f5f50172..3be1e4d8b2 100644
--- a/deps/v8/src/mips/debug-mips.cc
+++ b/deps/v8/src/mips/debug-mips.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -116,6 +116,8 @@ void BreakLocationIterator::ClearDebugBreakAtSlot() {
Assembler::kDebugBreakSlotInstructions);
}
+const bool Debug::FramePaddingLayout::kIsSupported = false;
+
#define __ ACCESS_MASM(masm)
diff --git a/deps/v8/src/mips/full-codegen-mips.cc b/deps/v8/src/mips/full-codegen-mips.cc
index 4b58fc8c14..263656ea01 100644
--- a/deps/v8/src/mips/full-codegen-mips.cc
+++ b/deps/v8/src/mips/full-codegen-mips.cc
@@ -120,13 +120,6 @@ class JumpPatchSite BASE_EMBEDDED {
};
-// TODO(jkummerow): Obsolete as soon as x64 is updated. Remove.
-int FullCodeGenerator::self_optimization_header_size() {
- UNREACHABLE();
- return 10 * Instruction::kInstrSize;
-}
-
-
// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
@@ -282,11 +275,11 @@ void FullCodeGenerator::Generate() {
// For named function expressions, declare the function name as a
// constant.
if (scope()->is_function_scope() && scope()->function() != NULL) {
- VariableProxy* proxy = scope()->function();
- ASSERT(proxy->var()->mode() == CONST ||
- proxy->var()->mode() == CONST_HARMONY);
- ASSERT(proxy->var()->location() != Variable::UNALLOCATED);
- EmitDeclaration(proxy, proxy->var()->mode(), NULL);
+ VariableDeclaration* function = scope()->function();
+ ASSERT(function->proxy()->var()->mode() == CONST ||
+ function->proxy()->var()->mode() == CONST_HARMONY);
+ ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
+ VisitVariableDeclaration(function);
}
VisitDeclarations(scope()->declarations());
}
@@ -796,64 +789,54 @@ void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
}
-void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
- VariableMode mode,
- FunctionLiteral* function) {
+void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
+ // The variable in the declaration always resides in the current function
+ // context.
+ ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
+ if (FLAG_debug_code) {
+ // Check that we're not inside a with or catch context.
+ __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
+ __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
+ __ Check(ne, "Declaration in with context.",
+ a1, Operand(t0));
+ __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
+ __ Check(ne, "Declaration in catch context.",
+ a1, Operand(t0));
+ }
+}
+
+
+void FullCodeGenerator::VisitVariableDeclaration(
+ VariableDeclaration* declaration) {
// If it was not possible to allocate the variable at compile time, we
// need to "declare" it at runtime to make sure it actually exists in the
// local context.
+ VariableProxy* proxy = declaration->proxy();
+ VariableMode mode = declaration->mode();
Variable* variable = proxy->var();
- bool binding_needs_init = (function == NULL) &&
- (mode == CONST || mode == CONST_HARMONY || mode == LET);
+ bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
switch (variable->location()) {
case Variable::UNALLOCATED:
- ++global_count_;
+ globals_->Add(variable->name(), zone());
+ globals_->Add(variable->binding_needs_init()
+ ? isolate()->factory()->the_hole_value()
+ : isolate()->factory()->undefined_value(),
+ zone());
break;
case Variable::PARAMETER:
case Variable::LOCAL:
- if (function != NULL) {
- Comment cmnt(masm_, "[ Declaration");
- VisitForAccumulatorValue(function);
- __ sw(result_register(), StackOperand(variable));
- } else if (binding_needs_init) {
- Comment cmnt(masm_, "[ Declaration");
- __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
- __ sw(t0, StackOperand(variable));
+ if (hole_init) {
+ Comment cmnt(masm_, "[ VariableDeclaration");
+ __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
+ __ sw(t0, StackOperand(variable));
}
break;
case Variable::CONTEXT:
- // The variable in the decl always resides in the current function
- // context.
- ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
- if (FLAG_debug_code) {
- // Check that we're not inside a with or catch context.
- __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
- __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
- __ Check(ne, "Declaration in with context.",
- a1, Operand(t0));
- __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
- __ Check(ne, "Declaration in catch context.",
- a1, Operand(t0));
- }
- if (function != NULL) {
- Comment cmnt(masm_, "[ Declaration");
- VisitForAccumulatorValue(function);
- __ sw(result_register(), ContextOperand(cp, variable->index()));
- int offset = Context::SlotOffset(variable->index());
- // We know that we have written a function, which is not a smi.
- __ RecordWriteContextSlot(cp,
- offset,
- result_register(),
- a2,
- kRAHasBeenSaved,
- kDontSaveFPRegs,
- EMIT_REMEMBERED_SET,
- OMIT_SMI_CHECK);
- PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
- } else if (binding_needs_init) {
- Comment cmnt(masm_, "[ Declaration");
+ if (hole_init) {
+ Comment cmnt(masm_, "[ VariableDeclaration");
+ EmitDebugCheckDeclarationContext(variable);
__ LoadRoot(at, Heap::kTheHoleValueRootIndex);
__ sw(at, ContextOperand(cp, variable->index()));
// No write barrier since the_hole_value is in old space.
@@ -862,13 +845,11 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
break;
case Variable::LOOKUP: {
- Comment cmnt(masm_, "[ Declaration");
+ Comment cmnt(masm_, "[ VariableDeclaration");
__ li(a2, Operand(variable->name()));
// Declaration nodes are always introduced in one of four modes.
- ASSERT(mode == VAR ||
- mode == CONST ||
- mode == CONST_HARMONY ||
- mode == LET);
+ ASSERT(mode == VAR || mode == LET ||
+ mode == CONST || mode == CONST_HARMONY);
PropertyAttributes attr = (mode == CONST || mode == CONST_HARMONY)
? READ_ONLY : NONE;
__ li(a1, Operand(Smi::FromInt(attr)));
@@ -876,13 +857,9 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
// Note: For variables we must not push an initial value (such as
// 'undefined') because we may have a (legal) redeclaration and we
// must not destroy the current value.
- if (function != NULL) {
- __ Push(cp, a2, a1);
- // Push initial value for function declaration.
- VisitForStackValue(function);
- } else if (binding_needs_init) {
- __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
- __ Push(cp, a2, a1, a0);
+ if (hole_init) {
+ __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
+ __ Push(cp, a2, a1, a0);
} else {
ASSERT(Smi::FromInt(0) == 0);
__ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value.
@@ -895,6 +872,122 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
}
+void FullCodeGenerator::VisitFunctionDeclaration(
+ FunctionDeclaration* declaration) {
+ VariableProxy* proxy = declaration->proxy();
+ Variable* variable = proxy->var();
+ switch (variable->location()) {
+ case Variable::UNALLOCATED: {
+ globals_->Add(variable->name(), zone());
+ Handle<SharedFunctionInfo> function =
+ Compiler::BuildFunctionInfo(declaration->fun(), script());
+ // Check for stack-overflow exception.
+ if (function.is_null()) return SetStackOverflow();
+ globals_->Add(function, zone());
+ break;
+ }
+
+ case Variable::PARAMETER:
+ case Variable::LOCAL: {
+ Comment cmnt(masm_, "[ FunctionDeclaration");
+ VisitForAccumulatorValue(declaration->fun());
+ __ sw(result_register(), StackOperand(variable));
+ break;
+ }
+
+ case Variable::CONTEXT: {
+ Comment cmnt(masm_, "[ FunctionDeclaration");
+ EmitDebugCheckDeclarationContext(variable);
+ VisitForAccumulatorValue(declaration->fun());
+ __ sw(result_register(), ContextOperand(cp, variable->index()));
+ int offset = Context::SlotOffset(variable->index());
+ // We know that we have written a function, which is not a smi.
+ __ RecordWriteContextSlot(cp,
+ offset,
+ result_register(),
+ a2,
+ kRAHasBeenSaved,
+ kDontSaveFPRegs,
+ EMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
+ break;
+ }
+
+ case Variable::LOOKUP: {
+ Comment cmnt(masm_, "[ FunctionDeclaration");
+ __ li(a2, Operand(variable->name()));
+ __ li(a1, Operand(Smi::FromInt(NONE)));
+ __ Push(cp, a2, a1);
+ // Push initial value for function declaration.
+ VisitForStackValue(declaration->fun());
+ __ CallRuntime(Runtime::kDeclareContextSlot, 4);
+ break;
+ }
+ }
+}
+
+
+void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
+ VariableProxy* proxy = declaration->proxy();
+ Variable* variable = proxy->var();
+ Handle<JSModule> instance = declaration->module()->interface()->Instance();
+ ASSERT(!instance.is_null());
+
+ switch (variable->location()) {
+ case Variable::UNALLOCATED: {
+ Comment cmnt(masm_, "[ ModuleDeclaration");
+ globals_->Add(variable->name(), zone());
+ globals_->Add(instance, zone());
+ Visit(declaration->module());
+ break;
+ }
+
+ case Variable::CONTEXT: {
+ Comment cmnt(masm_, "[ ModuleDeclaration");
+ EmitDebugCheckDeclarationContext(variable);
+ __ li(a1, Operand(instance));
+ __ sw(a1, ContextOperand(cp, variable->index()));
+ Visit(declaration->module());
+ break;
+ }
+
+ case Variable::PARAMETER:
+ case Variable::LOCAL:
+ case Variable::LOOKUP:
+ UNREACHABLE();
+ }
+}
+
+
+void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
+ VariableProxy* proxy = declaration->proxy();
+ Variable* variable = proxy->var();
+ switch (variable->location()) {
+ case Variable::UNALLOCATED:
+ // TODO(rossberg)
+ break;
+
+ case Variable::CONTEXT: {
+ Comment cmnt(masm_, "[ ImportDeclaration");
+ EmitDebugCheckDeclarationContext(variable);
+ // TODO(rossberg)
+ break;
+ }
+
+ case Variable::PARAMETER:
+ case Variable::LOCAL:
+ case Variable::LOOKUP:
+ UNREACHABLE();
+ }
+}
+
+
+void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
+ // TODO(rossberg)
+}
+
+
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
// Call the runtime to declare the globals.
// The context is the first argument.
@@ -1519,7 +1612,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
// Mark all computed expressions that are bound to a key that
// is shadowed by a later occurrence of the same key. For the
// marked expressions, no store code is emitted.
- expr->CalculateEmitStore();
+ expr->CalculateEmitStore(zone());
AccessorTable accessor_table(isolate()->zone());
for (int i = 0; i < expr->properties()->length(); i++) {
@@ -1619,7 +1712,8 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
ASSERT_EQ(2, constant_elements->length());
ElementsKind constant_elements_kind =
static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
- bool has_fast_elements = constant_elements_kind == FAST_ELEMENTS;
+ bool has_fast_elements =
+ IsFastObjectElementsKind(constant_elements_kind);
Handle<FixedArrayBase> constant_elements_values(
FixedArrayBase::cast(constant_elements->get(1)));
@@ -1641,8 +1735,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
__ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
} else {
- ASSERT(constant_elements_kind == FAST_ELEMENTS ||
- constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
+ ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
FLAG_smi_only_arrays);
FastCloneShallowArrayStub::Mode mode = has_fast_elements
? FastCloneShallowArrayStub::CLONE_ELEMENTS
@@ -1671,7 +1764,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
VisitForAccumulatorValue(subexpr);
- if (constant_elements_kind == FAST_ELEMENTS) {
+ if (IsFastObjectElementsKind(constant_elements_kind)) {
int offset = FixedArray::kHeaderSize + (i * kPointerSize);
__ lw(t2, MemOperand(sp)); // Copy of array literal.
__ lw(a1, FieldMemOperand(t2, JSObject::kElementsOffset));
@@ -2296,6 +2389,18 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
}
// Record source position for debugger.
SetSourcePosition(expr->position());
+
+ // Record call targets in unoptimized code, but not in the snapshot.
+ if (!Serializer::enabled()) {
+ flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
+ Handle<Object> uninitialized =
+ TypeFeedbackCells::UninitializedSentinel(isolate());
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
+ RecordTypeFeedbackCell(expr->id(), cell);
+ __ li(a2, Operand(cell));
+ }
+
CallFunctionStub stub(arg_count, flags);
__ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
@@ -2984,7 +3089,7 @@ void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
__ Move(f14, zero_reg, a1);
// Subtract and store the result in the heap number.
__ sub_d(f0, f12, f14);
- __ sdc1(f0, MemOperand(s0, HeapNumber::kValueOffset - kHeapObjectTag));
+ __ sdc1(f0, FieldMemOperand(s0, HeapNumber::kValueOffset));
__ mov(v0, s0);
} else {
__ PrepareCallCFunction(2, a0);
@@ -4393,7 +4498,8 @@ void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
Scope* declaration_scope = scope()->DeclarationScope();
- if (declaration_scope->is_global_scope()) {
+ if (declaration_scope->is_global_scope() ||
+ declaration_scope->is_module_scope()) {
// Contexts nested in the global context have a canonical empty function
// as their closure, not the anonymous closure containing the global
// code. Pass a smi sentinel and let the runtime look up the empty
@@ -4424,14 +4530,55 @@ void FullCodeGenerator::EnterFinallyBlock() {
ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
STATIC_ASSERT(0 == kSmiTag);
__ Addu(a1, a1, Operand(a1)); // Convert to smi.
+
+ // Store result register while executing finally block.
+ __ push(a1);
+
+ // Store pending message while executing finally block.
+ ExternalReference pending_message_obj =
+ ExternalReference::address_of_pending_message_obj(isolate());
+ __ li(at, Operand(pending_message_obj));
+ __ lw(a1, MemOperand(at));
+ __ push(a1);
+
+ ExternalReference has_pending_message =
+ ExternalReference::address_of_has_pending_message(isolate());
+ __ li(at, Operand(has_pending_message));
+ __ lw(a1, MemOperand(at));
+ __ push(a1);
+
+ ExternalReference pending_message_script =
+ ExternalReference::address_of_pending_message_script(isolate());
+ __ li(at, Operand(pending_message_script));
+ __ lw(a1, MemOperand(at));
__ push(a1);
}
void FullCodeGenerator::ExitFinallyBlock() {
ASSERT(!result_register().is(a1));
+ // Restore pending message from stack.
+ __ pop(a1);
+ ExternalReference pending_message_script =
+ ExternalReference::address_of_pending_message_script(isolate());
+ __ li(at, Operand(pending_message_script));
+ __ sw(a1, MemOperand(at));
+
+ __ pop(a1);
+ ExternalReference has_pending_message =
+ ExternalReference::address_of_has_pending_message(isolate());
+ __ li(at, Operand(has_pending_message));
+ __ sw(a1, MemOperand(at));
+
+ __ pop(a1);
+ ExternalReference pending_message_obj =
+ ExternalReference::address_of_pending_message_obj(isolate());
+ __ li(at, Operand(pending_message_obj));
+ __ sw(a1, MemOperand(at));
+
// Restore result register from stack.
__ pop(a1);
+
// Uncook return address and return.
__ pop(result_register());
ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
diff --git a/deps/v8/src/mips/ic-mips.cc b/deps/v8/src/mips/ic-mips.cc
index 32da2df182..5d530d0e9b 100644
--- a/deps/v8/src/mips/ic-mips.cc
+++ b/deps/v8/src/mips/ic-mips.cc
@@ -1347,34 +1347,35 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
__ Branch(&non_double_value, ne, t0, Operand(at));
- // Value is a double. Transition FAST_SMI_ONLY_ELEMENTS ->
- // FAST_DOUBLE_ELEMENTS and complete the store.
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+
+ // Value is a double. Transition FAST_SMI_ELEMENTS -> FAST_DOUBLE_ELEMENTS
+ // and complete the store.
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
FAST_DOUBLE_ELEMENTS,
receiver_map,
t0,
&slow);
ASSERT(receiver_map.is(a3)); // Transition code expects map in a3
- ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &slow);
+ ElementsTransitionGenerator::GenerateSmiToDouble(masm, &slow);
__ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
__ jmp(&fast_double_without_map_check);
__ bind(&non_double_value);
- // Value is not a double, FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
FAST_ELEMENTS,
receiver_map,
t0,
&slow);
ASSERT(receiver_map.is(a3)); // Transition code expects map in a3
- ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm);
+ ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm);
__ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
__ jmp(&finish_object_store);
__ bind(&transition_double_elements);
- // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
- // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
- // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
+ // Elements are double, but value is an Object that's not a HeapNumber. Make
+  // sure that the receiver is an Array with Object elements and transition
+  // the array from double elements to Object elements.
__ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
FAST_ELEMENTS,
receiver_map,
@@ -1471,7 +1472,7 @@ void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
// Must return the modified receiver in v0.
if (!FLAG_trace_elements_transitions) {
Label fail;
- ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &fail);
+ ElementsTransitionGenerator::GenerateSmiToDouble(masm, &fail);
__ Ret(USE_DELAY_SLOT);
__ mov(v0, a2);
__ bind(&fail);
@@ -1688,12 +1689,12 @@ void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
// Activate inlined smi code.
if (previous_state == UNINITIALIZED) {
- PatchInlinedSmiCode(address());
+ PatchInlinedSmiCode(address(), ENABLE_INLINED_SMI_CHECK);
}
}
-void PatchInlinedSmiCode(Address address) {
+void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
Address andi_instruction_address =
address + Assembler::kCallTargetAddressOffset;
@@ -1727,33 +1728,30 @@ void PatchInlinedSmiCode(Address address) {
Instr instr_at_patch = Assembler::instr_at(patch_address);
Instr branch_instr =
Assembler::instr_at(patch_address + Instruction::kInstrSize);
- ASSERT(Assembler::IsAndImmediate(instr_at_patch));
- ASSERT_EQ(0, Assembler::GetImmediate16(instr_at_patch));
+ // This is patching a conditional "jump if not smi/jump if smi" site.
+ // Enabling by changing from
+ // andi at, rx, 0
+ // Branch <target>, eq, at, Operand(zero_reg)
+ // to:
+ // andi at, rx, #kSmiTagMask
+ // Branch <target>, ne, at, Operand(zero_reg)
+ // and vice-versa to be disabled again.
+ CodePatcher patcher(patch_address, 2);
+ Register reg = Register::from_code(Assembler::GetRs(instr_at_patch));
+ if (check == ENABLE_INLINED_SMI_CHECK) {
+ ASSERT(Assembler::IsAndImmediate(instr_at_patch));
+ ASSERT_EQ(0, Assembler::GetImmediate16(instr_at_patch));
+ patcher.masm()->andi(at, reg, kSmiTagMask);
+ } else {
+ ASSERT(check == DISABLE_INLINED_SMI_CHECK);
+ ASSERT(Assembler::IsAndImmediate(instr_at_patch));
+ patcher.masm()->andi(at, reg, 0);
+ }
ASSERT(Assembler::IsBranch(branch_instr));
if (Assembler::IsBeq(branch_instr)) {
- // This is patching a "jump if not smi" site to be active.
- // Changing:
- // andi at, rx, 0
- // Branch <target>, eq, at, Operand(zero_reg)
- // to:
- // andi at, rx, #kSmiTagMask
- // Branch <target>, ne, at, Operand(zero_reg)
- CodePatcher patcher(patch_address, 2);
- Register reg = Register::from_code(Assembler::GetRs(instr_at_patch));
- patcher.masm()->andi(at, reg, kSmiTagMask);
patcher.ChangeBranchCondition(ne);
} else {
ASSERT(Assembler::IsBne(branch_instr));
- // This is patching a "jump if smi" site to be active.
- // Changing:
- // andi at, rx, 0
- // Branch <target>, ne, at, Operand(zero_reg)
- // to:
- // andi at, rx, #kSmiTagMask
- // Branch <target>, eq, at, Operand(zero_reg)
- CodePatcher patcher(patch_address, 2);
- Register reg = Register::from_code(Assembler::GetRs(instr_at_patch));
- patcher.masm()->andi(at, reg, kSmiTagMask);
patcher.ChangeBranchCondition(eq);
}
}
diff --git a/deps/v8/src/mips/lithium-codegen-mips.cc b/deps/v8/src/mips/lithium-codegen-mips.cc
index f21ed8f3ee..67dbe69def 100644
--- a/deps/v8/src/mips/lithium-codegen-mips.cc
+++ b/deps/v8/src/mips/lithium-codegen-mips.cc
@@ -592,14 +592,14 @@ void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
++jsframe_count;
}
}
- Translation translation(&translations_, frame_count, jsframe_count);
+ Translation translation(&translations_, frame_count, jsframe_count, zone());
WriteTranslation(environment, &translation);
int deoptimization_index = deoptimizations_.length();
int pc_offset = masm()->pc_offset();
environment->Register(deoptimization_index,
translation.index(),
(mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
- deoptimizations_.Add(environment);
+ deoptimizations_.Add(environment, zone());
}
}
@@ -678,7 +678,7 @@ int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
for (int i = 0; i < deoptimization_literals_.length(); ++i) {
if (deoptimization_literals_[i].is_identical_to(literal)) return i;
}
- deoptimization_literals_.Add(literal);
+ deoptimization_literals_.Add(literal, zone());
return result;
}
@@ -724,14 +724,14 @@ void LCodeGen::RecordSafepoint(
for (int i = 0; i < operands->length(); i++) {
LOperand* pointer = operands->at(i);
if (pointer->IsStackSlot()) {
- safepoint.DefinePointerSlot(pointer->index());
+ safepoint.DefinePointerSlot(pointer->index(), zone());
} else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
- safepoint.DefinePointerRegister(ToRegister(pointer));
+ safepoint.DefinePointerRegister(ToRegister(pointer), zone());
}
}
if (kind & Safepoint::kWithRegisters) {
// Register cp always contains a pointer to the context.
- safepoint.DefinePointerRegister(cp);
+ safepoint.DefinePointerRegister(cp, zone());
}
}
@@ -743,7 +743,7 @@ void LCodeGen::RecordSafepoint(LPointerMap* pointers,
void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) {
- LPointerMap empty_pointers(RelocInfo::kNoPosition);
+ LPointerMap empty_pointers(RelocInfo::kNoPosition, zone());
RecordSafepoint(&empty_pointers, deopt_mode);
}
@@ -2044,7 +2044,7 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
};
DeferredInstanceOfKnownGlobal* deferred;
- deferred = new DeferredInstanceOfKnownGlobal(this, instr);
+ deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
Label done, false_result;
Register object = ToRegister(instr->InputAt(0));
@@ -2139,8 +2139,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
RelocInfo::CODE_TARGET,
instr,
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
- ASSERT(instr->HasDeoptimizationEnvironment());
- LEnvironment* env = instr->deoptimization_environment();
+ LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
// Put the result value into the result register slot and
// restore all registers.
@@ -2316,12 +2315,12 @@ void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
Register object,
Handle<Map> type,
- Handle<String> name) {
+ Handle<String> name,
+ LEnvironment* env) {
LookupResult lookup(isolate());
type->LookupInDescriptors(NULL, *name, &lookup);
- ASSERT(lookup.IsFound() &&
- (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
- if (lookup.type() == FIELD) {
+ ASSERT(lookup.IsFound() || lookup.IsCacheable());
+ if (lookup.IsFound() && lookup.type() == FIELD) {
int index = lookup.GetLocalFieldIndexFromMap(*type);
int offset = index * kPointerSize;
if (index < 0) {
@@ -2333,9 +2332,23 @@ void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
__ lw(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
__ lw(result, FieldMemOperand(result, offset + FixedArray::kHeaderSize));
}
- } else {
+ } else if (lookup.IsFound() && lookup.type() == CONSTANT_FUNCTION) {
Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
__ LoadHeapObject(result, function);
+ } else {
+ // Negative lookup.
+ // Check prototypes.
+ HeapObject* current = HeapObject::cast((*type)->prototype());
+ Heap* heap = type->GetHeap();
+ while (current != heap->null_value()) {
+ Handle<HeapObject> link(current);
+ __ LoadHeapObject(result, link);
+ __ lw(result, FieldMemOperand(result, HeapObject::kMapOffset));
+ DeoptimizeIf(ne, env,
+ result, Operand(Handle<Map>(JSObject::cast(current)->map())));
+ current = HeapObject::cast(current->map()->prototype());
+ }
+ __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
}
}
@@ -2343,41 +2356,46 @@ void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
Register object = ToRegister(instr->object());
Register result = ToRegister(instr->result());
- Register scratch = scratch0();
+ Register object_map = scratch0();
+
int map_count = instr->hydrogen()->types()->length();
+ bool need_generic = instr->hydrogen()->need_generic();
+
+ if (map_count == 0 && !need_generic) {
+ DeoptimizeIf(al, instr->environment());
+ return;
+ }
Handle<String> name = instr->hydrogen()->name();
- if (map_count == 0) {
- ASSERT(instr->hydrogen()->need_generic());
- __ li(a2, Operand(name));
- Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
- } else {
- Label done;
- __ lw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
- for (int i = 0; i < map_count - 1; ++i) {
- Handle<Map> map = instr->hydrogen()->types()->at(i);
+ Label done;
+ __ lw(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
+ for (int i = 0; i < map_count; ++i) {
+ bool last = (i == map_count - 1);
+ Handle<Map> map = instr->hydrogen()->types()->at(i);
+ Label check_passed;
+ __ CompareMapAndBranch(
+ object_map, map, &check_passed,
+ eq, &check_passed, ALLOW_ELEMENT_TRANSITION_MAPS);
+ if (last && !need_generic) {
+ DeoptimizeIf(al, instr->environment());
+ __ bind(&check_passed);
+ EmitLoadFieldOrConstantFunction(
+ result, object, map, name, instr->environment());
+ } else {
Label next;
- __ Branch(&next, ne, scratch, Operand(map));
- EmitLoadFieldOrConstantFunction(result, object, map, name);
+ __ Branch(&next);
+ __ bind(&check_passed);
+ EmitLoadFieldOrConstantFunction(
+ result, object, map, name, instr->environment());
__ Branch(&done);
__ bind(&next);
}
- Handle<Map> map = instr->hydrogen()->types()->last();
- if (instr->hydrogen()->need_generic()) {
- Label generic;
- __ Branch(&generic, ne, scratch, Operand(map));
- EmitLoadFieldOrConstantFunction(result, object, map, name);
- __ Branch(&done);
- __ bind(&generic);
- __ li(a2, Operand(name));
- Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
- } else {
- DeoptimizeIf(ne, instr->environment(), scratch, Operand(map));
- EmitLoadFieldOrConstantFunction(result, object, map, name);
- }
- __ bind(&done);
}
+ if (need_generic) {
+ __ li(a2, Operand(name));
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);
+ }
+ __ bind(&done);
}
@@ -2452,8 +2470,10 @@ void LCodeGen::DoLoadElements(LLoadElements* instr) {
__ lbu(scratch, FieldMemOperand(scratch, Map::kBitField2Offset));
__ Ext(scratch, scratch, Map::kElementsKindShift,
Map::kElementsKindBitCount);
- __ Branch(&done, eq, scratch,
- Operand(FAST_ELEMENTS));
+ __ Branch(&fail, lt, scratch,
+ Operand(GetInitialFastElementsKind()));
+ __ Branch(&done, le, scratch,
+ Operand(TERMINAL_FAST_ELEMENTS_KIND));
__ Branch(&fail, lt, scratch,
Operand(FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND));
__ Branch(&done, le, scratch,
@@ -2506,12 +2526,19 @@ void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
// Load the result.
__ sll(scratch, key, kPointerSizeLog2); // Key indexes words.
__ addu(scratch, elements, scratch);
- __ lw(result, FieldMemOperand(scratch, FixedArray::kHeaderSize));
+ uint32_t offset = FixedArray::kHeaderSize +
+ (instr->additional_index() << kPointerSizeLog2);
+ __ lw(result, FieldMemOperand(scratch, offset));
// Check for the hole value.
if (instr->hydrogen()->RequiresHoleCheck()) {
- __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
- DeoptimizeIf(eq, instr->environment(), result, Operand(scratch));
+ if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) {
+ __ And(scratch, result, Operand(kSmiTagMask));
+ DeoptimizeIf(ne, instr->environment(), scratch, Operand(zero_reg));
+ } else {
+ __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
+ DeoptimizeIf(eq, instr->environment(), result, Operand(scratch));
+ }
}
}
@@ -2537,17 +2564,21 @@ void LCodeGen::DoLoadKeyedFastDoubleElement(
}
if (key_is_constant) {
- __ Addu(elements, elements, Operand(constant_key * (1 << shift_size) +
- FixedDoubleArray::kHeaderSize - kHeapObjectTag));
+ __ Addu(elements, elements,
+ Operand(((constant_key + instr->additional_index()) << shift_size) +
+ FixedDoubleArray::kHeaderSize - kHeapObjectTag));
} else {
__ sll(scratch, key, shift_size);
__ Addu(elements, elements, Operand(scratch));
__ Addu(elements, elements,
- Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
+ Operand((FixedDoubleArray::kHeaderSize - kHeapObjectTag) +
+ (instr->additional_index() << shift_size)));
}
- __ lw(scratch, MemOperand(elements, sizeof(kHoleNanLower32)));
- DeoptimizeIf(eq, instr->environment(), scratch, Operand(kHoleNanUpper32));
+ if (instr->hydrogen()->RequiresHoleCheck()) {
+ __ lw(scratch, MemOperand(elements, sizeof(kHoleNanLower32)));
+ DeoptimizeIf(eq, instr->environment(), scratch, Operand(kHoleNanUpper32));
+ }
__ ldc1(result, MemOperand(elements));
}
@@ -2569,32 +2600,41 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
key = ToRegister(instr->key());
}
int shift_size = ElementsKindToShiftSize(elements_kind);
+ int additional_offset = instr->additional_index() << shift_size;
if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
FPURegister result = ToDoubleRegister(instr->result());
if (key_is_constant) {
- __ Addu(scratch0(), external_pointer, constant_key * (1 << shift_size));
+ __ Addu(scratch0(), external_pointer, constant_key << shift_size);
} else {
__ sll(scratch0(), key, shift_size);
__ Addu(scratch0(), scratch0(), external_pointer);
}
if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
- __ lwc1(result, MemOperand(scratch0()));
+ __ lwc1(result, MemOperand(scratch0(), additional_offset));
__ cvt_d_s(result, result);
} else { // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS
- __ ldc1(result, MemOperand(scratch0()));
+ __ ldc1(result, MemOperand(scratch0(), additional_offset));
}
} else {
Register result = ToRegister(instr->result());
Register scratch = scratch0();
+ if (instr->additional_index() != 0 && !key_is_constant) {
+ __ Addu(scratch, key, instr->additional_index());
+ }
MemOperand mem_operand(zero_reg);
if (key_is_constant) {
- mem_operand = MemOperand(external_pointer,
- constant_key * (1 << shift_size));
+ mem_operand =
+ MemOperand(external_pointer,
+ (constant_key << shift_size) + additional_offset);
} else {
- __ sll(scratch, key, shift_size);
+ if (instr->additional_index() == 0) {
+ __ sll(scratch, key, shift_size);
+ } else {
+ __ sll(scratch, scratch, shift_size);
+ }
__ Addu(scratch, scratch, external_pointer);
mem_operand = MemOperand(scratch);
}
@@ -2627,7 +2667,10 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -2651,16 +2694,20 @@ void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
Register temp = scratch1();
Register result = ToRegister(instr->result());
- // Check if the calling frame is an arguments adaptor frame.
- Label done, adapted;
- __ lw(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
- __ lw(result, MemOperand(scratch, StandardFrameConstants::kContextOffset));
- __ Xor(temp, result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
-
- // Result is the frame pointer for the frame if not adapted and for the real
- // frame below the adaptor frame if adapted.
- __ Movn(result, fp, temp); // Move only if temp is not equal to zero (ne).
- __ Movz(result, scratch, temp); // Move only if temp is equal to zero (eq).
+ if (instr->hydrogen()->from_inlined()) {
+ __ Subu(result, sp, 2 * kPointerSize);
+ } else {
+ // Check if the calling frame is an arguments adaptor frame.
+ Label done, adapted;
+ __ lw(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+ __ lw(result, MemOperand(scratch, StandardFrameConstants::kContextOffset));
+ __ Xor(temp, result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+
+ // Result is the frame pointer for the frame if not adapted and for the real
+ // frame below the adaptor frame if adapted.
+ __ Movn(result, fp, temp); // Move only if temp is not equal to zero (ne).
+ __ Movz(result, scratch, temp); // Move only if temp is equal to zero (eq).
+ }
}
@@ -2768,7 +2815,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
__ sll(scratch, length, 2);
__ bind(&invoke);
- ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
+ ASSERT(instr->HasPointerMap());
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
SafepointGenerator safepoint_generator(
@@ -2793,6 +2840,11 @@ void LCodeGen::DoPushArgument(LPushArgument* instr) {
}
+void LCodeGen::DoDrop(LDrop* instr) {
+ __ Drop(instr->count());
+}
+
+
void LCodeGen::DoThisFunction(LThisFunction* instr) {
Register result = ToRegister(instr->result());
__ LoadHeapObject(result, instr->hydrogen()->closure());
@@ -2838,7 +2890,8 @@ void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
int arity,
LInstruction* instr,
- CallKind call_kind) {
+ CallKind call_kind,
+ A1State a1_state) {
bool can_invoke_directly = !function->NeedsArgumentsAdaption() ||
function->shared()->formal_parameter_count() == arity;
@@ -2846,7 +2899,10 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
RecordPosition(pointers->position());
if (can_invoke_directly) {
- __ LoadHeapObject(a1, function);
+ if (a1_state == A1_UNINITIALIZED) {
+ __ LoadHeapObject(a1, function);
+ }
+
// Change context if needed.
bool change_context =
(info()->closure()->context() != function->context()) ||
@@ -2883,7 +2939,11 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
ASSERT(ToRegister(instr->result()).is(v0));
__ mov(a0, v0);
- CallKnownFunction(instr->function(), instr->arity(), instr, CALL_AS_METHOD);
+ CallKnownFunction(instr->function(),
+ instr->arity(),
+ instr,
+ CALL_AS_METHOD,
+ A1_UNINITIALIZED);
}
@@ -2992,7 +3052,7 @@ void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
} else {
// Representation is tagged.
DeferredMathAbsTaggedHeapNumber* deferred =
- new DeferredMathAbsTaggedHeapNumber(this, instr);
+ new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr);
Register input = ToRegister(instr->InputAt(0));
// Smi check.
__ JumpIfNotSmi(input, deferred->entry());
@@ -3183,7 +3243,7 @@ void LCodeGen::DoRandom(LRandom* instr) {
LRandom* instr_;
};
- DeferredDoRandom* deferred = new DeferredDoRandom(this, instr);
+ DeferredDoRandom* deferred = new(zone()) DeferredDoRandom(this, instr);
// Having marked this instruction as a call we can use any
// registers.
ASSERT(ToDoubleRegister(instr->result()).is(f0));
@@ -3319,13 +3379,21 @@ void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
ASSERT(ToRegister(instr->function()).is(a1));
ASSERT(instr->HasPointerMap());
- ASSERT(instr->HasDeoptimizationEnvironment());
- LPointerMap* pointers = instr->pointer_map();
- RecordPosition(pointers->position());
- SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
- ParameterCount count(instr->arity());
- __ InvokeFunction(a1, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
- __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+
+ if (instr->known_function().is_null()) {
+ LPointerMap* pointers = instr->pointer_map();
+ RecordPosition(pointers->position());
+ SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
+ ParameterCount count(instr->arity());
+ __ InvokeFunction(a1, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
+ __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+ } else {
+ CallKnownFunction(instr->known_function(),
+ instr->arity(),
+ instr,
+ CALL_AS_METHOD,
+ A1_CONTAINS_TARGET);
+ }
}
@@ -3380,7 +3448,11 @@ void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
ASSERT(ToRegister(instr->result()).is(v0));
- CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION);
+ CallKnownFunction(instr->target(),
+ instr->arity(),
+ instr,
+ CALL_AS_FUNCTION,
+ A1_UNINITIALIZED);
}
@@ -3410,6 +3482,18 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
if (!instr->transition().is_null()) {
__ li(scratch, Operand(instr->transition()));
__ sw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
+ if (instr->hydrogen()->NeedsWriteBarrierForMap()) {
+ Register temp = ToRegister(instr->TempAt(0));
+ // Update the write barrier for the map field.
+ __ RecordWriteField(object,
+ HeapObject::kMapOffset,
+ scratch,
+ temp,
+ kRAHasBeenSaved,
+ kSaveFPRegs,
+ OMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ }
}
// Do the store.
@@ -3480,11 +3564,17 @@ void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
int offset =
- ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
+ (ToInteger32(const_operand) + instr->additional_index()) * kPointerSize
+ + FixedArray::kHeaderSize;
__ sw(value, FieldMemOperand(elements, offset));
} else {
__ sll(scratch, key, kPointerSizeLog2);
__ addu(scratch, elements, scratch);
+ if (instr->additional_index() != 0) {
+ __ Addu(scratch,
+ scratch,
+ instr->additional_index() << kPointerSizeLog2);
+ }
__ sw(value, FieldMemOperand(scratch, FixedArray::kHeaderSize));
}
@@ -3527,7 +3617,7 @@ void LCodeGen::DoStoreKeyedFastDoubleElement(
}
int shift_size = ElementsKindToShiftSize(FAST_DOUBLE_ELEMENTS);
if (key_is_constant) {
- __ Addu(scratch, elements, Operand(constant_key * (1 << shift_size) +
+ __ Addu(scratch, elements, Operand((constant_key << shift_size) +
FixedDoubleArray::kHeaderSize - kHeapObjectTag));
} else {
__ sll(scratch, key, shift_size);
@@ -3536,17 +3626,19 @@ void LCodeGen::DoStoreKeyedFastDoubleElement(
Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
}
- Label is_nan;
- // Check for NaN. All NaNs must be canonicalized.
- __ BranchF(NULL, &is_nan, eq, value, value);
- __ Branch(&not_nan);
+ if (instr->NeedsCanonicalization()) {
+ Label is_nan;
+ // Check for NaN. All NaNs must be canonicalized.
+ __ BranchF(NULL, &is_nan, eq, value, value);
+ __ Branch(&not_nan);
- // Only load canonical NaN if the comparison above set the overflow.
- __ bind(&is_nan);
- __ Move(value, FixedDoubleArray::canonical_not_the_hole_nan_as_double());
+ // Only load canonical NaN if the comparison above set the overflow.
+ __ bind(&is_nan);
+ __ Move(value, FixedDoubleArray::canonical_not_the_hole_nan_as_double());
+ }
__ bind(&not_nan);
- __ sdc1(value, MemOperand(scratch));
+ __ sdc1(value, MemOperand(scratch, instr->additional_index() << shift_size));
}
@@ -3567,12 +3659,13 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
key = ToRegister(instr->key());
}
int shift_size = ElementsKindToShiftSize(elements_kind);
+ int additional_offset = instr->additional_index() << shift_size;
if (elements_kind == EXTERNAL_FLOAT_ELEMENTS ||
elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
FPURegister value(ToDoubleRegister(instr->value()));
if (key_is_constant) {
- __ Addu(scratch0(), external_pointer, constant_key * (1 << shift_size));
+ __ Addu(scratch0(), external_pointer, constant_key << shift_size);
} else {
__ sll(scratch0(), key, shift_size);
__ Addu(scratch0(), scratch0(), external_pointer);
@@ -3580,19 +3673,27 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
__ cvt_s_d(double_scratch0(), value);
- __ swc1(double_scratch0(), MemOperand(scratch0()));
+ __ swc1(double_scratch0(), MemOperand(scratch0(), additional_offset));
} else { // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS
- __ sdc1(value, MemOperand(scratch0()));
+ __ sdc1(value, MemOperand(scratch0(), additional_offset));
}
} else {
Register value(ToRegister(instr->value()));
- MemOperand mem_operand(zero_reg);
Register scratch = scratch0();
+ if (instr->additional_index() != 0 && !key_is_constant) {
+ __ Addu(scratch, key, instr->additional_index());
+ }
+ MemOperand mem_operand(zero_reg);
if (key_is_constant) {
mem_operand = MemOperand(external_pointer,
- constant_key * (1 << shift_size));
+ ((constant_key + instr->additional_index())
+ << shift_size));
} else {
- __ sll(scratch, key, shift_size);
+ if (instr->additional_index() == 0) {
+ __ sll(scratch, key, shift_size);
+ } else {
+ __ sll(scratch, scratch, shift_size);
+ }
__ Addu(scratch, scratch, external_pointer);
mem_operand = MemOperand(scratch);
}
@@ -3614,7 +3715,10 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -3652,20 +3756,21 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
__ Branch(&not_applicable, ne, scratch, Operand(from_map));
__ li(new_map_reg, Operand(to_map));
- if (from_kind == FAST_SMI_ONLY_ELEMENTS && to_kind == FAST_ELEMENTS) {
+ if (IsFastSmiElementsKind(from_kind) && IsFastObjectElementsKind(to_kind)) {
__ sw(new_map_reg, FieldMemOperand(object_reg, HeapObject::kMapOffset));
// Write barrier.
__ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg,
scratch, kRAHasBeenSaved, kDontSaveFPRegs);
- } else if (from_kind == FAST_SMI_ONLY_ELEMENTS &&
- to_kind == FAST_DOUBLE_ELEMENTS) {
+ } else if (IsFastSmiElementsKind(from_kind) &&
+ IsFastDoubleElementsKind(to_kind)) {
Register fixed_object_reg = ToRegister(instr->temp_reg());
ASSERT(fixed_object_reg.is(a2));
ASSERT(new_map_reg.is(a3));
__ mov(fixed_object_reg, object_reg);
CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(),
RelocInfo::CODE_TARGET, instr);
- } else if (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS) {
+ } else if (IsFastDoubleElementsKind(from_kind) &&
+ IsFastObjectElementsKind(to_kind)) {
Register fixed_object_reg = ToRegister(instr->temp_reg());
ASSERT(fixed_object_reg.is(a2));
ASSERT(new_map_reg.is(a3));
@@ -3699,7 +3804,7 @@ void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
};
DeferredStringCharCodeAt* deferred =
- new DeferredStringCharCodeAt(this, instr);
+ new(zone()) DeferredStringCharCodeAt(this, instr);
StringCharLoadGenerator::Generate(masm(),
ToRegister(instr->string()),
ToRegister(instr->index()),
@@ -3753,7 +3858,7 @@ void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
};
DeferredStringCharFromCode* deferred =
- new DeferredStringCharFromCode(this, instr);
+ new(zone()) DeferredStringCharFromCode(this, instr);
ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
Register char_code = ToRegister(instr->char_code());
@@ -3829,7 +3934,7 @@ void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
Register dst = ToRegister(instr->result());
Register overflow = scratch0();
- DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
+ DeferredNumberTagI* deferred = new(zone()) DeferredNumberTagI(this, instr);
__ SmiTagCheckOverflow(dst, src, overflow);
__ BranchOnOverflow(deferred->entry(), overflow);
__ bind(deferred->exit());
@@ -3897,7 +4002,7 @@ void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
Register temp1 = ToRegister(instr->TempAt(0));
Register temp2 = ToRegister(instr->TempAt(1));
- DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
+ DeferredNumberTagD* deferred = new(zone()) DeferredNumberTagD(this, instr);
if (FLAG_inline_new) {
__ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex);
__ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry());
@@ -4091,7 +4196,7 @@ void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
Register input_reg = ToRegister(input);
- DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);
+ DeferredTaggedToI* deferred = new(zone()) DeferredTaggedToI(this, instr);
// Let the deferred code handle the HeapObject case.
__ JumpIfNotSmi(input_reg, deferred->entry());
@@ -4235,14 +4340,21 @@ void LCodeGen::DoCheckMapCommon(Register reg,
}
-void LCodeGen::DoCheckMap(LCheckMap* instr) {
+void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
Register scratch = scratch0();
LOperand* input = instr->InputAt(0);
ASSERT(input->IsRegister());
Register reg = ToRegister(input);
- Handle<Map> map = instr->hydrogen()->map();
- DoCheckMapCommon(reg, scratch, map, instr->hydrogen()->mode(),
- instr->environment());
+ Label success;
+ SmallMapList* map_set = instr->hydrogen()->map_set();
+ for (int i = 0; i < map_set->length() - 1; i++) {
+ Handle<Map> map = map_set->at(i);
+ __ CompareMapAndBranch(
+ reg, scratch, map, &success, eq, &success, REQUIRE_EXACT_MAP);
+ }
+ Handle<Map> map = map_set->last();
+ DoCheckMapCommon(reg, scratch, map, REQUIRE_EXACT_MAP, instr->environment());
+ __ bind(&success);
}
@@ -4335,7 +4447,8 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
LAllocateObject* instr_;
};
- DeferredAllocateObject* deferred = new DeferredAllocateObject(this, instr);
+ DeferredAllocateObject* deferred =
+ new(zone()) DeferredAllocateObject(this, instr);
Register result = ToRegister(instr->result());
Register scratch = ToRegister(instr->TempAt(0));
@@ -4358,6 +4471,14 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
deferred->entry(),
TAG_OBJECT);
+ __ bind(deferred->exit());
+ if (FLAG_debug_code) {
+ Label is_in_new_space;
+ __ JumpIfInNewSpace(result, scratch, &is_in_new_space);
+ __ Abort("Allocated object is not in new-space");
+ __ bind(&is_in_new_space);
+ }
+
// Load the initial map.
Register map = scratch;
__ LoadHeapObject(map, constructor);
@@ -4376,14 +4497,14 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
__ sw(scratch, FieldMemOperand(result, property_offset));
}
}
-
- __ bind(deferred->exit());
}
void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
Register result = ToRegister(instr->result());
Handle<JSFunction> constructor = instr->hydrogen()->constructor();
+ Handle<Map> initial_map(constructor->initial_map());
+ int instance_size = initial_map->instance_size();
// TODO(3095996): Get rid of this. For now, we need to make the
// result register contain a valid pointer because it is already
@@ -4391,9 +4512,9 @@ void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
__ mov(result, zero_reg);
PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
- __ LoadHeapObject(a0, constructor);
+ __ li(a0, Operand(Smi::FromInt(instance_size)));
__ push(a0);
- CallRuntimeFromDeferred(Runtime::kNewObject, 1, instr);
+ CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr);
__ StoreToSafepointRegisterSlot(v0, result);
}
@@ -4405,8 +4526,9 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
// Deopt if the array literal boilerplate ElementsKind is of a type different
// than the expected one. The check isn't necessary if the boilerplate has
- // already been converted to FAST_ELEMENTS.
- if (boilerplate_elements_kind != FAST_ELEMENTS) {
+ // already been converted to TERMINAL_FAST_ELEMENTS_KIND.
+ if (CanTransitionToMoreGeneralFastElementsKind(
+ boilerplate_elements_kind, true)) {
__ LoadHeapObject(a1, instr->hydrogen()->boilerplate_object());
// Load map into a2.
__ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
@@ -4529,9 +4651,10 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
__ sw(a2, FieldMemOperand(result, total_offset + 4));
}
} else if (elements->IsFixedArray()) {
+ Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
for (int i = 0; i < elements_length; i++) {
int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i);
- Handle<Object> value = JSObject::GetElement(object, i);
+ Handle<Object> value(fast_elements->get(i));
if (value->IsJSObject()) {
Handle<JSObject> value_object = Handle<JSObject>::cast(value);
__ Addu(a2, result, Operand(*offset));
@@ -4555,6 +4678,24 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
int size = instr->hydrogen()->total_size();
+ ElementsKind boilerplate_elements_kind =
+ instr->hydrogen()->boilerplate()->GetElementsKind();
+
+ // Deopt if the array literal boilerplate ElementsKind is of a type different
+ // than the expected one. The check isn't necessary if the boilerplate has
+ // already been converted to TERMINAL_FAST_ELEMENTS_KIND.
+ if (CanTransitionToMoreGeneralFastElementsKind(
+ boilerplate_elements_kind, true)) {
+ __ LoadHeapObject(a1, instr->hydrogen()->boilerplate());
+ // Load map into a2.
+ __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
+ // Load the map's "bit field 2".
+ __ lbu(a2, FieldMemOperand(a2, Map::kBitField2Offset));
+ // Retrieve elements_kind from bit field 2.
+ __ Ext(a2, a2, Map::kElementsKindShift, Map::kElementsKindBitCount);
+ DeoptimizeIf(ne, instr->environment(), a2,
+ Operand(boilerplate_elements_kind));
+ }
// Allocate all objects that are part of the literal in one big
// allocation. This avoids multiple limit checks.
@@ -4890,7 +5031,7 @@ void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
Register strict = scratch0();
__ li(strict, Operand(Smi::FromInt(strict_mode_flag())));
__ Push(object, key, strict);
- ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
+ ASSERT(instr->HasPointerMap());
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
SafepointGenerator safepoint_generator(
@@ -4903,7 +5044,7 @@ void LCodeGen::DoIn(LIn* instr) {
Register obj = ToRegister(instr->object());
Register key = ToRegister(instr->key());
__ Push(key, obj);
- ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
+ ASSERT(instr->HasPointerMap());
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
SafepointGenerator safepoint_generator(this, pointers, Safepoint::kLazyDeopt);
@@ -4952,7 +5093,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
ASSERT(instr->hydrogen()->is_backwards_branch());
// Perform stack overflow check if this goto needs it before jumping.
DeferredStackCheck* deferred_stack_check =
- new DeferredStackCheck(this, instr);
+ new(zone()) DeferredStackCheck(this, instr);
__ LoadRoot(at, Heap::kStackLimitRootIndex);
__ Branch(deferred_stack_check->entry(), lo, sp, Operand(at));
EnsureSpaceForLazyDeopt();
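The keyed load/store hunks above fold the hydrogen index_offset (exposed as additional_index()) straight into the address arithmetic: for a constant key the byte offset becomes (constant_key + additional_index) << shift_size plus the array header, and for a register key the extra offset is added after scaling. A minimal stand-alone sketch of that arithmetic, under the assumption of 4-byte tagged slots; ElementByteOffset and kHeaderSize below are illustrative names, not V8's:

#include <cassert>
#include <cstdint>

// Byte offset of element (key + additional_index) in a backing store whose
// elements are (1 << element_shift) bytes wide and start after header_size.
uint32_t ElementByteOffset(uint32_t key,
                           uint32_t additional_index,
                           int element_shift,
                           uint32_t header_size) {
  return ((key + additional_index) << element_shift) + header_size;
}

int main() {
  const int kPointerSizeLog2 = 2;   // 4-byte tagged pointers on MIPS32.
  const uint32_t kHeaderSize = 8;   // stand-in for FixedArray::kHeaderSize.

  // With additional_index == 0 this matches the old
  // "constant_key * (1 << shift_size) + header" form.
  assert(ElementByteOffset(3, 0, kPointerSizeLog2, kHeaderSize) ==
         3 * (1 << kPointerSizeLog2) + kHeaderSize);

  // A non-zero additional_index simply pre-biases the key before scaling,
  // which is what "(ToInteger32(const_operand) + instr->additional_index())"
  // in the hunks above computes.
  assert(ElementByteOffset(3, 2, kPointerSizeLog2, kHeaderSize) ==
         (3 + 2) * (1 << kPointerSizeLog2) + kHeaderSize);
  return 0;
}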
diff --git a/deps/v8/src/mips/lithium-codegen-mips.h b/deps/v8/src/mips/lithium-codegen-mips.h
index b5082561e0..32a696bc30 100644
--- a/deps/v8/src/mips/lithium-codegen-mips.h
+++ b/deps/v8/src/mips/lithium-codegen-mips.h
@@ -43,22 +43,26 @@ class SafepointGenerator;
class LCodeGen BASE_EMBEDDED {
public:
- LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
+ LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info,
+ Zone* zone)
: chunk_(chunk),
masm_(assembler),
info_(info),
current_block_(-1),
current_instruction_(-1),
instructions_(chunk->instructions()),
- deoptimizations_(4),
- deopt_jump_table_(4),
- deoptimization_literals_(8),
+ deoptimizations_(4, zone),
+ deopt_jump_table_(4, zone),
+ deoptimization_literals_(8, zone),
inlined_function_count_(0),
scope_(info->scope()),
status_(UNUSED),
- deferred_(8),
+ translations_(zone),
+ deferred_(8, zone),
osr_pc_offset_(-1),
last_lazy_deopt_pc_(0),
+ safepoints_(zone),
+ zone_(zone),
resolver_(this),
expected_safepoint_kind_(Safepoint::kSimple) {
PopulateDeoptimizationLiteralsWithInlinedFunctions();
@@ -71,6 +75,7 @@ class LCodeGen BASE_EMBEDDED {
Isolate* isolate() const { return info_->isolate(); }
Factory* factory() const { return isolate()->factory(); }
Heap* heap() const { return isolate()->heap(); }
+ Zone* zone() const { return zone_; }
// Support for converting LOperands to assembler types.
// LOperand must be a register.
@@ -173,7 +178,7 @@ class LCodeGen BASE_EMBEDDED {
void Abort(const char* format, ...);
void Comment(const char* format, ...);
- void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code); }
+ void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
// Code generation passes. Returns true if code generation should
// continue.
@@ -212,12 +217,18 @@ class LCodeGen BASE_EMBEDDED {
int argc,
LInstruction* instr);
+ enum A1State {
+ A1_UNINITIALIZED,
+ A1_CONTAINS_TARGET
+ };
+
// Generate a direct call to a known function. Expects the function
// to be in a1.
void CallKnownFunction(Handle<JSFunction> function,
int arity,
LInstruction* instr,
- CallKind call_kind);
+ CallKind call_kind,
+ A1State a1_state);
void LoadHeapObject(Register result, Handle<HeapObject> object);
@@ -323,7 +334,8 @@ class LCodeGen BASE_EMBEDDED {
void EmitLoadFieldOrConstantFunction(Register result,
Register object,
Handle<Map> type,
- Handle<String> name);
+ Handle<String> name,
+ LEnvironment* env);
// Emits optimized code to deep-copy the contents of statically known
// object graphs (e.g. object literal boilerplate).
@@ -364,6 +376,8 @@ class LCodeGen BASE_EMBEDDED {
// itself is emitted at the end of the generated code.
SafepointTableBuilder safepoints_;
+ Zone* zone_;
+
// Compiler from a set of parallel moves to a sequential list of moves.
LGapResolver resolver_;
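The new A1State argument is a small contract between CallKnownFunction and its callers: a caller that has already placed the target in a1 (the known-function path of DoInvokeFunction) passes A1_CONTAINS_TARGET so the LoadHeapObject can be skipped, everyone else passes A1_UNINITIALIZED. A hedged, V8-free sketch of the same pattern; the names below are illustrative only:

#include <iostream>
#include <string>

enum RegisterState { kUninitialized, kContainsTarget };

// Treat "loading the target into the dedicated call register" as the
// expensive step; callers that already did it tell us so and we skip it.
void CallKnownTarget(const std::string& target, RegisterState a1_state) {
  if (a1_state == kUninitialized) {
    std::cout << "load " << target << " into a1\n";  // emitted only if needed
  }
  std::cout << "call " << target << "\n";
}

int main() {
  CallKnownTarget("f", kUninitialized);   // e.g. the DoCallConstantFunction path
  CallKnownTarget("f", kContainsTarget);  // e.g. DoInvokeFunction with a known target
  return 0;
}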
diff --git a/deps/v8/src/mips/lithium-gap-resolver-mips.cc b/deps/v8/src/mips/lithium-gap-resolver-mips.cc
index 4a5fbe39c5..87efae5f4d 100644
--- a/deps/v8/src/mips/lithium-gap-resolver-mips.cc
+++ b/deps/v8/src/mips/lithium-gap-resolver-mips.cc
@@ -35,7 +35,7 @@ namespace internal {
LGapResolver::LGapResolver(LCodeGen* owner)
: cgen_(owner),
- moves_(32),
+ moves_(32, owner->zone()),
root_index_(0),
in_cycle_(false),
saved_destination_(NULL) {}
@@ -80,7 +80,7 @@ void LGapResolver::BuildInitialMoveList(LParallelMove* parallel_move) {
const ZoneList<LMoveOperands>* moves = parallel_move->move_operands();
for (int i = 0; i < moves->length(); ++i) {
LMoveOperands move = moves->at(i);
- if (!move.IsRedundant()) moves_.Add(move);
+ if (!move.IsRedundant()) moves_.Add(move, cgen_->zone());
}
Verify();
}
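The moves_(32, owner->zone()) and Add(move, cgen_->zone()) changes follow the pattern used throughout this patch: zone lists no longer reach for an implicit per-isolate zone, the allocator is passed explicitly on construction and on every growth step. A minimal arena sketch of why Add carries the zone; BumpArena and ArenaList are simplified stand-ins, not V8 classes, and alignment is ignored for brevity:

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

// Trivial bump allocator standing in for a compilation Zone: everything
// allocated from it is released at once when the arena goes away.
class BumpArena {
 public:
  explicit BumpArena(size_t capacity) : buffer_(capacity), used_(0) {}
  void* Allocate(size_t n) {
    assert(used_ + n <= buffer_.size());
    void* p = buffer_.data() + used_;
    used_ += n;
    return p;
  }
 private:
  std::vector<uint8_t> buffer_;
  size_t used_;
};

// A list that never owns memory itself; every growth names the arena it
// should grow in, mirroring ZoneList<T>::Add(value, zone).
template <typename T>
class ArenaList {
 public:
  ArenaList(int capacity, BumpArena* arena)
      : data_(static_cast<T*>(arena->Allocate(capacity * sizeof(T)))),
        capacity_(capacity), length_(0) {}
  void Add(const T& value, BumpArena* arena) {
    if (length_ == capacity_) {
      T* grown = static_cast<T*>(arena->Allocate(capacity_ * 2 * sizeof(T)));
      for (int i = 0; i < length_; i++) grown[i] = data_[i];
      data_ = grown;
      capacity_ *= 2;
    }
    data_[length_++] = value;
  }
  int length() const { return length_; }
 private:
  T* data_;
  int capacity_;
  int length_;
};

int main() {
  BumpArena arena(1 << 16);
  ArenaList<int> moves(4, &arena);
  for (int i = 0; i < 10; i++) moves.Add(i, &arena);  // growth uses the arena
  assert(moves.length() == 10);
  return 0;
}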
diff --git a/deps/v8/src/mips/lithium-mips.cc b/deps/v8/src/mips/lithium-mips.cc
index 32c8875853..842001ddf2 100644
--- a/deps/v8/src/mips/lithium-mips.cc
+++ b/deps/v8/src/mips/lithium-mips.cc
@@ -108,22 +108,17 @@ void LInstruction::PrintTo(StringStream* stream) {
}
-template<int R, int I, int T>
-void LTemplateInstruction<R, I, T>::PrintDataTo(StringStream* stream) {
+void LInstruction::PrintDataTo(StringStream* stream) {
stream->Add("= ");
- for (int i = 0; i < inputs_.length(); i++) {
+ for (int i = 0; i < InputCount(); i++) {
if (i > 0) stream->Add(" ");
- inputs_[i]->PrintTo(stream);
+ InputAt(i)->PrintTo(stream);
}
}
-template<int R, int I, int T>
-void LTemplateInstruction<R, I, T>::PrintOutputOperandTo(StringStream* stream) {
- for (int i = 0; i < results_.length(); i++) {
- if (i > 0) stream->Add(" ");
- results_[i]->PrintTo(stream);
- }
+void LInstruction::PrintOutputOperandTo(StringStream* stream) {
+ if (HasResult()) result()->PrintTo(stream);
}
@@ -416,9 +411,9 @@ LChunk::LChunk(CompilationInfo* info, HGraph* graph)
: spill_slot_count_(0),
info_(info),
graph_(graph),
- instructions_(32),
- pointer_maps_(8),
- inlined_closures_(1) {
+ instructions_(32, graph->zone()),
+ pointer_maps_(8, graph->zone()),
+ inlined_closures_(1, graph->zone()) {
}
@@ -432,9 +427,9 @@ int LChunk::GetNextSpillIndex(bool is_double) {
LOperand* LChunk::GetNextSpillSlot(bool is_double) {
int index = GetNextSpillIndex(is_double);
if (is_double) {
- return LDoubleStackSlot::Create(index);
+ return LDoubleStackSlot::Create(index, zone());
} else {
- return LStackSlot::Create(index);
+ return LStackSlot::Create(index, zone());
}
}
@@ -479,23 +474,23 @@ void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
LInstructionGap* gap = new(graph_->zone()) LInstructionGap(block);
int index = -1;
if (instr->IsControl()) {
- instructions_.Add(gap);
+ instructions_.Add(gap, zone());
index = instructions_.length();
- instructions_.Add(instr);
+ instructions_.Add(instr, zone());
} else {
index = instructions_.length();
- instructions_.Add(instr);
- instructions_.Add(gap);
+ instructions_.Add(instr, zone());
+ instructions_.Add(gap, zone());
}
if (instr->HasPointerMap()) {
- pointer_maps_.Add(instr->pointer_map());
+ pointer_maps_.Add(instr->pointer_map(), zone());
instr->pointer_map()->set_lithium_position(index);
}
}
LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
- return LConstantOperand::Create(constant->id());
+ return LConstantOperand::Create(constant->id(), zone());
}
@@ -534,7 +529,8 @@ int LChunk::NearestGapPos(int index) const {
void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
- GetGapAt(index)->GetOrCreateParallelMove(LGap::START)->AddMove(from, to);
+ GetGapAt(index)->GetOrCreateParallelMove(
+ LGap::START, zone())->AddMove(from, to, zone());
}
@@ -732,22 +728,6 @@ LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
}
-LInstruction* LChunkBuilder::SetInstructionPendingDeoptimizationEnvironment(
- LInstruction* instr, int ast_id) {
- ASSERT(instruction_pending_deoptimization_environment_ == NULL);
- ASSERT(pending_deoptimization_ast_id_ == AstNode::kNoNumber);
- instruction_pending_deoptimization_environment_ = instr;
- pending_deoptimization_ast_id_ = ast_id;
- return instr;
-}
-
-
-void LChunkBuilder::ClearInstructionPendingDeoptimizationEnvironment() {
- instruction_pending_deoptimization_environment_ = NULL;
- pending_deoptimization_ast_id_ = AstNode::kNoNumber;
-}
-
-
LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
HInstruction* hinstr,
CanDeoptimize can_deoptimize) {
@@ -760,8 +740,10 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
if (hinstr->HasObservableSideEffects()) {
ASSERT(hinstr->next()->IsSimulate());
HSimulate* sim = HSimulate::cast(hinstr->next());
- instr = SetInstructionPendingDeoptimizationEnvironment(
- instr, sim->ast_id());
+ ASSERT(instruction_pending_deoptimization_environment_ == NULL);
+ ASSERT(pending_deoptimization_ast_id_ == AstNode::kNoNumber);
+ instruction_pending_deoptimization_environment_ = instr;
+ pending_deoptimization_ast_id_ = sim->ast_id();
}
// If instruction does not have side-effects lazy deoptimization
@@ -779,15 +761,9 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
}
-LInstruction* LChunkBuilder::MarkAsSaveDoubles(LInstruction* instr) {
- instr->MarkAsSaveDoubles();
- return instr;
-}
-
-
LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
ASSERT(!instr->HasPointerMap());
- instr->set_pointer_map(new(zone()) LPointerMap(position_));
+ instr->set_pointer_map(new(zone()) LPointerMap(position_, zone()));
return instr;
}
@@ -1010,7 +986,8 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
hydrogen_env->parameter_count(),
argument_count_,
value_count,
- outer);
+ outer,
+ zone());
int argument_index = *argument_index_accumulator;
for (int i = 0; i < value_count; ++i) {
if (hydrogen_env->is_special_index(i)) continue;
@@ -1296,6 +1273,7 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
ASSERT(instr->value()->representation().IsInteger32());
ASSERT(instr->representation().IsInteger32());
+ if (instr->HasNoUses()) return NULL;
LOperand* value = UseRegisterAtStart(instr->value());
return DefineAsRegister(new(zone()) LBitNotI(value));
}
@@ -1320,6 +1298,12 @@ LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
}
+LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
+ UNIMPLEMENTED();
+ return NULL;
+}
+
+
LInstruction* LChunkBuilder::DoMod(HMod* instr) {
if (instr->representation().IsInteger32()) {
ASSERT(instr->left()->representation().IsInteger32());
@@ -1613,7 +1597,8 @@ LInstruction* LChunkBuilder::DoValueOf(HValueOf* instr) {
LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
LOperand* object = UseFixed(instr->value(), a0);
- LDateField* result = new LDateField(object, FixedTemp(a1), instr->index());
+ LDateField* result =
+ new(zone()) LDateField(object, FixedTemp(a1), instr->index());
return MarkAsCall(DefineFixed(result, v0), instr);
}
@@ -1662,10 +1647,9 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
} else {
ASSERT(to.IsInteger32());
LOperand* value = UseRegisterAtStart(instr->value());
- bool needs_check = !instr->value()->type().IsSmi();
LInstruction* res = NULL;
- if (!needs_check) {
- res = DefineAsRegister(new(zone()) LSmiUntag(value, needs_check));
+ if (instr->value()->type().IsSmi()) {
+ res = DefineAsRegister(new(zone()) LSmiUntag(value, false));
} else {
LOperand* temp1 = TempRegister();
LOperand* temp2 = instr->CanTruncateToInt32() ? TempRegister()
@@ -1754,9 +1738,9 @@ LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
}
-LInstruction* LChunkBuilder::DoCheckMap(HCheckMap* instr) {
+LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- LInstruction* result = new(zone()) LCheckMap(value);
+ LInstruction* result = new(zone()) LCheckMaps(value);
return AssignEnvironment(result);
}
@@ -2041,8 +2025,9 @@ LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
LInstruction* LChunkBuilder::DoTransitionElementsKind(
HTransitionElementsKind* instr) {
- if (instr->original_map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS &&
- instr->transitioned_map()->elements_kind() == FAST_ELEMENTS) {
+ ElementsKind from_kind = instr->original_map()->elements_kind();
+ ElementsKind to_kind = instr->transitioned_map()->elements_kind();
+ if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
LOperand* object = UseRegister(instr->object());
LOperand* new_map_reg = TempRegister();
LTransitionElementsKind* result =
@@ -2063,16 +2048,28 @@ LInstruction* LChunkBuilder::DoTransitionElementsKind(
LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
bool needs_write_barrier = instr->NeedsWriteBarrier();
-
- LOperand* obj = needs_write_barrier
- ? UseTempRegister(instr->object())
- : UseRegisterAtStart(instr->object());
+ bool needs_write_barrier_for_map = !instr->transition().is_null() &&
+ instr->NeedsWriteBarrierForMap();
+
+ LOperand* obj;
+ if (needs_write_barrier) {
+ obj = instr->is_in_object()
+ ? UseRegister(instr->object())
+ : UseTempRegister(instr->object());
+ } else {
+ obj = needs_write_barrier_for_map
+ ? UseRegister(instr->object())
+ : UseRegisterAtStart(instr->object());
+ }
LOperand* val = needs_write_barrier
? UseTempRegister(instr->value())
: UseRegister(instr->value());
- return new(zone()) LStoreNamedField(obj, val);
+ // We need a temporary register for write barrier of the map field.
+ LOperand* temp = needs_write_barrier_for_map ? TempRegister() : NULL;
+
+ return new(zone()) LStoreNamedField(obj, val, temp);
}
@@ -2115,8 +2112,8 @@ LInstruction* LChunkBuilder::DoStringLength(HStringLength* instr) {
LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
- LAllocateObject* result = new(zone()) LAllocateObject(
- TempRegister(), TempRegister());
+ LAllocateObject* result =
+ new(zone()) LAllocateObject(TempRegister(), TempRegister());
return AssignPointerMap(DefineAsRegister(result));
}
@@ -2247,9 +2244,12 @@ LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
if (pending_deoptimization_ast_id_ == instr->ast_id()) {
LInstruction* result = new(zone()) LLazyBailout;
result = AssignEnvironment(result);
+ // Store the lazy deopt environment with the instruction if needed. Right
+ // now it is only used for LInstanceOfKnownGlobal.
instruction_pending_deoptimization_environment_->
- set_deoptimization_environment(result->environment());
- ClearInstructionPendingDeoptimizationEnvironment();
+ SetDeferredLazyDeoptimizationEnvironment(result->environment());
+ instruction_pending_deoptimization_environment_ = NULL;
+ pending_deoptimization_ast_id_ = AstNode::kNoNumber;
return result;
}
@@ -2276,8 +2276,8 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
undefined,
instr->call_kind(),
instr->is_construct());
- if (instr->arguments() != NULL) {
- inner->Bind(instr->arguments(), graph()->GetArgumentsObject());
+ if (instr->arguments_var() != NULL) {
+ inner->Bind(instr->arguments_var(), graph()->GetArgumentsObject());
}
current_block_->UpdateEnvironment(inner);
chunk_->AddInlinedClosure(instr->closure());
@@ -2286,10 +2286,21 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
+ LInstruction* pop = NULL;
+
+ HEnvironment* env = current_block_->last_environment();
+
+ if (instr->arguments_pushed()) {
+ int argument_count = env->arguments_environment()->parameter_count();
+ pop = new(zone()) LDrop(argument_count);
+ argument_count_ -= argument_count;
+ }
+
HEnvironment* outer = current_block_->last_environment()->
DiscardInlined(false);
current_block_->UpdateEnvironment(outer);
- return NULL;
+
+ return pop;
}
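DoLeaveInlined now returns an LDrop instead of NULL when the inlined call pushed its arguments: the parameter count of the arguments environment is popped and argument_count_ is decremented to keep the simulated stack height honest. A small stand-alone sketch of that bookkeeping, with illustrative names:

#include <cassert>
#include <vector>

// Simulated operand stack plus the builder's running count of pushed
// arguments; leaving an inlined frame must drop what entering it pushed.
struct Builder {
  std::vector<int> stack;
  int argument_count = 0;

  void PushArgument(int value) {
    stack.push_back(value);
    ++argument_count;
  }

  // Mirrors emitting LDrop(count) in DoLeaveInlined: pop `count` slots and
  // keep argument_count consistent with the real stack height.
  void Drop(int count) {
    assert(count <= static_cast<int>(stack.size()));
    stack.resize(stack.size() - count);
    argument_count -= count;
  }
};

int main() {
  Builder b;
  b.PushArgument(1);
  b.PushArgument(2);   // two arguments pushed for an inlined call
  b.Drop(2);           // leaving the inlined frame drops both
  assert(b.stack.empty() && b.argument_count == 0);
  return 0;
}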
diff --git a/deps/v8/src/mips/lithium-mips.h b/deps/v8/src/mips/lithium-mips.h
index 5a7bf4d941..e21c921eec 100644
--- a/deps/v8/src/mips/lithium-mips.h
+++ b/deps/v8/src/mips/lithium-mips.h
@@ -71,7 +71,7 @@ class LCodeGen;
V(CallStub) \
V(CheckFunction) \
V(CheckInstanceType) \
- V(CheckMap) \
+ V(CheckMaps) \
V(CheckNonSmi) \
V(CheckPrototypeMaps) \
V(CheckSmi) \
@@ -179,7 +179,8 @@ class LCodeGen;
V(CheckMapValue) \
V(LoadFieldByIndex) \
V(DateField) \
- V(WrapReceiver)
+ V(WrapReceiver) \
+ V(Drop)
#define DECLARE_CONCRETE_INSTRUCTION(type, mnemonic) \
virtual Opcode opcode() const { return LInstruction::k##type; } \
@@ -202,15 +203,14 @@ class LInstruction: public ZoneObject {
LInstruction()
: environment_(NULL),
hydrogen_value_(NULL),
- is_call_(false),
- is_save_doubles_(false) { }
+ is_call_(false) { }
virtual ~LInstruction() { }
virtual void CompileToNative(LCodeGen* generator) = 0;
virtual const char* Mnemonic() const = 0;
virtual void PrintTo(StringStream* stream);
- virtual void PrintDataTo(StringStream* stream) = 0;
- virtual void PrintOutputOperandTo(StringStream* stream) = 0;
+ virtual void PrintDataTo(StringStream* stream);
+ virtual void PrintOutputOperandTo(StringStream* stream);
enum Opcode {
// Declare a unique enum value for each instruction.
@@ -245,22 +245,12 @@ class LInstruction: public ZoneObject {
void set_hydrogen_value(HValue* value) { hydrogen_value_ = value; }
HValue* hydrogen_value() const { return hydrogen_value_; }
- void set_deoptimization_environment(LEnvironment* env) {
- deoptimization_environment_.set(env);
- }
- LEnvironment* deoptimization_environment() const {
- return deoptimization_environment_.get();
- }
- bool HasDeoptimizationEnvironment() const {
- return deoptimization_environment_.is_set();
- }
+ virtual void SetDeferredLazyDeoptimizationEnvironment(LEnvironment* env) { }
void MarkAsCall() { is_call_ = true; }
- void MarkAsSaveDoubles() { is_save_doubles_ = true; }
// Interface to the register allocator and iterators.
bool IsMarkedAsCall() const { return is_call_; }
- bool IsMarkedAsSaveDoubles() const { return is_save_doubles_; }
virtual bool HasResult() const = 0;
virtual LOperand* result() = 0;
@@ -281,7 +271,6 @@ class LInstruction: public ZoneObject {
LEnvironment* environment_;
SetOncePointer<LPointerMap> pointer_map_;
HValue* hydrogen_value_;
- SetOncePointer<LEnvironment> deoptimization_environment_;
bool is_call_;
bool is_save_doubles_;
};
@@ -305,9 +294,6 @@ class LTemplateInstruction: public LInstruction {
int TempCount() { return T; }
LOperand* TempAt(int i) { return temps_[i]; }
- virtual void PrintDataTo(StringStream* stream);
- virtual void PrintOutputOperandTo(StringStream* stream);
-
protected:
EmbeddedContainer<LOperand*, R> results_;
EmbeddedContainer<LOperand*, I> inputs_;
@@ -346,8 +332,10 @@ class LGap: public LTemplateInstruction<0, 0, 0> {
LAST_INNER_POSITION = AFTER
};
- LParallelMove* GetOrCreateParallelMove(InnerPosition pos) {
- if (parallel_moves_[pos] == NULL) parallel_moves_[pos] = new LParallelMove;
+ LParallelMove* GetOrCreateParallelMove(InnerPosition pos, Zone* zone) {
+ if (parallel_moves_[pos] == NULL) {
+ parallel_moves_[pos] = new(zone) LParallelMove(zone);
+ }
return parallel_moves_[pos];
}
@@ -533,9 +521,8 @@ class LArgumentsLength: public LTemplateInstruction<1, 1, 0> {
class LArgumentsElements: public LTemplateInstruction<1, 0, 0> {
public:
- LArgumentsElements() { }
-
DECLARE_CONCRETE_INSTRUCTION(ArgumentsElements, "arguments-elements")
+ DECLARE_HYDROGEN_ACCESSOR(ArgumentsElements)
};
@@ -833,6 +820,15 @@ class LInstanceOfKnownGlobal: public LTemplateInstruction<1, 1, 1> {
DECLARE_HYDROGEN_ACCESSOR(InstanceOfKnownGlobal)
Handle<JSFunction> function() const { return hydrogen()->function(); }
+ LEnvironment* GetDeferredLazyDeoptimizationEnvironment() {
+ return lazy_deopt_env_;
+ }
+ virtual void SetDeferredLazyDeoptimizationEnvironment(LEnvironment* env) {
+ lazy_deopt_env_ = env;
+ }
+
+ private:
+ LEnvironment* lazy_deopt_env_;
};
@@ -1207,6 +1203,7 @@ class LLoadKeyedFastElement: public LTemplateInstruction<1, 2, 0> {
LOperand* elements() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
+ uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
@@ -1223,13 +1220,14 @@ class LLoadKeyedFastDoubleElement: public LTemplateInstruction<1, 2, 0> {
LOperand* elements() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
+ uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
class LLoadKeyedSpecializedArrayElement: public LTemplateInstruction<1, 2, 0> {
public:
- LLoadKeyedSpecializedArrayElement(LOperand* external_pointer,
- LOperand* key) {
+ LLoadKeyedSpecializedArrayElement(LOperand* external_pointer,
+ LOperand* key) {
inputs_[0] = external_pointer;
inputs_[1] = key;
}
@@ -1243,6 +1241,7 @@ class LLoadKeyedSpecializedArrayElement: public LTemplateInstruction<1, 2, 0> {
ElementsKind elements_kind() const {
return hydrogen()->elements_kind();
}
+ uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
@@ -1358,6 +1357,19 @@ class LPushArgument: public LTemplateInstruction<0, 1, 0> {
};
+class LDrop: public LTemplateInstruction<0, 0, 0> {
+ public:
+ explicit LDrop(int count) : count_(count) { }
+
+ int count() const { return count_; }
+
+ DECLARE_CONCRETE_INSTRUCTION(Drop, "drop")
+
+ private:
+ int count_;
+};
+
+
class LThisFunction: public LTemplateInstruction<1, 0, 0> {
public:
DECLARE_CONCRETE_INSTRUCTION(ThisFunction, "this-function")
@@ -1440,6 +1452,7 @@ class LInvokeFunction: public LTemplateInstruction<1, 1, 0> {
virtual void PrintDataTo(StringStream* stream);
int arity() const { return hydrogen()->argument_count() - 1; }
+ Handle<JSFunction> known_function() { return hydrogen()->known_function(); }
};
@@ -1639,11 +1652,12 @@ class LSmiUntag: public LTemplateInstruction<1, 1, 0> {
};
-class LStoreNamedField: public LTemplateInstruction<0, 2, 0> {
+class LStoreNamedField: public LTemplateInstruction<0, 2, 1> {
public:
- LStoreNamedField(LOperand* obj, LOperand* val) {
+ LStoreNamedField(LOperand* obj, LOperand* val, LOperand* temp) {
inputs_[0] = obj;
inputs_[1] = val;
+ temps_[0] = temp;
}
DECLARE_CONCRETE_INSTRUCTION(StoreNamedField, "store-named-field")
@@ -1697,6 +1711,7 @@ class LStoreKeyedFastElement: public LTemplateInstruction<0, 3, 0> {
LOperand* object() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
LOperand* value() { return inputs_[2]; }
+ uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
@@ -1719,6 +1734,9 @@ class LStoreKeyedFastDoubleElement: public LTemplateInstruction<0, 3, 0> {
LOperand* elements() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
LOperand* value() { return inputs_[2]; }
+ uint32_t additional_index() const { return hydrogen()->index_offset(); }
+
+ bool NeedsCanonicalization() { return hydrogen()->NeedsCanonicalization(); }
};
@@ -1761,6 +1779,7 @@ class LStoreKeyedSpecializedArrayElement: public LTemplateInstruction<0, 3, 0> {
ElementsKind elements_kind() const {
return hydrogen()->elements_kind();
}
+ uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
@@ -1869,14 +1888,14 @@ class LCheckInstanceType: public LTemplateInstruction<0, 1, 0> {
};
-class LCheckMap: public LTemplateInstruction<0, 1, 0> {
+class LCheckMaps: public LTemplateInstruction<0, 1, 0> {
public:
- explicit LCheckMap(LOperand* value) {
+ explicit LCheckMaps(LOperand* value) {
inputs_[0] = value;
}
- DECLARE_CONCRETE_INSTRUCTION(CheckMap, "check-map")
- DECLARE_HYDROGEN_ACCESSOR(CheckMap)
+ DECLARE_CONCRETE_INSTRUCTION(CheckMaps, "check-maps")
+ DECLARE_HYDROGEN_ACCESSOR(CheckMaps)
};
@@ -2216,9 +2235,11 @@ class LChunk: public ZoneObject {
}
void AddInlinedClosure(Handle<JSFunction> closure) {
- inlined_closures_.Add(closure);
+ inlined_closures_.Add(closure, zone());
}
+ Zone* zone() const { return graph_->zone(); }
+
private:
int spill_slot_count_;
CompilationInfo* info_;
@@ -2235,7 +2256,7 @@ class LChunkBuilder BASE_EMBEDDED {
: chunk_(NULL),
info_(info),
graph_(graph),
- zone_(graph->isolate()->zone()),
+ zone_(graph->zone()),
status_(UNUSED),
current_instruction_(NULL),
current_block_(NULL),
@@ -2349,11 +2370,6 @@ class LChunkBuilder BASE_EMBEDDED {
LInstruction* instr,
HInstruction* hinstr,
CanDeoptimize can_deoptimize = CANNOT_DEOPTIMIZE_EAGERLY);
- LInstruction* MarkAsSaveDoubles(LInstruction* instr);
-
- LInstruction* SetInstructionPendingDeoptimizationEnvironment(
- LInstruction* instr, int ast_id);
- void ClearInstructionPendingDeoptimizationEnvironment();
LEnvironment* CreateEnvironment(HEnvironment* hydrogen_env,
int* argument_index_accumulator);
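LCheckMap becoming LCheckMaps means the codegen (DoCheckMaps earlier in this patch) compares the object's map against a whole map set: every map except the last branches to a shared success label on a hit, and only the final comparison is allowed to deoptimize on a miss. A sketch of that control flow in plain C++; MapId and the deopt comments are stand-ins, not V8 API:

#include <cassert>
#include <vector>

using MapId = int;

// Returns true if object_map matches any map in the set. The generated code
// branches to a shared `success` label for the first n-1 maps and only the
// last comparison feeds DeoptimizeIf.
bool CheckMaps(MapId object_map, const std::vector<MapId>& map_set) {
  for (size_t i = 0; i + 1 < map_set.size(); i++) {
    if (object_map == map_set[i]) return true;   // branch to &success
  }
  // Last map: a mismatch here is the deopt case.
  return object_map == map_set.back();
}

int main() {
  std::vector<MapId> maps = {7, 11, 13};
  assert(CheckMaps(11, maps));    // hits one of the early comparisons
  assert(CheckMaps(13, maps));    // decided by the final, deopt-guarded compare
  assert(!CheckMaps(42, maps));   // would deoptimize in the generated code
  return 0;
}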
diff --git a/deps/v8/src/mips/macro-assembler-mips.cc b/deps/v8/src/mips/macro-assembler-mips.cc
index e93a4175b3..51b3a3823f 100644
--- a/deps/v8/src/mips/macro-assembler-mips.cc
+++ b/deps/v8/src/mips/macro-assembler-mips.cc
@@ -3341,33 +3341,39 @@ void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
void MacroAssembler::CheckFastElements(Register map,
Register scratch,
Label* fail) {
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
- STATIC_ASSERT(FAST_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
+ STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_ELEMENTS == 2);
+ STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
lbu(scratch, FieldMemOperand(map, Map::kBitField2Offset));
- Branch(fail, hi, scratch, Operand(Map::kMaximumBitField2FastElementValue));
+ Branch(fail, hi, scratch,
+ Operand(Map::kMaximumBitField2FastHoleyElementValue));
}
void MacroAssembler::CheckFastObjectElements(Register map,
Register scratch,
Label* fail) {
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
- STATIC_ASSERT(FAST_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
+ STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_ELEMENTS == 2);
+ STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
lbu(scratch, FieldMemOperand(map, Map::kBitField2Offset));
Branch(fail, ls, scratch,
- Operand(Map::kMaximumBitField2FastSmiOnlyElementValue));
+ Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
Branch(fail, hi, scratch,
- Operand(Map::kMaximumBitField2FastElementValue));
+ Operand(Map::kMaximumBitField2FastHoleyElementValue));
}
-void MacroAssembler::CheckFastSmiOnlyElements(Register map,
- Register scratch,
- Label* fail) {
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
+void MacroAssembler::CheckFastSmiElements(Register map,
+ Register scratch,
+ Label* fail) {
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
+ STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
lbu(scratch, FieldMemOperand(map, Map::kBitField2Offset));
Branch(fail, hi, scratch,
- Operand(Map::kMaximumBitField2FastSmiOnlyElementValue));
+ Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
}
@@ -3467,28 +3473,33 @@ void MacroAssembler::CompareMapAndBranch(Register obj,
Label* branch_to,
CompareMapMode mode) {
lw(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
+ CompareMapAndBranch(scratch, map, early_success, cond, branch_to, mode);
+}
+
+
+void MacroAssembler::CompareMapAndBranch(Register obj_map,
+ Handle<Map> map,
+ Label* early_success,
+ Condition cond,
+ Label* branch_to,
+ CompareMapMode mode) {
Operand right = Operand(map);
if (mode == ALLOW_ELEMENT_TRANSITION_MAPS) {
- Map* transitioned_fast_element_map(
- map->LookupElementsTransitionMap(FAST_ELEMENTS, NULL));
- ASSERT(transitioned_fast_element_map == NULL ||
- map->elements_kind() != FAST_ELEMENTS);
- if (transitioned_fast_element_map != NULL) {
- Branch(early_success, eq, scratch, right);
- right = Operand(Handle<Map>(transitioned_fast_element_map));
- }
-
- Map* transitioned_double_map(
- map->LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, NULL));
- ASSERT(transitioned_double_map == NULL ||
- map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
- if (transitioned_double_map != NULL) {
- Branch(early_success, eq, scratch, right);
- right = Operand(Handle<Map>(transitioned_double_map));
+ ElementsKind kind = map->elements_kind();
+ if (IsFastElementsKind(kind)) {
+ bool packed = IsFastPackedElementsKind(kind);
+ Map* current_map = *map;
+ while (CanTransitionToMoreGeneralFastElementsKind(kind, packed)) {
+ kind = GetNextMoreGeneralFastElementsKind(kind, packed);
+ current_map = current_map->LookupElementsTransitionMap(kind);
+ if (!current_map) break;
+ Branch(early_success, eq, obj_map, right);
+ right = Operand(Handle<Map>(current_map));
+ }
}
}
- Branch(branch_to, cond, scratch, right);
+ Branch(branch_to, cond, obj_map, right);
}
@@ -4443,27 +4454,37 @@ void MacroAssembler::LoadTransitionedArrayMapConditional(
lw(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset));
// Check that the function's map is the same as the expected cached map.
- int expected_index =
- Context::GetContextMapIndexFromElementsKind(expected_kind);
- lw(at, MemOperand(scratch, Context::SlotOffset(expected_index)));
- Branch(no_map_match, ne, map_in_out, Operand(at));
+ lw(scratch,
+ MemOperand(scratch,
+ Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
+ size_t offset = expected_kind * kPointerSize +
+ FixedArrayBase::kHeaderSize;
+ Branch(no_map_match, ne, map_in_out, Operand(scratch));
// Use the transitioned cached map.
- int trans_index =
- Context::GetContextMapIndexFromElementsKind(transitioned_kind);
- lw(map_in_out, MemOperand(scratch, Context::SlotOffset(trans_index)));
+ offset = transitioned_kind * kPointerSize +
+ FixedArrayBase::kHeaderSize;
+ lw(map_in_out, FieldMemOperand(scratch, offset));
}
void MacroAssembler::LoadInitialArrayMap(
- Register function_in, Register scratch, Register map_out) {
+ Register function_in, Register scratch,
+ Register map_out, bool can_have_holes) {
ASSERT(!function_in.is(map_out));
Label done;
lw(map_out, FieldMemOperand(function_in,
JSFunction::kPrototypeOrInitialMapOffset));
if (!FLAG_smi_only_arrays) {
- LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
- FAST_ELEMENTS,
+ ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
+ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+ kind,
+ map_out,
+ scratch,
+ &done);
+ } else if (can_have_holes) {
+ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+ FAST_HOLEY_SMI_ELEMENTS,
map_out,
scratch,
&done);
@@ -5378,7 +5399,7 @@ CodePatcher::CodePatcher(byte* address, int instructions)
: address_(address),
instructions_(instructions),
size_(instructions * Assembler::kInstrSize),
- masm_(Isolate::Current(), address, size_ + Assembler::kGap) {
+ masm_(NULL, address, size_ + Assembler::kGap) {
// Create a new macro assembler pointing to the address of the code to patch.
// The size is adjusted with kGap in order for the assembler to generate size
// bytes of instructions without failing with buffer size constraints.
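CompareMapAndBranch with ALLOW_ELEMENT_TRANSITION_MAPS no longer hard-codes the SMI_ONLY→FAST and SMI_ONLY→DOUBLE pairs; it walks the chain of strictly more general fast elements kinds, comparing each transitioned map until the lookup comes back empty. A simplified walk over a reduced lattice to show the loop shape; the enum, helpers, and map table below are illustrative and deliberately omit the HOLEY_* interleaving of the real ordering:

#include <cassert>
#include <map>

// Reduced stand-in for the fast elements kinds named in these hunks,
// keeping only the packed chain SMI -> DOUBLE -> OBJECT.
enum Kind { kSmi, kDouble, kObject };

bool CanTransitionToMoreGeneral(Kind kind) { return kind != kObject; }
Kind NextMoreGeneral(Kind kind) { return static_cast<Kind>(kind + 1); }

// Accept the object's current map, or any transitioned map reachable by
// repeatedly moving to a more general kind, stopping when no transitioned
// map exists for the next kind -- the shape of the new while loop above.
bool MapMatchesAllowingTransitions(int object_map,
                                   int checked_map,
                                   Kind checked_kind,
                                   const std::map<Kind, int>& transitions) {
  if (object_map == checked_map) return true;        // branch to early_success
  Kind kind = checked_kind;
  while (CanTransitionToMoreGeneral(kind)) {
    kind = NextMoreGeneral(kind);
    auto it = transitions.find(kind);
    if (it == transitions.end()) break;               // no transitioned map
    if (object_map == it->second) return true;        // another early success
  }
  return false;                                       // final branch fails
}

int main() {
  // Hypothetical map identities for one transition chain.
  const int kSmiMap = 1, kDoubleMap = 2, kObjectMap = 3;
  std::map<Kind, int> transitions = {{kDouble, kDoubleMap},
                                     {kObject, kObjectMap}};

  assert(MapMatchesAllowingTransitions(kSmiMap, kSmiMap, kSmi, transitions));
  assert(MapMatchesAllowingTransitions(kObjectMap, kSmiMap, kSmi, transitions));
  assert(!MapMatchesAllowingTransitions(99, kSmiMap, kSmi, transitions));
  return 0;
}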
diff --git a/deps/v8/src/mips/macro-assembler-mips.h b/deps/v8/src/mips/macro-assembler-mips.h
index f57418f386..bb3dc01e39 100644
--- a/deps/v8/src/mips/macro-assembler-mips.h
+++ b/deps/v8/src/mips/macro-assembler-mips.h
@@ -819,7 +819,8 @@ class MacroAssembler: public Assembler {
// Load the initial map for new Arrays from a JSFunction.
void LoadInitialArrayMap(Register function_in,
Register scratch,
- Register map_out);
+ Register map_out,
+ bool can_have_holes);
void LoadGlobalFunction(int index, Register function);
@@ -961,9 +962,9 @@ class MacroAssembler: public Assembler {
// Check if a map for a JSObject indicates that the object has fast smi only
// elements. Jump to the specified label if it does not.
- void CheckFastSmiOnlyElements(Register map,
- Register scratch,
- Label* fail);
+ void CheckFastSmiElements(Register map,
+ Register scratch,
+ Label* fail);
// Check to see if maybe_number can be stored as a double in
// FastDoubleElements. If it can, store it at the index specified by key in
@@ -991,6 +992,15 @@ class MacroAssembler: public Assembler {
Label* branch_to,
CompareMapMode mode = REQUIRE_EXACT_MAP);
+ // As above, but the map of the object is already loaded into the register
+ // which is preserved by the code generated.
+ void CompareMapAndBranch(Register obj_map,
+ Handle<Map> map,
+ Label* early_success,
+ Condition cond,
+ Label* branch_to,
+ CompareMapMode mode = REQUIRE_EXACT_MAP);
+
// Check if the map of an object is equal to a specified map and branch to
// label if not. Skip the smi check if not required (object is known to be a
// heap object). If mode is ALLOW_ELEMENT_TRANSITION_MAPS, then also match
diff --git a/deps/v8/src/mips/regexp-macro-assembler-mips.cc b/deps/v8/src/mips/regexp-macro-assembler-mips.cc
index ae4da936ce..21d1ce11b5 100644
--- a/deps/v8/src/mips/regexp-macro-assembler-mips.cc
+++ b/deps/v8/src/mips/regexp-macro-assembler-mips.cc
@@ -43,44 +43,49 @@ namespace internal {
#ifndef V8_INTERPRETED_REGEXP
/*
* This assembler uses the following register assignment convention
+ * - t7 : Temporarily stores the index of capture start after a matching pass
+ * for a global regexp.
* - t1 : Pointer to current code object (Code*) including heap object tag.
* - t2 : Current position in input, as negative offset from end of string.
* Please notice that this is the byte offset, not the character offset!
* - t3 : Currently loaded character. Must be loaded using
* LoadCurrentCharacter before using any of the dispatch methods.
- * - t4 : points to tip of backtrack stack
+ * - t4 : Points to tip of backtrack stack
* - t5 : Unused.
* - t6 : End of input (points to byte after last character in input).
* - fp : Frame pointer. Used to access arguments, local variables and
* RegExp registers.
- * - sp : points to tip of C stack.
+ * - sp : Points to tip of C stack.
*
* The remaining registers are free for computations.
* Each call to a public method should retain this convention.
*
* The stack will have the following structure:
*
- * - fp[56] direct_call (if 1, direct call from JavaScript code,
+ * - fp[64] Isolate* isolate (address of the current isolate)
+ * - fp[60] direct_call (if 1, direct call from JavaScript code,
* if 0, call through the runtime system).
- * - fp[52] stack_area_base (High end of the memory area to use as
+ * - fp[56] stack_area_base (High end of the memory area to use as
* backtracking stack).
+ * - fp[52] capture array size (may fit multiple sets of matches)
* - fp[48] int* capture_array (int[num_saved_registers_], for output).
* - fp[44] secondary link/return address used by native call.
* --- sp when called ---
- * - fp[40] return address (lr).
- * - fp[36] old frame pointer (r11).
+ * - fp[40] return address (lr).
+ * - fp[36] old frame pointer (r11).
* - fp[0..32] backup of registers s0..s7.
* --- frame pointer ----
- * - fp[-4] end of input (Address of end of string).
- * - fp[-8] start of input (Address of first character in string).
+ * - fp[-4] end of input (address of end of string).
+ * - fp[-8] start of input (address of first character in string).
* - fp[-12] start index (character index of start).
* - fp[-16] void* input_string (location of a handle containing the string).
- * - fp[-20] Offset of location before start of input (effectively character
+ * - fp[-20] success counter (only for global regexps to count matches).
+ * - fp[-24] Offset of location before start of input (effectively character
* position -1). Used to initialize capture registers to a
* non-position.
- * - fp[-24] At start (if 1, we are starting at the start of the
+ * - fp[-28] At start (if 1, we are starting at the start of the
* string, otherwise 0)
- * - fp[-28] register 0 (Only positions must be stored in the first
+ * - fp[-32] register 0 (Only positions must be stored in the first
* - register 1 num_saved_registers_ registers)
* - ...
* - register num_registers-1
@@ -114,8 +119,10 @@ namespace internal {
RegExpMacroAssemblerMIPS::RegExpMacroAssemblerMIPS(
Mode mode,
- int registers_to_save)
- : masm_(new MacroAssembler(Isolate::Current(), NULL, kRegExpCodeSize)),
+ int registers_to_save,
+ Zone* zone)
+ : NativeRegExpMacroAssembler(zone),
+ masm_(new MacroAssembler(Isolate::Current(), NULL, kRegExpCodeSize)),
mode_(mode),
num_registers_(registers_to_save),
num_saved_registers_(registers_to_save),
@@ -158,7 +165,7 @@ int RegExpMacroAssemblerMIPS::stack_limit_slack() {
void RegExpMacroAssemblerMIPS::AdvanceCurrentPosition(int by) {
if (by != 0) {
__ Addu(current_input_offset(),
- current_input_offset(), Operand(by * char_size()));
+ current_input_offset(), Operand(by * char_size()));
}
}
@@ -201,8 +208,8 @@ void RegExpMacroAssemblerMIPS::CheckCharacterGT(uc16 limit, Label* on_greater) {
void RegExpMacroAssemblerMIPS::CheckAtStart(Label* on_at_start) {
Label not_at_start;
// Did we start the match at the start of the string at all?
- __ lw(a0, MemOperand(frame_pointer(), kAtStart));
- BranchOrBacktrack(&not_at_start, eq, a0, Operand(zero_reg));
+ __ lw(a0, MemOperand(frame_pointer(), kStartIndex));
+ BranchOrBacktrack(&not_at_start, ne, a0, Operand(zero_reg));
// If we did, are we still at the start of the input?
__ lw(a1, MemOperand(frame_pointer(), kInputStart));
@@ -214,8 +221,8 @@ void RegExpMacroAssemblerMIPS::CheckAtStart(Label* on_at_start) {
void RegExpMacroAssemblerMIPS::CheckNotAtStart(Label* on_not_at_start) {
// Did we start the match at the start of the string at all?
- __ lw(a0, MemOperand(frame_pointer(), kAtStart));
- BranchOrBacktrack(on_not_at_start, eq, a0, Operand(zero_reg));
+ __ lw(a0, MemOperand(frame_pointer(), kStartIndex));
+ BranchOrBacktrack(on_not_at_start, ne, a0, Operand(zero_reg));
// If we did, are we still at the start of the input?
__ lw(a1, MemOperand(frame_pointer(), kInputStart));
__ Addu(a0, end_of_input_address(), Operand(current_input_offset()));
@@ -229,9 +236,9 @@ void RegExpMacroAssemblerMIPS::CheckCharacterLT(uc16 limit, Label* on_less) {
void RegExpMacroAssemblerMIPS::CheckCharacters(Vector<const uc16> str,
- int cp_offset,
- Label* on_failure,
- bool check_end_of_string) {
+ int cp_offset,
+ Label* on_failure,
+ bool check_end_of_string) {
if (on_failure == NULL) {
// Instead of inlining a backtrack for each test, (re)use the global
// backtrack target.
@@ -444,32 +451,27 @@ void RegExpMacroAssemblerMIPS::CheckNotBackReference(
}
-void RegExpMacroAssemblerMIPS::CheckNotRegistersEqual(int reg1,
- int reg2,
- Label* on_not_equal) {
- UNIMPLEMENTED_MIPS();
-}
-
-
void RegExpMacroAssemblerMIPS::CheckNotCharacter(uint32_t c,
- Label* on_not_equal) {
+ Label* on_not_equal) {
BranchOrBacktrack(on_not_equal, ne, current_character(), Operand(c));
}
void RegExpMacroAssemblerMIPS::CheckCharacterAfterAnd(uint32_t c,
- uint32_t mask,
- Label* on_equal) {
+ uint32_t mask,
+ Label* on_equal) {
__ And(a0, current_character(), Operand(mask));
- BranchOrBacktrack(on_equal, eq, a0, Operand(c));
+ Operand rhs = (c == 0) ? Operand(zero_reg) : Operand(c);
+ BranchOrBacktrack(on_equal, eq, a0, rhs);
}
void RegExpMacroAssemblerMIPS::CheckNotCharacterAfterAnd(uint32_t c,
- uint32_t mask,
- Label* on_not_equal) {
+ uint32_t mask,
+ Label* on_not_equal) {
__ And(a0, current_character(), Operand(mask));
- BranchOrBacktrack(on_not_equal, ne, a0, Operand(c));
+ Operand rhs = (c == 0) ? Operand(zero_reg) : Operand(c);
+ BranchOrBacktrack(on_not_equal, ne, a0, rhs);
}
@@ -478,12 +480,51 @@ void RegExpMacroAssemblerMIPS::CheckNotCharacterAfterMinusAnd(
uc16 minus,
uc16 mask,
Label* on_not_equal) {
- UNIMPLEMENTED_MIPS();
+ ASSERT(minus < String::kMaxUtf16CodeUnit);
+ __ Subu(a0, current_character(), Operand(minus));
+ __ And(a0, a0, Operand(mask));
+ BranchOrBacktrack(on_not_equal, ne, a0, Operand(c));
+}
+
+
+void RegExpMacroAssemblerMIPS::CheckCharacterInRange(
+ uc16 from,
+ uc16 to,
+ Label* on_in_range) {
+ __ Subu(a0, current_character(), Operand(from));
+ // Unsigned lower-or-same condition.
+ BranchOrBacktrack(on_in_range, ls, a0, Operand(to - from));
+}
+
+
+void RegExpMacroAssemblerMIPS::CheckCharacterNotInRange(
+ uc16 from,
+ uc16 to,
+ Label* on_not_in_range) {
+ __ Subu(a0, current_character(), Operand(from));
+ // Unsigned higher condition.
+ BranchOrBacktrack(on_not_in_range, hi, a0, Operand(to - from));
+}
+
+
+void RegExpMacroAssemblerMIPS::CheckBitInTable(
+ Handle<ByteArray> table,
+ Label* on_bit_set) {
+ __ li(a0, Operand(table));
+ if (mode_ != ASCII || kTableMask != String::kMaxAsciiCharCode) {
+ __ And(a1, current_character(), Operand(kTableSize - 1));
+ __ Addu(a0, a0, a1);
+ } else {
+ __ Addu(a0, a0, current_character());
+ }
+
+ __ lbu(a0, FieldMemOperand(a0, ByteArray::kHeaderSize));
+ BranchOrBacktrack(on_bit_set, ne, a0, Operand(zero_reg));
}
bool RegExpMacroAssemblerMIPS::CheckSpecialCharacterClass(uc16 type,
- Label* on_no_match) {
+ Label* on_no_match) {
// Range checks (c in min..max) are generally implemented by an unsigned
// (c - min) <= (max - min) check.
switch (type) {
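The comment a few lines above names the trick used by CheckCharacterInRange, CheckCharacterNotInRange, and CheckSpecialCharacterClass: the two-sided test min <= c && c <= max collapses into one unsigned comparison (c - min) <= (max - min), because any c below min wraps around to a huge unsigned value. A quick stand-alone check of that identity:

#include <cassert>
#include <cstdint>

// One unsigned compare replaces the two-sided range test, exactly as the
// "(c - min) <= (max - min)" comment describes.
bool InRange(uint32_t c, uint32_t min, uint32_t max) {
  return (c - min) <= (max - min);   // c below min wraps to a huge value
}

int main() {
  // 'a'..'z', the shape used when compiling character-class checks.
  for (uint32_t c = 0; c < 0x10000; c++) {
    bool two_sided = (c >= 'a' && c <= 'z');
    assert(InRange(c, 'a', 'z') == two_sided);
  }
  return 0;
}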
@@ -599,6 +640,7 @@ void RegExpMacroAssemblerMIPS::Fail() {
Handle<HeapObject> RegExpMacroAssemblerMIPS::GetCode(Handle<String> source) {
+ Label return_v0;
if (masm_->has_exception()) {
// If the code gets corrupted due to long regular expressions and lack of
// space on trampolines, an internal exception flag is set. If this case
@@ -628,8 +670,9 @@ Handle<HeapObject> RegExpMacroAssemblerMIPS::GetCode(Handle<String> source) {
// Set frame pointer in space for it if this is not a direct call
// from generated code.
__ Addu(frame_pointer(), sp, Operand(4 * kPointerSize));
+ __ mov(a0, zero_reg);
+ __ push(a0); // Make room for success counter and initialize it to 0.
__ push(a0); // Make room for "position - 1" constant (value irrelevant).
- __ push(a0); // Make room for "at start" constant (value irrelevant).
// Check if we have space on the stack for registers.
Label stack_limit_hit;
@@ -648,12 +691,12 @@ Handle<HeapObject> RegExpMacroAssemblerMIPS::GetCode(Handle<String> source) {
// Exit with OutOfMemory exception. There is not enough space on the stack
// for our working registers.
__ li(v0, Operand(EXCEPTION));
- __ jmp(&exit_label_);
+ __ jmp(&return_v0);
__ bind(&stack_limit_hit);
CallCheckStackGuardState(a0);
// If returned value is non-zero, we exit with the returned value as result.
- __ Branch(&exit_label_, ne, v0, Operand(zero_reg));
+ __ Branch(&return_v0, ne, v0, Operand(zero_reg));
__ bind(&stack_ok);
// Allocate space on stack for registers.
@@ -674,39 +717,44 @@ Handle<HeapObject> RegExpMacroAssemblerMIPS::GetCode(Handle<String> source) {
// position registers.
__ sw(a0, MemOperand(frame_pointer(), kInputStartMinusOne));
- // Determine whether the start index is zero, that is at the start of the
- // string, and store that value in a local variable.
- __ mov(t5, a1);
- __ li(a1, Operand(1));
- __ Movn(a1, zero_reg, t5);
- __ sw(a1, MemOperand(frame_pointer(), kAtStart));
+ // Initialize code pointer register
+ __ li(code_pointer(), Operand(masm_->CodeObject()), CONSTANT_SIZE);
+
+ Label load_char_start_regexp, start_regexp;
+ // Load newline if index is at start, previous character otherwise.
+ __ Branch(&load_char_start_regexp, ne, a1, Operand(zero_reg));
+ __ li(current_character(), Operand('\n'));
+ __ jmp(&start_regexp);
+ // Global regexp restarts matching here.
+ __ bind(&load_char_start_regexp);
+ // Load previous char as initial value of current character register.
+ LoadCurrentCharacterUnchecked(-1, 1);
+ __ bind(&start_regexp);
+
+ // Initialize on-stack registers.
if (num_saved_registers_ > 0) { // Always is, if generated from a regexp.
// Fill saved registers with initial value = start offset - 1.
-
- // Address of register 0.
- __ Addu(a1, frame_pointer(), Operand(kRegisterZero));
- __ li(a2, Operand(num_saved_registers_));
- Label init_loop;
- __ bind(&init_loop);
- __ sw(a0, MemOperand(a1));
- __ Addu(a1, a1, Operand(-kPointerSize));
- __ Subu(a2, a2, Operand(1));
- __ Branch(&init_loop, ne, a2, Operand(zero_reg));
+ if (num_saved_registers_ > 8) {
+ // Address of register 0.
+ __ Addu(a1, frame_pointer(), Operand(kRegisterZero));
+ __ li(a2, Operand(num_saved_registers_));
+ Label init_loop;
+ __ bind(&init_loop);
+ __ sw(a0, MemOperand(a1));
+ __ Addu(a1, a1, Operand(-kPointerSize));
+ __ Subu(a2, a2, Operand(1));
+ __ Branch(&init_loop, ne, a2, Operand(zero_reg));
+ } else {
+ for (int i = 0; i < num_saved_registers_; i++) {
+ __ sw(a0, register_location(i));
+ }
+ }
}
// Initialize backtrack stack pointer.
__ lw(backtrack_stackpointer(), MemOperand(frame_pointer(), kStackHighEnd));
- // Initialize code pointer register
- __ li(code_pointer(), Operand(masm_->CodeObject()), CONSTANT_SIZE);
- // Load previous char as initial value of current character register.
- Label at_start;
- __ lw(a0, MemOperand(frame_pointer(), kAtStart));
- __ Branch(&at_start, ne, a0, Operand(zero_reg));
- LoadCurrentCharacterUnchecked(-1, 1); // Load previous char.
- __ jmp(&start_label_);
- __ bind(&at_start);
- __ li(current_character(), Operand('\n'));
+
__ jmp(&start_label_);
@@ -735,6 +783,10 @@ Handle<HeapObject> RegExpMacroAssemblerMIPS::GetCode(Handle<String> source) {
for (int i = 0; i < num_saved_registers_; i += 2) {
__ lw(a2, register_location(i));
__ lw(a3, register_location(i + 1));
+ if (i == 0 && global_with_zero_length_check()) {
+        // Keep capture start in t7 for the zero-length check later.
+ __ mov(t7, a2);
+ }
if (mode_ == UC16) {
__ sra(a2, a2, 1);
__ Addu(a2, a2, a1);
@@ -750,10 +802,57 @@ Handle<HeapObject> RegExpMacroAssemblerMIPS::GetCode(Handle<String> source) {
__ Addu(a0, a0, kPointerSize);
}
}
- __ li(v0, Operand(SUCCESS));
+
+ if (global()) {
+ // Restart matching if the regular expression is flagged as global.
+ __ lw(a0, MemOperand(frame_pointer(), kSuccessfulCaptures));
+ __ lw(a1, MemOperand(frame_pointer(), kNumOutputRegisters));
+ __ lw(a2, MemOperand(frame_pointer(), kRegisterOutput));
+ // Increment success counter.
+ __ Addu(a0, a0, 1);
+ __ sw(a0, MemOperand(frame_pointer(), kSuccessfulCaptures));
+ // Capture results have been stored, so the number of remaining global
+ // output registers is reduced by the number of stored captures.
+ __ Subu(a1, a1, num_saved_registers_);
+ // Check whether we have enough room for another set of capture results.
+ __ mov(v0, a0);
+ __ Branch(&return_v0, lt, a1, Operand(num_saved_registers_));
+
+ __ sw(a1, MemOperand(frame_pointer(), kNumOutputRegisters));
+ // Advance the location for output.
+ __ Addu(a2, a2, num_saved_registers_ * kPointerSize);
+ __ sw(a2, MemOperand(frame_pointer(), kRegisterOutput));
+
+ // Prepare a0 to initialize registers with its value in the next run.
+ __ lw(a0, MemOperand(frame_pointer(), kInputStartMinusOne));
+
+ if (global_with_zero_length_check()) {
+ // Special case for zero-length matches.
+ // t7: capture start index
+ // Not a zero-length match, restart.
+ __ Branch(
+ &load_char_start_regexp, ne, current_input_offset(), Operand(t7));
+ // Offset from the end is zero if we already reached the end.
+ __ Branch(&exit_label_, eq, current_input_offset(),
+ Operand(zero_reg));
+ // Advance current position after a zero-length match.
+ __ Addu(current_input_offset(),
+ current_input_offset(),
+ Operand((mode_ == UC16) ? 2 : 1));
+ }
+
+ __ Branch(&load_char_start_regexp);
+ } else {
+ __ li(v0, Operand(SUCCESS));
+ }
}
// Exit and return v0.
__ bind(&exit_label_);
+ if (global()) {
+ __ lw(v0, MemOperand(frame_pointer(), kSuccessfulCaptures));
+ }
+
+ __ bind(&return_v0);
  // Skip sp past regexp registers and local variables.
__ mov(sp, frame_pointer());
// Restore registers s0..s7 and return (restoring ra to pc).
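Taken together, the success-path changes above give the generated code a small driver loop when the regexp is global: bump kSuccessfulCaptures, deduct the registers just written from kNumOutputRegisters, return once another full capture set would no longer fit, and otherwise jump back to load_char_start_regexp, stepping one character (two bytes in UC16 mode) past a zero-length match so the loop cannot spin in place. A host-side C++ sketch of that control flow (illustrative only, not the generated code):

    // match_once stands in for one pass of the generated matcher; on success it
    // is assumed to write num_registers capture offsets (start/end pairs) into
    // output and return true.
    template <typename MatchOnce>
    int MatchGlobal(MatchOnce match_once, int* output, int output_capacity,
                    int num_registers, int subject_length) {
      int successes = 0;
      int remaining = output_capacity;
      int start = 0;
      while (remaining >= num_registers && match_once(start, output)) {
        ++successes;
        remaining -= num_registers;
        int match_start = output[0];
        int match_end = output[1];
        if (match_end == match_start) {            // zero-length match
          if (match_end == subject_length) break;  // already at the end of input
          ++match_end;                             // advance one character past it
        }
        start = match_end;
        output += num_registers;                   // next capture set goes here
      }
      return successes;
    }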
@@ -779,7 +878,7 @@ Handle<HeapObject> RegExpMacroAssemblerMIPS::GetCode(Handle<String> source) {
__ MultiPop(regexp_registers_to_retain);
// If returning non-zero, we should end execution with the given
// result as return value.
- __ Branch(&exit_label_, ne, v0, Operand(zero_reg));
+ __ Branch(&return_v0, ne, v0, Operand(zero_reg));
// String might have moved: Reload end of string from frame.
__ lw(end_of_input_address(), MemOperand(frame_pointer(), kInputEnd));
@@ -823,7 +922,7 @@ Handle<HeapObject> RegExpMacroAssemblerMIPS::GetCode(Handle<String> source) {
__ bind(&exit_with_exception);
// Exit with Result EXCEPTION(-1) to signal thrown exception.
__ li(v0, Operand(EXCEPTION));
- __ jmp(&exit_label_);
+ __ jmp(&return_v0);
}
}
@@ -848,23 +947,23 @@ void RegExpMacroAssemblerMIPS::GoTo(Label* to) {
void RegExpMacroAssemblerMIPS::IfRegisterGE(int reg,
- int comparand,
- Label* if_ge) {
+ int comparand,
+ Label* if_ge) {
__ lw(a0, register_location(reg));
BranchOrBacktrack(if_ge, ge, a0, Operand(comparand));
}
void RegExpMacroAssemblerMIPS::IfRegisterLT(int reg,
- int comparand,
- Label* if_lt) {
+ int comparand,
+ Label* if_lt) {
__ lw(a0, register_location(reg));
BranchOrBacktrack(if_lt, lt, a0, Operand(comparand));
}
void RegExpMacroAssemblerMIPS::IfRegisterEqPos(int reg,
- Label* if_eq) {
+ Label* if_eq) {
__ lw(a0, register_location(reg));
BranchOrBacktrack(if_eq, eq, a0, Operand(current_input_offset()));
}
@@ -877,9 +976,9 @@ RegExpMacroAssembler::IrregexpImplementation
void RegExpMacroAssemblerMIPS::LoadCurrentCharacter(int cp_offset,
- Label* on_end_of_input,
- bool check_bounds,
- int characters) {
+ Label* on_end_of_input,
+ bool check_bounds,
+ int characters) {
ASSERT(cp_offset >= -1); // ^ and \b can look behind one character.
ASSERT(cp_offset < (1<<30)); // Be sane! (And ensure negation works).
if (check_bounds) {
@@ -930,7 +1029,7 @@ void RegExpMacroAssemblerMIPS::PushCurrentPosition() {
void RegExpMacroAssemblerMIPS::PushRegister(int register_index,
- StackCheckFlag check_stack_limit) {
+ StackCheckFlag check_stack_limit) {
__ lw(a0, register_location(register_index));
Push(a0);
if (check_stack_limit) CheckStackLimit();
@@ -971,13 +1070,14 @@ void RegExpMacroAssemblerMIPS::SetRegister(int register_index, int to) {
}
-void RegExpMacroAssemblerMIPS::Succeed() {
+bool RegExpMacroAssemblerMIPS::Succeed() {
__ jmp(&success_label_);
+ return global();
}
void RegExpMacroAssemblerMIPS::WriteCurrentPositionToRegister(int reg,
- int cp_offset) {
+ int cp_offset) {
if (cp_offset == 0) {
__ sw(current_input_offset(), register_location(reg));
} else {
@@ -1134,7 +1234,7 @@ MemOperand RegExpMacroAssemblerMIPS::register_location(int register_index) {
void RegExpMacroAssemblerMIPS::CheckPosition(int cp_offset,
- Label* on_outside_input) {
+ Label* on_outside_input) {
BranchOrBacktrack(on_outside_input,
ge,
current_input_offset(),
@@ -1162,8 +1262,10 @@ void RegExpMacroAssemblerMIPS::BranchOrBacktrack(Label* to,
}
-void RegExpMacroAssemblerMIPS::SafeCall(Label* to, Condition cond, Register rs,
- const Operand& rt) {
+void RegExpMacroAssemblerMIPS::SafeCall(Label* to,
+ Condition cond,
+ Register rs,
+ const Operand& rt) {
__ BranchAndLink(to, cond, rs, rt);
}
@@ -1234,11 +1336,12 @@ void RegExpMacroAssemblerMIPS::CallCFunctionUsingStub(
void RegExpMacroAssemblerMIPS::LoadCurrentCharacterUnchecked(int cp_offset,
- int characters) {
+ int characters) {
Register offset = current_input_offset();
if (cp_offset != 0) {
- __ Addu(a0, current_input_offset(), Operand(cp_offset * char_size()));
- offset = a0;
+ // t7 is not being used to store the capture start index at this point.
+ __ Addu(t7, current_input_offset(), Operand(cp_offset * char_size()));
+ offset = t7;
}
// We assume that we cannot do unaligned loads on MIPS, so this function
// must only be used to load a single character at a time.
diff --git a/deps/v8/src/mips/regexp-macro-assembler-mips.h b/deps/v8/src/mips/regexp-macro-assembler-mips.h
index d42d4cf67e..d3fff0db2b 100644
--- a/deps/v8/src/mips/regexp-macro-assembler-mips.h
+++ b/deps/v8/src/mips/regexp-macro-assembler-mips.h
@@ -47,7 +47,7 @@ class RegExpMacroAssemblerMIPS: public RegExpMacroAssembler {
#else // V8_INTERPRETED_REGEXP
class RegExpMacroAssemblerMIPS: public NativeRegExpMacroAssembler {
public:
- RegExpMacroAssemblerMIPS(Mode mode, int registers_to_save);
+ RegExpMacroAssemblerMIPS(Mode mode, int registers_to_save, Zone* zone);
virtual ~RegExpMacroAssemblerMIPS();
virtual int stack_limit_slack();
virtual void AdvanceCurrentPosition(int by);
@@ -72,7 +72,6 @@ class RegExpMacroAssemblerMIPS: public NativeRegExpMacroAssembler {
virtual void CheckNotBackReference(int start_reg, Label* on_no_match);
virtual void CheckNotBackReferenceIgnoreCase(int start_reg,
Label* on_no_match);
- virtual void CheckNotRegistersEqual(int reg1, int reg2, Label* on_not_equal);
virtual void CheckNotCharacter(uint32_t c, Label* on_not_equal);
virtual void CheckNotCharacterAfterAnd(uint32_t c,
uint32_t mask,
@@ -81,6 +80,14 @@ class RegExpMacroAssemblerMIPS: public NativeRegExpMacroAssembler {
uc16 minus,
uc16 mask,
Label* on_not_equal);
+ virtual void CheckCharacterInRange(uc16 from,
+ uc16 to,
+ Label* on_in_range);
+ virtual void CheckCharacterNotInRange(uc16 from,
+ uc16 to,
+ Label* on_not_in_range);
+ virtual void CheckBitInTable(Handle<ByteArray> table, Label* on_bit_set);
+
// Checks whether the given offset from the current position is before
// the end of the string.
virtual void CheckPosition(int cp_offset, Label* on_outside_input);
@@ -107,7 +114,7 @@ class RegExpMacroAssemblerMIPS: public NativeRegExpMacroAssembler {
virtual void ReadStackPointerFromRegister(int reg);
virtual void SetCurrentPositionFromEnd(int by);
virtual void SetRegister(int register_index, int to);
- virtual void Succeed();
+ virtual bool Succeed();
virtual void WriteCurrentPositionToRegister(int reg, int cp_offset);
virtual void ClearRegisters(int reg_from, int reg_to);
virtual void WriteStackPointerToRegister(int reg);
@@ -133,7 +140,8 @@ class RegExpMacroAssemblerMIPS: public NativeRegExpMacroAssembler {
static const int kStackFrameHeader = kReturnAddress + kPointerSize;
// Stack parameters placed by caller.
static const int kRegisterOutput = kStackFrameHeader + 20;
- static const int kStackHighEnd = kRegisterOutput + kPointerSize;
+ static const int kNumOutputRegisters = kRegisterOutput + kPointerSize;
+ static const int kStackHighEnd = kNumOutputRegisters + kPointerSize;
static const int kDirectCall = kStackHighEnd + kPointerSize;
static const int kIsolate = kDirectCall + kPointerSize;
@@ -145,10 +153,10 @@ class RegExpMacroAssemblerMIPS: public NativeRegExpMacroAssembler {
static const int kInputString = kStartIndex - kPointerSize;
// When adding local variables remember to push space for them in
// the frame in GetCode.
- static const int kInputStartMinusOne = kInputString - kPointerSize;
- static const int kAtStart = kInputStartMinusOne - kPointerSize;
+ static const int kSuccessfulCaptures = kInputString - kPointerSize;
+ static const int kInputStartMinusOne = kSuccessfulCaptures - kPointerSize;
// First register address. Following registers are below it on the stack.
- static const int kRegisterZero = kAtStart - kPointerSize;
+ static const int kRegisterZero = kInputStartMinusOne - kPointerSize;
// Initial size of code buffer.
static const size_t kRegExpCodeSize = 1024;
diff --git a/deps/v8/src/mips/simulator-mips.cc b/deps/v8/src/mips/simulator-mips.cc
index f31ce7ea48..66d0da71fa 100644
--- a/deps/v8/src/mips/simulator-mips.cc
+++ b/deps/v8/src/mips/simulator-mips.cc
@@ -1502,10 +1502,15 @@ void Simulator::SoftwareInterrupt(Instruction* instr) {
break;
}
}
- double result = target(arg0, arg1, arg2, arg3);
if (redirection->type() != ExternalReference::BUILTIN_COMPARE_CALL) {
- SetFpResult(result);
+ SimulatorRuntimeFPCall target =
+ reinterpret_cast<SimulatorRuntimeFPCall>(external);
+ double result = target(arg0, arg1, arg2, arg3);
+ SetFpResult(result);
} else {
+ SimulatorRuntimeCall target =
+ reinterpret_cast<SimulatorRuntimeCall>(external);
+ uint64_t result = target(arg0, arg1, arg2, arg3, arg4, arg5);
int32_t gpreg_pair[2];
memcpy(&gpreg_pair[0], &result, 2 * sizeof(int32_t));
set_register(v0, gpreg_pair[0]);
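The simulator fix above stops funnelling every redirected call through the floating-point path: FP builtins still return a double via SetFpResult, while compare-style builtins return a 64-bit integer that has to be split across the v0/v1 register pair. A tiny standalone illustration of that splitting step, assuming a little-endian host as the simulator does:

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    int main() {
      uint64_t result = 0x0000000100000002ULL;  // pretend 64-bit return value
      int32_t gpreg_pair[2];
      // Same move as in the hunk above: reinterpret the 64-bit result as two
      // 32-bit halves without breaking strict-aliasing rules.
      std::memcpy(&gpreg_pair[0], &result, 2 * sizeof(int32_t));
      // Prints "v0 = 2, v1 = 1" on a little-endian host.
      std::printf("v0 = %d, v1 = %d\n", gpreg_pair[0], gpreg_pair[1]);
      return 0;
    }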
diff --git a/deps/v8/src/mips/simulator-mips.h b/deps/v8/src/mips/simulator-mips.h
index 1e72939876..776badc29b 100644
--- a/deps/v8/src/mips/simulator-mips.h
+++ b/deps/v8/src/mips/simulator-mips.h
@@ -50,16 +50,16 @@ namespace internal {
entry(p0, p1, p2, p3, p4)
typedef int (*mips_regexp_matcher)(String*, int, const byte*, const byte*,
- void*, int*, Address, int, Isolate*);
+ void*, int*, int, Address, int, Isolate*);
// Call the generated regexp code directly. The code at the entry address
// should act as a function matching the type arm_regexp_matcher.
// The fifth argument is a dummy that reserves the space used for
// the return address added by the ExitFrame in native calls.
-#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \
+#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7, p8) \
(FUNCTION_CAST<mips_regexp_matcher>(entry)( \
- p0, p1, p2, p3, NULL, p4, p5, p6, p7))
+ p0, p1, p2, p3, NULL, p4, p5, p6, p7, p8))
#define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
reinterpret_cast<TryCatch*>(try_catch_address)
@@ -403,9 +403,9 @@ class Simulator {
reinterpret_cast<Object*>(Simulator::current(Isolate::Current())->Call( \
FUNCTION_ADDR(entry), 5, p0, p1, p2, p3, p4))
-#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \
+#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7, p8) \
Simulator::current(Isolate::Current())->Call( \
- entry, 9, p0, p1, p2, p3, NULL, p4, p5, p6, p7)
+ entry, 10, p0, p1, p2, p3, NULL, p4, p5, p6, p7, p8)
#define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
try_catch_address == NULL ? \
diff --git a/deps/v8/src/mips/stub-cache-mips.cc b/deps/v8/src/mips/stub-cache-mips.cc
index 54f55b3ceb..967ce4a605 100644
--- a/deps/v8/src/mips/stub-cache-mips.cc
+++ b/deps/v8/src/mips/stub-cache-mips.cc
@@ -422,21 +422,59 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
Handle<JSObject> object,
int index,
Handle<Map> transition,
+ Handle<String> name,
Register receiver_reg,
Register name_reg,
- Register scratch,
+ Register scratch1,
+ Register scratch2,
Label* miss_label) {
// a0 : value.
Label exit;
+
+ LookupResult lookup(masm->isolate());
+ object->Lookup(*name, &lookup);
+ if (lookup.IsFound() && (lookup.IsReadOnly() || !lookup.IsCacheable())) {
+ // In sloppy mode, we could just return the value and be done. However, we
+ // might be in strict mode, where we have to throw. Since we cannot tell,
+ // go into slow case unconditionally.
+ __ jmp(miss_label);
+ return;
+ }
+
// Check that the map of the object hasn't changed.
CompareMapMode mode = transition.is_null() ? ALLOW_ELEMENT_TRANSITION_MAPS
: REQUIRE_EXACT_MAP;
- __ CheckMap(receiver_reg, scratch, Handle<Map>(object->map()), miss_label,
+ __ CheckMap(receiver_reg, scratch1, Handle<Map>(object->map()), miss_label,
DO_SMI_CHECK, mode);
// Perform global security token check if needed.
if (object->IsJSGlobalProxy()) {
- __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
+ __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label);
+ }
+
+ // Check that we are allowed to write this.
+ if (!transition.is_null() && object->GetPrototype()->IsJSObject()) {
+ JSObject* holder;
+ if (lookup.IsFound()) {
+ holder = lookup.holder();
+ } else {
+ // Find the top object.
+ holder = *object;
+ do {
+ holder = JSObject::cast(holder->GetPrototype());
+ } while (holder->GetPrototype()->IsJSObject());
+ }
+    // We need an extra register, so push name_reg to use it as a scratch.
+ __ push(name_reg);
+ Label miss_pop, done_check;
+ CheckPrototypes(object, receiver_reg, Handle<JSObject>(holder), name_reg,
+ scratch1, scratch2, name, &miss_pop);
+ __ jmp(&done_check);
+ __ bind(&miss_pop);
+ __ pop(name_reg);
+ __ jmp(miss_label);
+ __ bind(&done_check);
+ __ pop(name_reg);
}
// Stub never generated for non-global objects that require access
@@ -458,10 +496,20 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
}
if (!transition.is_null()) {
- // Update the map of the object; no write barrier updating is
- // needed because the map is never in new space.
- __ li(t0, Operand(transition));
- __ sw(t0, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
+ // Update the map of the object.
+ __ li(scratch1, Operand(transition));
+ __ sw(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
+
+ // Update the write barrier for the map field and pass the now unused
+ // name_reg as scratch register.
+ __ RecordWriteField(receiver_reg,
+ HeapObject::kMapOffset,
+ scratch1,
+ name_reg,
+ kRAHasNotBeenSaved,
+ kDontSaveFPRegs,
+ OMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
}
// Adjust for the number of properties stored in the object. Even in the
@@ -475,7 +523,7 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
__ sw(a0, FieldMemOperand(receiver_reg, offset));
// Skip updating write barrier if storing a smi.
- __ JumpIfSmi(a0, &exit, scratch);
+ __ JumpIfSmi(a0, &exit, scratch1);
// Update the write barrier for the array address.
// Pass the now unused name_reg as a scratch register.
@@ -483,15 +531,16 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
__ RecordWriteField(receiver_reg,
offset,
name_reg,
- scratch,
+ scratch1,
kRAHasNotBeenSaved,
kDontSaveFPRegs);
} else {
// Write to the properties array.
int offset = index * kPointerSize + FixedArray::kHeaderSize;
// Get the properties array.
- __ lw(scratch, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
- __ sw(a0, FieldMemOperand(scratch, offset));
+ __ lw(scratch1,
+ FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
+ __ sw(a0, FieldMemOperand(scratch1, offset));
// Skip updating write barrier if storing a smi.
__ JumpIfSmi(a0, &exit);
@@ -499,7 +548,7 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
// Update the write barrier for the array address.
// Ok to clobber receiver_reg and name_reg, since we return.
__ mov(name_reg, a0);
- __ RecordWriteField(scratch,
+ __ RecordWriteField(scratch1,
offset,
name_reg,
receiver_reg,
@@ -565,6 +614,8 @@ static void PushInterceptorArguments(MacroAssembler* masm,
__ Push(scratch, receiver, holder);
__ lw(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset));
__ push(scratch);
+ __ li(scratch, Operand(ExternalReference::isolate_address()));
+ __ push(scratch);
}
@@ -579,7 +630,7 @@ static void CompileCallLoadPropertyWithInterceptor(
ExternalReference ref =
ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
masm->isolate());
- __ PrepareCEntryArgs(5);
+ __ PrepareCEntryArgs(6);
__ PrepareCEntryFunction(ref);
CEntryStub stub(1);
@@ -587,10 +638,10 @@ static void CompileCallLoadPropertyWithInterceptor(
}
-static const int kFastApiCallArguments = 3;
+static const int kFastApiCallArguments = 4;
-// Reserves space for the extra arguments to FastHandleApiCall in the
+// Reserves space for the extra arguments to the API function in the
// caller's frame.
//
// These arguments are set by CheckPrototypes and GenerateFastApiDirectCall.
@@ -616,7 +667,8 @@ static void GenerateFastApiDirectCall(MacroAssembler* masm,
// -- sp[0] : holder (set by CheckPrototypes)
// -- sp[4] : callee JS function
// -- sp[8] : call data
- // -- sp[12] : last JS argument
+ // -- sp[12] : isolate
+ // -- sp[16] : last JS argument
// -- ...
// -- sp[(argc + 3) * 4] : first JS argument
// -- sp[(argc + 4) * 4] : receiver
@@ -626,7 +678,7 @@ static void GenerateFastApiDirectCall(MacroAssembler* masm,
__ LoadHeapObject(t1, function);
__ lw(cp, FieldMemOperand(t1, JSFunction::kContextOffset));
- // Pass the additional arguments FastHandleApiCall expects.
+ // Pass the additional arguments.
Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
Handle<Object> call_data(api_call_info->data());
if (masm->isolate()->heap()->InNewSpace(*call_data)) {
@@ -636,14 +688,17 @@ static void GenerateFastApiDirectCall(MacroAssembler* masm,
__ li(t2, call_data);
}
- // Store JS function and call data.
+ __ li(t3, Operand(ExternalReference::isolate_address()));
+ // Store JS function, call data and isolate.
__ sw(t1, MemOperand(sp, 1 * kPointerSize));
__ sw(t2, MemOperand(sp, 2 * kPointerSize));
+ __ sw(t3, MemOperand(sp, 3 * kPointerSize));
- // a2 points to call data as expected by Arguments
- // (refer to layout above).
- __ Addu(a2, sp, Operand(2 * kPointerSize));
+ // Prepare arguments.
+ __ Addu(a2, sp, Operand(3 * kPointerSize));
+ // Allocate the v8::Arguments structure in the arguments' space since
+ // it's not controlled by GC.
const int kApiStackSpace = 4;
FrameScope frame_scope(masm, StackFrame::MANUAL);
@@ -658,9 +713,9 @@ static void GenerateFastApiDirectCall(MacroAssembler* masm,
// Arguments is built at sp + 1 (sp is a reserved spot for ra).
__ Addu(a1, sp, kPointerSize);
- // v8::Arguments::implicit_args = data
+ // v8::Arguments::implicit_args_
__ sw(a2, MemOperand(a1, 0 * kPointerSize));
- // v8::Arguments::values = last argument
+ // v8::Arguments::values_
__ Addu(t0, a2, Operand(argc * kPointerSize));
__ sw(t0, MemOperand(a1, 1 * kPointerSize));
// v8::Arguments::length_ = argc
@@ -838,7 +893,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
ExternalReference(
IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
masm->isolate()),
- 5);
+ 6);
// Restore the name_ register.
__ pop(name_);
// Leave the internal frame.
@@ -1206,7 +1261,13 @@ void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
} else {
__ li(scratch3, Handle<Object>(callback->data()));
}
- __ Push(reg, scratch3, name_reg);
+ __ Subu(sp, sp, 4 * kPointerSize);
+ __ sw(reg, MemOperand(sp, 3 * kPointerSize));
+ __ sw(scratch3, MemOperand(sp, 2 * kPointerSize));
+ __ li(scratch3, Operand(ExternalReference::isolate_address()));
+ __ sw(scratch3, MemOperand(sp, 1 * kPointerSize));
+ __ sw(name_reg, MemOperand(sp, 0 * kPointerSize));
+
__ mov(a2, scratch2); // Saved in case scratch2 == a1.
__ mov(a1, sp); // a1 (first argument - see note below) = Handle<String>
@@ -1225,7 +1286,7 @@ void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
// a2 (second argument - see note above) = AccessorInfo&
__ Addu(a2, sp, kPointerSize);
- const int kStackUnwindSpace = 4;
+ const int kStackUnwindSpace = 5;
Address getter_address = v8::ToCData<Address>(callback->getter());
ApiFunction fun(getter_address);
ExternalReference ref =
@@ -1261,8 +1322,9 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
compile_followup_inline = true;
} else if (lookup->type() == CALLBACKS &&
lookup->GetCallbackObject()->IsAccessorInfo()) {
- compile_followup_inline =
- AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL;
+ AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
+ compile_followup_inline = callback->getter() != NULL &&
+ callback->IsCompatibleReceiver(*object);
}
}
@@ -1275,12 +1337,19 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
name, miss);
ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));
+ // Preserve the receiver register explicitly whenever it is different from
+ // the holder and it is needed should the interceptor return without any
+ // result. The CALLBACKS case needs the receiver to be passed into C++ code,
+ // the FIELD case might cause a miss during the prototype check.
+  bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
+  bool must_preserve_receiver_reg = !receiver.is(holder_reg) &&
+      (lookup->type() == CALLBACKS || must_perform_prototype_check);
+
// Save necessary data before invoking an interceptor.
// Requires a frame to make GC aware of pushed pointers.
{
FrameScope frame_scope(masm(), StackFrame::INTERNAL);
- if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
- // CALLBACKS case needs a receiver to be passed into C++ callback.
+ if (must_preserve_receiver_reg) {
__ Push(receiver, holder_reg, name_reg);
} else {
__ Push(holder_reg, name_reg);
@@ -1304,14 +1373,14 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
__ bind(&interceptor_failed);
__ pop(name_reg);
__ pop(holder_reg);
- if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
+ if (must_preserve_receiver_reg) {
__ pop(receiver);
}
// Leave the internal frame.
}
// Check that the maps from interceptor's holder to lookup's holder
// haven't changed. And load lookup's holder into |holder| register.
- if (*interceptor_holder != lookup->holder()) {
+  if (must_perform_prototype_check) {
holder_reg = CheckPrototypes(interceptor_holder,
holder_reg,
Handle<JSObject>(lookup->holder()),
@@ -1341,24 +1410,17 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
// Important invariant in CALLBACKS case: the code above must be
// structured to never clobber |receiver| register.
__ li(scratch2, callback);
- // holder_reg is either receiver or scratch1.
- if (!receiver.is(holder_reg)) {
- ASSERT(scratch1.is(holder_reg));
- __ Push(receiver, holder_reg);
- __ lw(scratch3,
- FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
- __ Push(scratch3, scratch2, name_reg);
- } else {
- __ push(receiver);
- __ lw(scratch3,
- FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
- __ Push(holder_reg, scratch3, scratch2, name_reg);
- }
+
+ __ Push(receiver, holder_reg);
+ __ lw(scratch3,
+ FieldMemOperand(scratch2, AccessorInfo::kDataOffset));
+ __ li(scratch1, Operand(ExternalReference::isolate_address()));
+ __ Push(scratch3, scratch1, scratch2, name_reg);
ExternalReference ref =
ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
masm()->isolate());
- __ TailCallExternalReference(ref, 5, 1);
+ __ TailCallExternalReference(ref, 6, 1);
}
} else { // !compile_followup_inline
// Call the runtime system to load the interceptor.
@@ -1371,7 +1433,7 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
ExternalReference ref = ExternalReference(
IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), masm()->isolate());
- __ TailCallExternalReference(ref, 5, 1);
+ __ TailCallExternalReference(ref, 6, 1);
}
}
@@ -1573,16 +1635,29 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ jmp(&fast_object);
// In case of fast smi-only, convert to fast object, otherwise bail out.
__ bind(&not_fast_object);
- __ CheckFastSmiOnlyElements(a3, t3, &call_builtin);
+ __ CheckFastSmiElements(a3, t3, &call_builtin);
    // receiver: the array being pushed to
    // a3: receiver map
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ Label try_holey_map;
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
FAST_ELEMENTS,
a3,
t3,
+ &try_holey_map);
+ __ mov(a2, receiver);
+ ElementsTransitionGenerator::
+ GenerateMapChangeElementsTransition(masm());
+ __ jmp(&fast_object);
+
+ __ bind(&try_holey_map);
+ __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS,
+ FAST_HOLEY_ELEMENTS,
+ a3,
+ t3,
&call_builtin);
__ mov(a2, receiver);
- ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm());
+ ElementsTransitionGenerator::
+ GenerateMapChangeElementsTransition(masm());
__ bind(&fast_object);
} else {
__ CheckFastObjectElements(a3, a3, &call_builtin);
@@ -1738,14 +1813,14 @@ Handle<Code> CallStubCompiler::CompileArrayPopCall(
// expensive shift first, and use an offset later on.
__ sll(t1, t0, kPointerSizeLog2 - kSmiTagSize);
__ Addu(elements, elements, t1);
- __ lw(v0, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
+ __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
__ Branch(&call_builtin, eq, v0, Operand(t2));
// Set the array's length.
__ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
// Fill with the hole.
- __ sw(t2, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
+ __ sw(t2, FieldMemOperand(elements, FixedArray::kHeaderSize));
__ Drop(argc + 1);
__ Ret();
@@ -2535,7 +2610,13 @@ Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object,
Label miss;
// Name register might be clobbered.
- GenerateStoreField(masm(), object, index, transition, a1, a2, a3, &miss);
+ GenerateStoreField(masm(),
+ object,
+ index,
+ transition,
+ name,
+ a1, a2, a3, t0,
+ &miss);
__ bind(&miss);
__ li(a2, Operand(Handle<String>(name))); // Restore name.
Handle<Code> ic = masm()->isolate()->builtins()->Builtins::StoreIC_Miss();
@@ -2591,6 +2672,52 @@ Handle<Code> StoreStubCompiler::CompileStoreCallback(
}
+Handle<Code> StoreStubCompiler::CompileStoreViaSetter(
+ Handle<JSObject> receiver,
+ Handle<JSFunction> setter,
+ Handle<String> name) {
+ // ----------- S t a t e -------------
+ // -- a0 : value
+ // -- a1 : receiver
+ // -- a2 : name
+ // -- ra : return address
+ // -----------------------------------
+ Label miss;
+
+ // Check that the map of the object hasn't changed.
+ __ CheckMap(a1, a3, Handle<Map>(receiver->map()), &miss, DO_SMI_CHECK,
+ ALLOW_ELEMENT_TRANSITION_MAPS);
+
+ {
+ FrameScope scope(masm(), StackFrame::INTERNAL);
+
+ // Save value register, so we can restore it later.
+ __ push(a0);
+
+    // Call the JavaScript setter with the receiver and the value on the stack.
+ __ push(a1);
+ __ push(a0);
+ ParameterCount actual(1);
+ __ InvokeFunction(setter, actual, CALL_FUNCTION, NullCallWrapper(),
+ CALL_AS_METHOD);
+
+ // We have to return the passed value, not the return value of the setter.
+ __ pop(v0);
+
+ // Restore context register.
+ __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+ }
+ __ Ret();
+
+ __ bind(&miss);
+ Handle<Code> ic = masm()->isolate()->builtins()->StoreIC_Miss();
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(CALLBACKS, name);
+}
+
+
Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
Handle<JSObject> receiver,
Handle<String> name) {
@@ -2758,6 +2885,44 @@ Handle<Code> LoadStubCompiler::CompileLoadCallback(
}
+Handle<Code> LoadStubCompiler::CompileLoadViaGetter(
+ Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSObject> holder,
+ Handle<JSFunction> getter) {
+ // ----------- S t a t e -------------
+ // -- a0 : receiver
+ // -- a2 : name
+ // -- ra : return address
+ // -----------------------------------
+ Label miss;
+
+ // Check that the maps haven't changed.
+ __ JumpIfSmi(a0, &miss);
+ CheckPrototypes(receiver, a0, holder, a3, t0, a1, name, &miss);
+
+ {
+ FrameScope scope(masm(), StackFrame::INTERNAL);
+
+ // Call the JavaScript getter with the receiver on the stack.
+ __ push(a0);
+ ParameterCount actual(0);
+ __ InvokeFunction(getter, actual, CALL_FUNCTION, NullCallWrapper(),
+ CALL_AS_METHOD);
+
+ // Restore context register.
+ __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+ }
+ __ Ret();
+
+ __ bind(&miss);
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(CALLBACKS, name);
+}
+
+
Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object,
Handle<JSObject> holder,
Handle<JSFunction> value,
@@ -3074,7 +3239,13 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object,
// a3 is used as scratch register. a1 and a2 keep their values if a jump to
// the miss label is generated.
- GenerateStoreField(masm(), object, index, transition, a2, a1, a3, &miss);
+ GenerateStoreField(masm(),
+ object,
+ index,
+ transition,
+ name,
+ a2, a1, a3, t0,
+ &miss);
__ bind(&miss);
__ DecrementCounter(counters->keyed_store_field(), 1, a3, t0);
@@ -3360,9 +3531,12 @@ static bool IsElementTypeSigned(ElementsKind elements_kind) {
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -3372,6 +3546,45 @@ static bool IsElementTypeSigned(ElementsKind elements_kind) {
}
+static void GenerateSmiKeyCheck(MacroAssembler* masm,
+ Register key,
+ Register scratch0,
+ Register scratch1,
+ FPURegister double_scratch0,
+ Label* fail) {
+ if (CpuFeatures::IsSupported(FPU)) {
+ CpuFeatures::Scope scope(FPU);
+ Label key_ok;
+ // Check for smi or a smi inside a heap number. We convert the heap
+ // number and check if the conversion is exact and fits into the smi
+ // range.
+ __ JumpIfSmi(key, &key_ok);
+ __ CheckMap(key,
+ scratch0,
+ Heap::kHeapNumberMapRootIndex,
+ fail,
+ DONT_DO_SMI_CHECK);
+ __ ldc1(double_scratch0, FieldMemOperand(key, HeapNumber::kValueOffset));
+ __ EmitFPUTruncate(kRoundToZero,
+ double_scratch0,
+ double_scratch0,
+ scratch0,
+ scratch1,
+ kCheckForInexactConversion);
+
+ __ Branch(fail, ne, scratch1, Operand(zero_reg));
+
+ __ mfc1(scratch0, double_scratch0);
+ __ SmiTagCheckOverflow(key, scratch0, scratch1);
+ __ BranchOnOverflow(fail, scratch1);
+ __ bind(&key_ok);
+ } else {
+ // Check that the key is a smi.
+ __ JumpIfNotSmi(key, fail);
+ }
+}
+
+
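GenerateSmiKeyCheck above accepts either a smi key or a heap number whose value truncates exactly into the 31-bit smi range used on 32-bit MIPS. The same acceptance test in plain C++, for intuition (a sketch under those assumptions, not the V8 code path):

    #include <cmath>
    #include <cstdint>

    // A 32-bit V8 smi carries a 31-bit signed payload.
    static const int32_t kSmiMaxValue = (1 << 30) - 1;
    static const int32_t kSmiMinValue = -(1 << 30);

    // True when a heap-number key would pass the FPU path above: truncation
    // toward zero is exact and the result fits in smi range.
    static bool KeyConvertsToSmi(double key) {
      double truncated = std::trunc(key);
      if (truncated != key) return false;  // inexact conversion (also rejects NaN)
      return truncated >= kSmiMinValue && truncated <= kSmiMaxValue;
    }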
void KeyedLoadStubCompiler::GenerateLoadExternalArray(
MacroAssembler* masm,
ElementsKind elements_kind) {
@@ -3388,8 +3601,8 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
- // Check that the key is a smi.
- __ JumpIfNotSmi(key, &miss_force_generic);
+ // Check that the key is a smi or a heap number convertible to a smi.
+ GenerateSmiKeyCheck(masm, key, t0, t1, f2, &miss_force_generic);
__ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset));
// a3: elements array
@@ -3457,8 +3670,11 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
}
break;
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -3497,7 +3713,7 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
CpuFeatures::Scope scope(FPU);
__ mtc1(value, f0);
__ cvt_d_w(f0, f0);
- __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag));
+ __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
__ Ret();
} else {
Register dst1 = t2;
@@ -3545,7 +3761,7 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
__ Cvt_d_uw(f0, value, f22);
- __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag));
+ __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
__ Ret();
} else {
@@ -3599,7 +3815,7 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
__ AllocateHeapNumber(v0, t3, t5, t6, &slow);
// The float (single) value is already in fpu reg f0 (if we use float).
__ cvt_d_s(f0, f0);
- __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag));
+ __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
__ Ret();
} else {
// Allocate a HeapNumber for the result. Don't use a0 and a1 as
@@ -3625,7 +3841,7 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
__ li(t0, 0x7ff);
__ Xor(t1, t5, Operand(0xFF));
__ Movz(t5, t0, t1); // Set t5 to 0x7ff only if t5 is equal to 0xff.
- __ Branch(&exponent_rebiased, eq, t0, Operand(0xff));
+ __ Branch(&exponent_rebiased, eq, t1, Operand(zero_reg));
// Rebias exponent.
__ Addu(t5,
@@ -3727,8 +3943,8 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
- // Check that the key is a smi.
- __ JumpIfNotSmi(key, &miss_force_generic);
+ // Check that the key is a smi or a heap number convertible to a smi.
+ GenerateSmiKeyCheck(masm, key, t0, t1, f2, &miss_force_generic);
__ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset));
@@ -3818,8 +4034,11 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
}
break;
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -3883,8 +4102,11 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -3919,7 +4141,7 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
__ xor_(t1, t6, t5);
__ li(t2, kBinary32ExponentMask);
__ Movz(t6, t2, t1); // Only if t6 is equal to t5.
- __ Branch(&nan_or_infinity_or_zero, eq, t6, Operand(t5));
+ __ Branch(&nan_or_infinity_or_zero, eq, t1, Operand(zero_reg));
// Rebias exponent.
__ srl(t6, t6, HeapNumber::kExponentShift);
@@ -3950,7 +4172,7 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
__ bind(&done);
__ sll(t9, key, 1);
- __ addu(t9, a2, t9);
+ __ addu(t9, a3, t9);
__ sw(t3, MemOperand(t9, 0));
// Entry registers are intact, a0 holds the value which is the return
@@ -3968,7 +4190,7 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
__ or_(t3, t6, t4);
__ Branch(&done);
} else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
- __ sll(t8, t0, 3);
+ __ sll(t8, key, 2);
__ addu(t8, a3, t8);
// t8: effective address of destination element.
__ sw(t4, MemOperand(t8, 0));
@@ -4055,8 +4277,11 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -4107,9 +4332,8 @@ void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
- // Check that the key is a smi.
- __ JumpIfNotSmi(a0, &miss_force_generic, at, USE_DELAY_SLOT);
- // The delay slot can be safely used here, a1 is an object pointer.
+ // Check that the key is a smi or a heap number convertible to a smi.
+ GenerateSmiKeyCheck(masm, a0, t0, t1, f2, &miss_force_generic);
// Get the elements array.
__ lw(a2, FieldMemOperand(a1, JSObject::kElementsOffset));
@@ -4159,8 +4383,8 @@ void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
- // Check that the key is a smi.
- __ JumpIfNotSmi(key_reg, &miss_force_generic);
+ // Check that the key is a smi or a heap number convertible to a smi.
+ GenerateSmiKeyCheck(masm, key_reg, t0, t1, f2, &miss_force_generic);
// Get the elements array.
__ lw(elements_reg,
@@ -4233,10 +4457,10 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
- // Check that the key is a smi.
- __ JumpIfNotSmi(key_reg, &miss_force_generic);
+ // Check that the key is a smi or a heap number convertible to a smi.
+ GenerateSmiKeyCheck(masm, key_reg, t0, t1, f2, &miss_force_generic);
- if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
+ if (IsFastSmiElementsKind(elements_kind)) {
__ JumpIfNotSmi(value_reg, &transition_elements_kind);
}
@@ -4264,7 +4488,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
__ bind(&finish_store);
- if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
+ if (IsFastSmiElementsKind(elements_kind)) {
__ Addu(scratch,
elements_reg,
Operand(FixedArray::kHeaderSize - kHeapObjectTag));
@@ -4273,7 +4497,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
__ Addu(scratch, scratch, scratch2);
__ sw(value_reg, MemOperand(scratch));
} else {
- ASSERT(elements_kind == FAST_ELEMENTS);
+ ASSERT(IsFastObjectElementsKind(elements_kind));
__ Addu(scratch,
elements_reg,
Operand(FixedArray::kHeaderSize - kHeapObjectTag));
@@ -4282,7 +4506,6 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
__ Addu(scratch, scratch, scratch2);
__ sw(value_reg, MemOperand(scratch));
__ mov(receiver_reg, value_reg);
- ASSERT(elements_kind == FAST_ELEMENTS);
__ RecordWrite(elements_reg, // Object.
scratch, // Address.
receiver_reg, // Value.
@@ -4400,7 +4623,9 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
- __ JumpIfNotSmi(key_reg, &miss_force_generic);
+
+ // Check that the key is a smi or a heap number convertible to a smi.
+ GenerateSmiKeyCheck(masm, key_reg, t0, t1, f2, &miss_force_generic);
__ lw(elements_reg,
FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
diff --git a/deps/v8/src/mirror-debugger.js b/deps/v8/src/mirror-debugger.js
index c43dd228ec..c7f0dccb7b 100644
--- a/deps/v8/src/mirror-debugger.js
+++ b/deps/v8/src/mirror-debugger.js
@@ -596,6 +596,23 @@ ObjectMirror.prototype.protoObject = function() {
};
+/**
+ * Return the primitive value if this is an object of Boolean, Number or String
+ * type (but not Date). Otherwise return undefined.
+ */
+ObjectMirror.prototype.primitiveValue = function() {
+ if (!IS_STRING_WRAPPER(this.value_) && !IS_NUMBER_WRAPPER(this.value_) &&
+ !IS_BOOLEAN_WRAPPER(this.value_)) {
+ return void 0;
+ }
+ var primitiveValue = %_ValueOf(this.value_);
+ if (IS_UNDEFINED(primitiveValue)) {
+ return void 0;
+ }
+ return MakeMirror(primitiveValue);
+};
+
+
ObjectMirror.prototype.hasNamedInterceptor = function() {
// Get information on interceptors for this object.
var x = %GetInterceptorInfo(this.value_);
@@ -896,6 +913,22 @@ FunctionMirror.prototype.constructedBy = function(opt_max_instances) {
};
+FunctionMirror.prototype.scopeCount = function() {
+ if (this.resolved()) {
+ return %GetFunctionScopeCount(this.value());
+ } else {
+ return 0;
+ }
+};
+
+
+FunctionMirror.prototype.scope = function(index) {
+ if (this.resolved()) {
+ return new ScopeMirror(void 0, this, index);
+ }
+};
+
+
FunctionMirror.prototype.toText = function() {
return this.source();
};
@@ -1572,7 +1605,7 @@ FrameMirror.prototype.scopeCount = function() {
FrameMirror.prototype.scope = function(index) {
- return new ScopeMirror(this, index);
+ return new ScopeMirror(this, void 0, index);
};
@@ -1735,39 +1768,54 @@ FrameMirror.prototype.toText = function(opt_locals) {
var kScopeDetailsTypeIndex = 0;
var kScopeDetailsObjectIndex = 1;
-function ScopeDetails(frame, index) {
- this.break_id_ = frame.break_id_;
- this.details_ = %GetScopeDetails(frame.break_id_,
- frame.details_.frameId(),
- frame.details_.inlinedFrameIndex(),
- index);
+function ScopeDetails(frame, fun, index) {
+ if (frame) {
+ this.break_id_ = frame.break_id_;
+ this.details_ = %GetScopeDetails(frame.break_id_,
+ frame.details_.frameId(),
+ frame.details_.inlinedFrameIndex(),
+ index);
+ } else {
+ this.details_ = %GetFunctionScopeDetails(fun.value(), index);
+ this.break_id_ = undefined;
+ }
}
ScopeDetails.prototype.type = function() {
- %CheckExecutionState(this.break_id_);
+ if (!IS_UNDEFINED(this.break_id_)) {
+ %CheckExecutionState(this.break_id_);
+ }
return this.details_[kScopeDetailsTypeIndex];
};
ScopeDetails.prototype.object = function() {
- %CheckExecutionState(this.break_id_);
+ if (!IS_UNDEFINED(this.break_id_)) {
+ %CheckExecutionState(this.break_id_);
+ }
return this.details_[kScopeDetailsObjectIndex];
};
/**
- * Mirror object for scope.
+ * Mirror object for scope of frame or function. Either frame or function must
+ * be specified.
* @param {FrameMirror} frame The frame this scope is a part of
+ * @param {FunctionMirror} fun The function this scope is a part of
* @param {number} index The scope index in the frame
* @constructor
* @extends Mirror
*/
-function ScopeMirror(frame, index) {
+function ScopeMirror(frame, fun, index) {
%_CallFunction(this, SCOPE_TYPE, Mirror);
- this.frame_index_ = frame.index_;
+ if (frame) {
+ this.frame_index_ = frame.index_;
+ } else {
+ this.frame_index_ = undefined;
+ }
this.scope_index_ = index;
- this.details_ = new ScopeDetails(frame, index);
+  this.details_ = new ScopeDetails(frame, fun, index);
}
inherits(ScopeMirror, Mirror);
@@ -2234,6 +2282,11 @@ JSONProtocolSerializer.prototype.serializeObject_ = function(mirror, content,
content.protoObject = this.serializeReference(mirror.protoObject());
content.prototypeObject = this.serializeReference(mirror.prototypeObject());
+ var primitiveValue = mirror.primitiveValue();
+ if (!IS_UNDEFINED(primitiveValue)) {
+ content.primitiveValue = this.serializeReference(primitiveValue);
+ }
+
// Add flags to indicate whether there are interceptors.
if (mirror.hasNamedInterceptor()) {
content.namedInterceptor = true;
@@ -2259,6 +2312,15 @@ JSONProtocolSerializer.prototype.serializeObject_ = function(mirror, content,
serializeLocationFields(mirror.sourceLocation(), content);
}
+
+ content.scopes = [];
+ for (var i = 0; i < mirror.scopeCount(); i++) {
+ var scope = mirror.scope(i);
+ content.scopes.push({
+ type: scope.scopeType(),
+ index: i
+ });
+ }
}
// Add date specific properties.
diff --git a/deps/v8/src/mksnapshot.cc b/deps/v8/src/mksnapshot.cc
index d1620bfff5..e426a58092 100644
--- a/deps/v8/src/mksnapshot.cc
+++ b/deps/v8/src/mksnapshot.cc
@@ -303,7 +303,11 @@ int main(int argc, char** argv) {
#endif
i::Serializer::Enable();
Persistent<Context> context = v8::Context::New();
- ASSERT(!context.IsEmpty());
+ if (context.IsEmpty()) {
+ fprintf(stderr,
+ "\nException thrown while compiling natives - see above.\n\n");
+ exit(1);
+ }
// Make sure all builtin scripts are cached.
{ HandleScope scope;
for (int i = 0; i < i::Natives::GetBuiltinsCount(); i++) {
diff --git a/deps/v8/src/objects-debug.cc b/deps/v8/src/objects-debug.cc
index 8eefb23db2..5aac50319d 100644
--- a/deps/v8/src/objects-debug.cc
+++ b/deps/v8/src/objects-debug.cc
@@ -135,6 +135,9 @@ void HeapObject::HeapObjectVerify() {
case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
JSObject::cast(this)->JSObjectVerify();
break;
+ case JS_MODULE_TYPE:
+ JSModule::cast(this)->JSModuleVerify();
+ break;
case JS_VALUE_TYPE:
JSValue::cast(this)->JSValueVerify();
break;
@@ -283,12 +286,11 @@ void JSObject::JSObjectVerify() {
(map()->inobject_properties() + properties()->length() -
map()->NextFreePropertyIndex()));
}
- ASSERT_EQ((map()->has_fast_elements() ||
- map()->has_fast_smi_only_elements() ||
+ ASSERT_EQ((map()->has_fast_smi_or_object_elements() ||
(elements() == GetHeap()->empty_fixed_array())),
(elements()->map() == GetHeap()->fixed_array_map() ||
elements()->map() == GetHeap()->fixed_cow_array_map()));
- ASSERT(map()->has_fast_elements() == HasFastElements());
+ ASSERT(map()->has_fast_object_elements() == HasFastObjectElements());
}
@@ -300,6 +302,8 @@ void Map::MapVerify() {
instance_size() < HEAP->Capacity()));
VerifyHeapPointer(prototype());
VerifyHeapPointer(instance_descriptors());
+ SLOW_ASSERT(instance_descriptors()->IsSortedNoDuplicates());
+ SLOW_ASSERT(instance_descriptors()->IsConsistentWithBackPointers(this));
}
@@ -366,6 +370,15 @@ void FixedDoubleArray::FixedDoubleArrayVerify() {
}
+void JSModule::JSModuleVerify() {
+ Object* v = context();
+ if (v->IsHeapObject()) {
+ VerifyHeapPointer(v);
+ }
+ CHECK(v->IsUndefined() || v->IsModuleContext());
+}
+
+
void JSValue::JSValueVerify() {
Object* v = value();
if (v->IsHeapObject()) {
@@ -444,10 +457,17 @@ void String::StringVerify() {
ConsString::cast(this)->ConsStringVerify();
} else if (IsSlicedString()) {
SlicedString::cast(this)->SlicedStringVerify();
+ } else if (IsSeqAsciiString()) {
+ SeqAsciiString::cast(this)->SeqAsciiStringVerify();
}
}
+void SeqAsciiString::SeqAsciiStringVerify() {
+ CHECK(String::IsAscii(GetChars(), length()));
+}
+
+
void ConsString::ConsStringVerify() {
CHECK(this->first()->IsString());
CHECK(this->second() == GetHeap()->empty_string() ||
@@ -496,7 +516,7 @@ void JSGlobalProxy::JSGlobalProxyVerify() {
VerifyObjectField(JSGlobalProxy::kContextOffset);
// Make sure that this object has no properties, elements.
CHECK_EQ(0, properties()->length());
- CHECK(HasFastElements());
+ CHECK(HasFastObjectElements());
CHECK_EQ(0, FixedArray::cast(elements())->length());
}
@@ -664,6 +684,7 @@ void AccessorInfo::AccessorInfoVerify() {
VerifyPointer(name());
VerifyPointer(data());
VerifyPointer(flag());
+ VerifyPointer(expected_receiver_type());
}
@@ -791,6 +812,11 @@ void JSObject::IncrementSpillStatistics(SpillInformation* info) {
}
// Indexed properties
switch (GetElementsKind()) {
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
+ case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
case FAST_ELEMENTS: {
info->number_of_objects_with_fast_elements_++;
int holes = 0;
@@ -804,6 +830,14 @@ void JSObject::IncrementSpillStatistics(SpillInformation* info) {
info->number_of_fast_unused_elements_ += holes;
break;
}
+ case EXTERNAL_BYTE_ELEMENTS:
+ case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
+ case EXTERNAL_SHORT_ELEMENTS:
+ case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
+ case EXTERNAL_INT_ELEMENTS:
+ case EXTERNAL_UNSIGNED_INT_ELEMENTS:
+ case EXTERNAL_FLOAT_ELEMENTS:
+ case EXTERNAL_DOUBLE_ELEMENTS:
case EXTERNAL_PIXEL_ELEMENTS: {
info->number_of_objects_with_fast_elements_++;
ExternalPixelArray* e = ExternalPixelArray::cast(elements());
@@ -817,8 +851,7 @@ void JSObject::IncrementSpillStatistics(SpillInformation* info) {
dict->Capacity() - dict->NumberOfElements();
break;
}
- default:
- UNREACHABLE();
+ case NON_STRICT_ARGUMENTS_ELEMENTS:
break;
}
}
@@ -882,6 +915,46 @@ bool DescriptorArray::IsSortedNoDuplicates() {
}
+static bool CheckOneBackPointer(Map* current_map, Object* target) {
+ return !target->IsMap() || Map::cast(target)->GetBackPointer() == current_map;
+}
+
+
+bool DescriptorArray::IsConsistentWithBackPointers(Map* current_map) {
+ for (int i = 0; i < number_of_descriptors(); ++i) {
+ switch (GetType(i)) {
+ case MAP_TRANSITION:
+ case CONSTANT_TRANSITION:
+ if (!CheckOneBackPointer(current_map, GetValue(i))) {
+ return false;
+ }
+ break;
+ case CALLBACKS: {
+ Object* object = GetValue(i);
+ if (object->IsAccessorPair()) {
+ AccessorPair* accessors = AccessorPair::cast(object);
+ if (!CheckOneBackPointer(current_map, accessors->getter())) {
+ return false;
+ }
+ if (!CheckOneBackPointer(current_map, accessors->setter())) {
+ return false;
+ }
+ }
+ break;
+ }
+ case NORMAL:
+ case FIELD:
+ case CONSTANT_FUNCTION:
+ case HANDLER:
+ case INTERCEPTOR:
+ case NULL_DESCRIPTOR:
+ break;
+ }
+ }
+ return true;
+}
+
+
void JSFunctionResultCache::JSFunctionResultCacheVerify() {
JSFunction::cast(get(kFactoryIndex))->Verify();
@@ -923,6 +996,23 @@ void NormalizedMapCache::NormalizedMapCacheVerify() {
}
+void Map::ZapInstanceDescriptors() {
+ DescriptorArray* descriptors = instance_descriptors();
+ if (descriptors == GetHeap()->empty_descriptor_array()) return;
+ MemsetPointer(descriptors->data_start(),
+ GetHeap()->the_hole_value(),
+ descriptors->length());
+}
+
+
+void Map::ZapPrototypeTransitions() {
+ FixedArray* proto_transitions = prototype_transitions();
+ MemsetPointer(proto_transitions->data_start(),
+ GetHeap()->the_hole_value(),
+ proto_transitions->length());
+}
+
+
#endif // DEBUG
} } // namespace v8::internal
diff --git a/deps/v8/src/objects-inl.h b/deps/v8/src/objects-inl.h
index 68feda46b4..da1a35b08b 100644
--- a/deps/v8/src/objects-inl.h
+++ b/deps/v8/src/objects-inl.h
@@ -128,18 +128,6 @@ PropertyDetails PropertyDetails::AsDeleted() {
}
-bool IsMoreGeneralElementsKindTransition(ElementsKind from_kind,
- ElementsKind to_kind) {
- if (to_kind == FAST_ELEMENTS) {
- return from_kind == FAST_SMI_ONLY_ELEMENTS ||
- from_kind == FAST_DOUBLE_ELEMENTS;
- } else {
- return to_kind == FAST_DOUBLE_ELEMENTS &&
- from_kind == FAST_SMI_ONLY_ELEMENTS;
- }
-}
-
-
bool Object::IsFixedArrayBase() {
return IsFixedArray() || IsFixedDoubleArray();
}
@@ -581,7 +569,8 @@ bool Object::IsContext() {
map == heap->catch_context_map() ||
map == heap->with_context_map() ||
map == heap->global_context_map() ||
- map == heap->block_context_map());
+ map == heap->block_context_map() ||
+ map == heap->module_context_map());
}
return false;
}
@@ -594,6 +583,13 @@ bool Object::IsGlobalContext() {
}
+bool Object::IsModuleContext() {
+ return Object::IsHeapObject() &&
+ HeapObject::cast(this)->map() ==
+ HeapObject::cast(this)->GetHeap()->module_context_map();
+}
+
+
bool Object::IsScopeInfo() {
return Object::IsHeapObject() &&
HeapObject::cast(this)->map() ==
@@ -613,6 +609,7 @@ TYPE_CHECKER(Code, CODE_TYPE)
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(JSGlobalPropertyCell, JS_GLOBAL_PROPERTY_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
+TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
TYPE_CHECKER(JSDate, JS_DATE_TYPE)
TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
@@ -1235,35 +1232,26 @@ FixedArrayBase* JSObject::elements() {
return static_cast<FixedArrayBase*>(array);
}
-void JSObject::ValidateSmiOnlyElements() {
+
+void JSObject::ValidateElements() {
#if DEBUG
- if (map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS) {
- Heap* heap = GetHeap();
- // Don't use elements, since integrity checks will fail if there
- // are filler pointers in the array.
- FixedArray* fixed_array =
- reinterpret_cast<FixedArray*>(READ_FIELD(this, kElementsOffset));
- Map* map = fixed_array->map();
- // Arrays that have been shifted in place can't be verified.
- if (map != heap->raw_unchecked_one_pointer_filler_map() &&
- map != heap->raw_unchecked_two_pointer_filler_map() &&
- map != heap->free_space_map()) {
- for (int i = 0; i < fixed_array->length(); i++) {
- Object* current = fixed_array->get(i);
- ASSERT(current->IsSmi() || current->IsTheHole());
- }
- }
+ if (FLAG_enable_slow_asserts) {
+ ElementsAccessor* accessor = GetElementsAccessor();
+ accessor->Validate(this);
}
#endif
}
MaybeObject* JSObject::EnsureCanContainHeapObjectElements() {
-#if DEBUG
- ValidateSmiOnlyElements();
-#endif
- if ((map()->elements_kind() != FAST_ELEMENTS)) {
- return TransitionElementsKind(FAST_ELEMENTS);
+ ValidateElements();
+ ElementsKind elements_kind = map()->elements_kind();
+ if (!IsFastObjectElementsKind(elements_kind)) {
+ if (IsFastHoleyElementsKind(elements_kind)) {
+ return TransitionElementsKind(FAST_HOLEY_ELEMENTS);
+ } else {
+ return TransitionElementsKind(FAST_ELEMENTS);
+ }
}
return this;
}
@@ -1275,20 +1263,29 @@ MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
ElementsKind current_kind = map()->elements_kind();
ElementsKind target_kind = current_kind;
ASSERT(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
- if (current_kind == FAST_ELEMENTS) return this;
-
+ bool is_holey = IsFastHoleyElementsKind(current_kind);
+ if (current_kind == FAST_HOLEY_ELEMENTS) return this;
Heap* heap = GetHeap();
Object* the_hole = heap->the_hole_value();
- Object* heap_number_map = heap->heap_number_map();
for (uint32_t i = 0; i < count; ++i) {
Object* current = *objects++;
- if (!current->IsSmi() && current != the_hole) {
- if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS &&
- HeapObject::cast(current)->map() == heap_number_map) {
- target_kind = FAST_DOUBLE_ELEMENTS;
+ if (current == the_hole) {
+ is_holey = true;
+ target_kind = GetHoleyElementsKind(target_kind);
+ } else if (!current->IsSmi()) {
+ if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
+ if (IsFastSmiElementsKind(target_kind)) {
+ if (is_holey) {
+ target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
+ } else {
+ target_kind = FAST_DOUBLE_ELEMENTS;
+ }
+ }
+ } else if (is_holey) {
+ target_kind = FAST_HOLEY_ELEMENTS;
+ break;
} else {
target_kind = FAST_ELEMENTS;
- break;
}
}
}
@@ -1301,6 +1298,7 @@ MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
MaybeObject* JSObject::EnsureCanContainElements(FixedArrayBase* elements,
+ uint32_t length,
EnsureElementsMode mode) {
if (elements->map() != GetHeap()->fixed_double_array_map()) {
ASSERT(elements->map() == GetHeap()->fixed_array_map() ||
@@ -1309,11 +1307,19 @@ MaybeObject* JSObject::EnsureCanContainElements(FixedArrayBase* elements,
mode = DONT_ALLOW_DOUBLE_ELEMENTS;
}
Object** objects = FixedArray::cast(elements)->GetFirstElementAddress();
- return EnsureCanContainElements(objects, elements->length(), mode);
+ return EnsureCanContainElements(objects, length, mode);
}
ASSERT(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
- if (GetElementsKind() == FAST_SMI_ONLY_ELEMENTS) {
+ if (GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
+ return TransitionElementsKind(FAST_HOLEY_DOUBLE_ELEMENTS);
+ } else if (GetElementsKind() == FAST_SMI_ELEMENTS) {
+ FixedDoubleArray* double_array = FixedDoubleArray::cast(elements);
+ for (uint32_t i = 0; i < length; ++i) {
+ if (double_array->is_the_hole(i)) {
+ return TransitionElementsKind(FAST_HOLEY_DOUBLE_ELEMENTS);
+ }
+ }
return TransitionElementsKind(FAST_DOUBLE_ELEMENTS);
}
@@ -1325,21 +1331,20 @@ MaybeObject* JSObject::GetElementsTransitionMap(Isolate* isolate,
ElementsKind to_kind) {
Map* current_map = map();
ElementsKind from_kind = current_map->elements_kind();
-
if (from_kind == to_kind) return current_map;
Context* global_context = isolate->context()->global_context();
- if (current_map == global_context->smi_js_array_map()) {
- if (to_kind == FAST_ELEMENTS) {
- return global_context->object_js_array_map();
- } else {
- if (to_kind == FAST_DOUBLE_ELEMENTS) {
- return global_context->double_js_array_map();
- } else {
- ASSERT(to_kind == DICTIONARY_ELEMENTS);
+ Object* maybe_array_maps = global_context->js_array_maps();
+ if (maybe_array_maps->IsFixedArray()) {
+ FixedArray* array_maps = FixedArray::cast(maybe_array_maps);
+ if (array_maps->get(from_kind) == current_map) {
+ Object* maybe_transitioned_map = array_maps->get(to_kind);
+ if (maybe_transitioned_map->IsMap()) {
+ return Map::cast(maybe_transitioned_map);
}
}
}
+
return GetElementsTransitionMapSlow(to_kind);
}
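
The fast path above consults a per-context cache of JSArray maps indexed by elements kind and only falls back to the slow lookup on a miss. A minimal sketch of that cache shape, with invented types standing in for the real context slot and maps:

#include <array>
#include <cstdio>

// Illustrative stand-ins; kCount mirrors the idea of kElementsKindCount.
enum Kind { SMI, HOLEY_SMI, DOUBLE, HOLEY_DOUBLE, OBJECT, HOLEY_OBJECT, kCount };

struct Map { Kind kind; };

// Per-"context" cache: one canonical array map per elements kind, or null.
struct ArrayMapCache {
  std::array<Map*, kCount> maps{};  // value-initialized to nullptr

  Map* Lookup(const Map* current, Kind to) const {
    // Only use the cache when 'current' is itself one of the cached maps;
    // otherwise the object has drifted away from the canonical array maps.
    if (maps[current->kind] != current) return nullptr;
    return maps[to];  // May still be null: caller falls back to the slow path.
  }
};

static Map* SlowTransition(Kind to) {
  // Stand-in for the slow path: produce a fresh map of the requested kind.
  static Map storage[16];
  static int used = 0;
  storage[used].kind = to;
  return &storage[used++];
}

int main() {
  Map smi{SMI}, dbl{DOUBLE}, obj{OBJECT};
  ArrayMapCache cache;
  cache.maps[SMI] = &smi; cache.maps[DOUBLE] = &dbl; cache.maps[OBJECT] = &obj;

  Map* hit = cache.Lookup(&smi, DOUBLE);
  Map* result = hit ? hit : SlowTransition(DOUBLE);
  std::printf("cache hit: %d, kind: %d\n", hit != nullptr, result->kind);
}
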
@@ -1348,9 +1353,6 @@ void JSObject::set_map_and_elements(Map* new_map,
FixedArrayBase* value,
WriteBarrierMode mode) {
ASSERT(value->HasValidElements());
-#ifdef DEBUG
- ValidateSmiOnlyElements();
-#endif
if (new_map != NULL) {
if (mode == UPDATE_WRITE_BARRIER) {
set_map(new_map);
@@ -1359,8 +1361,7 @@ void JSObject::set_map_and_elements(Map* new_map,
set_map_no_write_barrier(new_map);
}
}
- ASSERT((map()->has_fast_elements() ||
- map()->has_fast_smi_only_elements() ||
+ ASSERT((map()->has_fast_smi_or_object_elements() ||
(value == GetHeap()->empty_fixed_array())) ==
(value->map() == GetHeap()->fixed_array_map() ||
value->map() == GetHeap()->fixed_cow_array_map()));
@@ -1383,8 +1384,7 @@ void JSObject::initialize_properties() {
void JSObject::initialize_elements() {
- ASSERT(map()->has_fast_elements() ||
- map()->has_fast_smi_only_elements() ||
+ ASSERT(map()->has_fast_smi_or_object_elements() ||
map()->has_fast_double_elements());
ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
WRITE_FIELD(this, kElementsOffset, GetHeap()->empty_fixed_array());
@@ -1393,9 +1393,10 @@ void JSObject::initialize_elements() {
MaybeObject* JSObject::ResetElements() {
Object* obj;
- ElementsKind elements_kind = FLAG_smi_only_arrays
- ? FAST_SMI_ONLY_ELEMENTS
- : FAST_ELEMENTS;
+ ElementsKind elements_kind = GetInitialFastElementsKind();
+ if (!FLAG_smi_only_arrays) {
+ elements_kind = FastSmiToObjectElementsKind(elements_kind);
+ }
MaybeObject* maybe_obj = GetElementsTransitionMap(GetIsolate(),
elements_kind);
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
@@ -1438,6 +1439,8 @@ int JSObject::GetHeaderSize() {
// field operations considerably on average.
if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
switch (type) {
+ case JS_MODULE_TYPE:
+ return JSModule::kSize;
case JS_GLOBAL_PROXY_TYPE:
return JSGlobalProxy::kSize;
case JS_GLOBAL_OBJECT_TYPE:
@@ -1603,13 +1606,23 @@ bool JSObject::HasFastProperties() {
}
-int JSObject::MaxFastProperties() {
+bool JSObject::TooManyFastProperties(int properties,
+ JSObject::StoreFromKeyed store_mode) {
// Allow extra fast properties if the object has more than
- // kMaxFastProperties in-object properties. When this is the case,
+ // kFastPropertiesSoftLimit in-object properties. When this is the case,
// it is very unlikely that the object is being used as a dictionary
// and there is a good chance that allowing more map transitions
// will be worth it.
- return Max(map()->inobject_properties(), kMaxFastProperties);
+ int inobject = map()->inobject_properties();
+
+ int limit;
+ if (store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ||
+ map()->used_for_prototype()) {
+ limit = Max(inobject, kMaxFastProperties);
+ } else {
+ limit = Max(inobject, kFastPropertiesSoftLimit);
+ }
+ return properties > limit;
}
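
TooManyFastProperties replaces the old single cap with a two-tier policy: ordinary (possibly keyed) stores hit a soft limit, while named stores and prototype maps get the higher hard limit, since those are unlikely to be dictionary-like usage. A compact sketch of that policy; the constants and signature here are illustrative only:

#include <algorithm>
#include <cstdio>

// Illustrative limits; the real values live in JSObject.
const int kFastPropertiesSoftLimit = 12;
const int kMaxFastProperties = 64;

enum StoreMode { MAY_BE_STORE_FROM_KEYED, CERTAINLY_NOT_STORE_FROM_KEYED };

// Decide whether an object with 'properties' out-of-object properties should
// give up on fast properties and normalize to dictionary mode.
bool TooManyFastProperties(int inobject_properties, int properties,
                           StoreMode mode, bool used_for_prototype) {
  int limit = (mode == CERTAINLY_NOT_STORE_FROM_KEYED || used_for_prototype)
                  ? std::max(inobject_properties, kMaxFastProperties)
                  : std::max(inobject_properties, kFastPropertiesSoftLimit);
  return properties > limit;
}

int main() {
  // A keyed store past the soft limit triggers normalization...
  std::printf("%d\n", TooManyFastProperties(4, 20, MAY_BE_STORE_FROM_KEYED, false));
  // ...but the same property count is still fine for a named store.
  std::printf("%d\n", TooManyFastProperties(4, 20, CERTAINLY_NOT_STORE_FROM_KEYED, false));
}
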
@@ -1665,6 +1678,11 @@ Object* FixedArray::get(int index) {
}
+bool FixedArray::is_the_hole(int index) {
+ return get(index) == GetHeap()->the_hole_value();
+}
+
+
void FixedArray::set(int index, Smi* value) {
ASSERT(map() != HEAP->fixed_cow_array_map());
ASSERT(index >= 0 && index < this->length());
@@ -1858,9 +1876,14 @@ Object** FixedArray::data_start() {
bool DescriptorArray::IsEmpty() {
ASSERT(this->IsSmi() ||
- this->length() > kFirstIndex ||
+ this->MayContainTransitions() ||
this == HEAP->empty_descriptor_array());
- return this->IsSmi() || length() <= kFirstIndex;
+ return this->IsSmi() || length() < kFirstIndex;
+}
+
+
+bool DescriptorArray::MayContainTransitions() {
+ return length() >= kTransitionsIndex;
}
@@ -1870,7 +1893,7 @@ int DescriptorArray::bit_field3_storage() {
}
void DescriptorArray::set_bit_field3_storage(int value) {
- ASSERT(!IsEmpty());
+ ASSERT(this->MayContainTransitions());
WRITE_FIELD(this, kBitField3StorageOffset, Smi::FromInt(value));
}
@@ -1894,7 +1917,7 @@ int DescriptorArray::Search(String* name) {
// Fast case: do linear search for small arrays.
const int kMaxElementsForLinearSearch = 8;
if (StringShape(name).IsSymbol() && nof < kMaxElementsForLinearSearch) {
- return LinearSearch(name, nof);
+ return LinearSearch(EXPECT_SORTED, name, nof);
}
// Slow case: perform binary search.
@@ -1912,27 +1935,79 @@ int DescriptorArray::SearchWithCache(String* name) {
}
+Map* DescriptorArray::elements_transition_map() {
+ if (!this->MayContainTransitions()) {
+ return NULL;
+ }
+ Object* transition_map = get(kTransitionsIndex);
+ if (transition_map == Smi::FromInt(0)) {
+ return NULL;
+ } else {
+ return Map::cast(transition_map);
+ }
+}
+
+
+void DescriptorArray::set_elements_transition_map(
+ Map* transition_map, WriteBarrierMode mode) {
+ ASSERT(this->length() > kTransitionsIndex);
+ Heap* heap = GetHeap();
+ WRITE_FIELD(this, kTransitionsOffset, transition_map);
+ CONDITIONAL_WRITE_BARRIER(
+ heap, this, kTransitionsOffset, transition_map, mode);
+ ASSERT(DescriptorArray::cast(this));
+}
+
+
+Object** DescriptorArray::GetKeySlot(int descriptor_number) {
+ ASSERT(descriptor_number < number_of_descriptors());
+ return HeapObject::RawField(
+ reinterpret_cast<HeapObject*>(this),
+ OffsetOfElementAt(ToKeyIndex(descriptor_number)));
+}
+
+
String* DescriptorArray::GetKey(int descriptor_number) {
ASSERT(descriptor_number < number_of_descriptors());
return String::cast(get(ToKeyIndex(descriptor_number)));
}
+Object** DescriptorArray::GetValueSlot(int descriptor_number) {
+ ASSERT(descriptor_number < number_of_descriptors());
+ return HeapObject::RawField(
+ reinterpret_cast<HeapObject*>(this),
+ OffsetOfElementAt(ToValueIndex(descriptor_number)));
+}
+
+
Object* DescriptorArray::GetValue(int descriptor_number) {
ASSERT(descriptor_number < number_of_descriptors());
- return GetContentArray()->get(ToValueIndex(descriptor_number));
+ return get(ToValueIndex(descriptor_number));
}
-Smi* DescriptorArray::GetDetails(int descriptor_number) {
+void DescriptorArray::SetNullValueUnchecked(int descriptor_number, Heap* heap) {
ASSERT(descriptor_number < number_of_descriptors());
- return Smi::cast(GetContentArray()->get(ToDetailsIndex(descriptor_number)));
+ set_null_unchecked(heap, ToValueIndex(descriptor_number));
}
-PropertyType DescriptorArray::GetType(int descriptor_number) {
+PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
+ ASSERT(descriptor_number < number_of_descriptors());
+ Object* details = get(ToDetailsIndex(descriptor_number));
+ return PropertyDetails(Smi::cast(details));
+}
+
+
+void DescriptorArray::SetDetailsUnchecked(int descriptor_number, Smi* value) {
ASSERT(descriptor_number < number_of_descriptors());
- return PropertyDetails(GetDetails(descriptor_number)).type();
+ set_unchecked(ToDetailsIndex(descriptor_number), value);
+}
+
+
+PropertyType DescriptorArray::GetType(int descriptor_number) {
+ return GetDetails(descriptor_number).type();
}
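
With the separate content array gone, each descriptor's key, value and details now live directly in the DescriptorArray at indices computed from the descriptor number. A self-contained sketch of that flat (key, value, details) layout; the header size and element types are invented for illustration:

#include <cassert>
#include <string>
#include <vector>

// Illustrative flat descriptor storage: a small header followed by
// (key, value, details) triples, mirroring ToKeyIndex/ToValueIndex/ToDetailsIndex.
class FlatDescriptors {
 public:
  static const int kHeaderSize = 3;   // e.g. bit_field3 storage, transitions, ...
  static const int kEntrySize = 3;    // key, value, details

  explicit FlatDescriptors(int count)
      : slots_(kHeaderSize + count * kEntrySize) {}

  static int ToKeyIndex(int n)     { return kHeaderSize + n * kEntrySize + 0; }
  static int ToValueIndex(int n)   { return kHeaderSize + n * kEntrySize + 1; }
  static int ToDetailsIndex(int n) { return kHeaderSize + n * kEntrySize + 2; }

  void Set(int n, const std::string& key, const std::string& value, int details) {
    slots_[ToKeyIndex(n)] = key;
    slots_[ToValueIndex(n)] = value;
    slots_[ToDetailsIndex(n)] = std::to_string(details);  // details kept as text here
  }
  const std::string& GetKey(int n) const   { return slots_[ToKeyIndex(n)]; }
  const std::string& GetValue(int n) const { return slots_[ToValueIndex(n)]; }
  int GetDetails(int n) const { return std::stoi(slots_[ToDetailsIndex(n)]); }

 private:
  std::vector<std::string> slots_;
};

int main() {
  FlatDescriptors d(2);
  d.Set(0, "x", "field#0", 7);
  d.Set(1, "y", "field#1", 9);
  assert(d.GetKey(1) == "y" && d.GetDetails(0) == 7);
}
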
@@ -1969,7 +2044,6 @@ bool DescriptorArray::IsTransitionOnly(int descriptor_number) {
switch (GetType(descriptor_number)) {
case MAP_TRANSITION:
case CONSTANT_TRANSITION:
- case ELEMENTS_TRANSITION:
return true;
case CALLBACKS: {
Object* value = GetValue(descriptor_number);
@@ -1995,15 +2069,10 @@ bool DescriptorArray::IsNullDescriptor(int descriptor_number) {
}
-bool DescriptorArray::IsDontEnum(int descriptor_number) {
- return PropertyDetails(GetDetails(descriptor_number)).IsDontEnum();
-}
-
-
void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
desc->Init(GetKey(descriptor_number),
GetValue(descriptor_number),
- PropertyDetails(GetDetails(descriptor_number)));
+ GetDetails(descriptor_number));
}
@@ -2016,11 +2085,10 @@ void DescriptorArray::Set(int descriptor_number,
NoIncrementalWriteBarrierSet(this,
ToKeyIndex(descriptor_number),
desc->GetKey());
- FixedArray* content_array = GetContentArray();
- NoIncrementalWriteBarrierSet(content_array,
+ NoIncrementalWriteBarrierSet(this,
ToValueIndex(descriptor_number),
desc->GetValue());
- NoIncrementalWriteBarrierSet(content_array,
+ NoIncrementalWriteBarrierSet(this,
ToDetailsIndex(descriptor_number),
desc->GetDetails().AsSmi());
}
@@ -2029,11 +2097,10 @@ void DescriptorArray::Set(int descriptor_number,
void DescriptorArray::NoIncrementalWriteBarrierSwapDescriptors(
int first, int second) {
NoIncrementalWriteBarrierSwap(this, ToKeyIndex(first), ToKeyIndex(second));
- FixedArray* content_array = GetContentArray();
- NoIncrementalWriteBarrierSwap(content_array,
+ NoIncrementalWriteBarrierSwap(this,
ToValueIndex(first),
ToValueIndex(second));
- NoIncrementalWriteBarrierSwap(content_array,
+ NoIncrementalWriteBarrierSwap(this,
ToDetailsIndex(first),
ToDetailsIndex(second));
}
@@ -2044,7 +2111,6 @@ DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
marking_->EnterNoMarkingScope();
if (array->number_of_descriptors() > 0) {
ASSERT(Marking::Color(array) == Marking::WHITE_OBJECT);
- ASSERT(Marking::Color(array->GetContentArray()) == Marking::WHITE_OBJECT);
}
}
@@ -2851,15 +2917,15 @@ bool Map::has_non_instance_prototype() {
void Map::set_function_with_prototype(bool value) {
if (value) {
- set_bit_field2(bit_field2() | (1 << kFunctionWithPrototype));
+ set_bit_field3(bit_field3() | (1 << kFunctionWithPrototype));
} else {
- set_bit_field2(bit_field2() & ~(1 << kFunctionWithPrototype));
+ set_bit_field3(bit_field3() & ~(1 << kFunctionWithPrototype));
}
}
bool Map::function_with_prototype() {
- return ((1 << kFunctionWithPrototype) & bit_field2()) != 0;
+ return ((1 << kFunctionWithPrototype) & bit_field3()) != 0;
}
@@ -2916,6 +2982,20 @@ bool Map::is_shared() {
}
+void Map::set_used_for_prototype(bool value) {
+ if (value) {
+ set_bit_field3(bit_field3() | (1 << kUsedForPrototype));
+ } else {
+ set_bit_field3(bit_field3() & ~(1 << kUsedForPrototype));
+ }
+}
+
+
+bool Map::used_for_prototype() {
+ return ((1 << kUsedForPrototype) & bit_field3()) != 0;
+}
+
+
JSFunction* Map::unchecked_constructor() {
return reinterpret_cast<JSFunction*>(READ_FIELD(this, kConstructorOffset));
}
@@ -3006,26 +3086,26 @@ void Code::set_is_pregenerated(bool value) {
bool Code::optimizable() {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
}
void Code::set_optimizable(bool value) {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
}
bool Code::has_deoptimization_support() {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
}
void Code::set_has_deoptimization_support(bool value) {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
@@ -3033,14 +3113,14 @@ void Code::set_has_deoptimization_support(bool value) {
bool Code::has_debug_break_slots() {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
}
void Code::set_has_debug_break_slots(bool value) {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
@@ -3048,56 +3128,41 @@ void Code::set_has_debug_break_slots(bool value) {
bool Code::is_compiled_optimizable() {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
return FullCodeFlagsIsCompiledOptimizable::decode(flags);
}
void Code::set_compiled_optimizable(bool value) {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}
-bool Code::has_self_optimization_header() {
- ASSERT(kind() == FUNCTION);
- byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
- return FullCodeFlagsHasSelfOptimizationHeader::decode(flags);
-}
-
-
-void Code::set_self_optimization_header(bool value) {
- ASSERT(kind() == FUNCTION);
- byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
- flags = FullCodeFlagsHasSelfOptimizationHeader::update(flags, value);
- WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
-}
-
-
int Code::allow_osr_at_loop_nesting_level() {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
}
void Code::set_allow_osr_at_loop_nesting_level(int level) {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
}
int Code::profiler_ticks() {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
return READ_BYTE_FIELD(this, kProfilerTicksOffset);
}
void Code::set_profiler_ticks(int ticks) {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
ASSERT(ticks < 256);
WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
}
@@ -3129,13 +3194,13 @@ void Code::set_safepoint_table_offset(unsigned offset) {
unsigned Code::stack_check_table_offset() {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
return READ_UINT32_FIELD(this, kStackCheckTableOffsetOffset);
}
void Code::set_stack_check_table_offset(unsigned offset) {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
WRITE_UINT32_FIELD(this, kStackCheckTableOffsetOffset, offset);
}
@@ -3202,6 +3267,18 @@ void Code::set_compare_state(byte value) {
}
+byte Code::compare_operation() {
+ ASSERT(is_compare_ic_stub());
+ return READ_BYTE_FIELD(this, kCompareOperationOffset);
+}
+
+
+void Code::set_compare_operation(byte value) {
+ ASSERT(is_compare_ic_stub());
+ WRITE_BYTE_FIELD(this, kCompareOperationOffset, value);
+}
+
+
byte Code::to_boolean_state() {
ASSERT(is_to_boolean_ic_stub());
return READ_BYTE_FIELD(this, kToBooleanTypeOffset);
@@ -3348,6 +3425,9 @@ void Map::clear_instance_descriptors() {
Object* object = READ_FIELD(this,
kInstanceDescriptorsOrBitField3Offset);
if (!object->IsSmi()) {
+#ifdef DEBUG
+ ZapInstanceDescriptors();
+#endif
WRITE_FIELD(
this,
kInstanceDescriptorsOrBitField3Offset,
@@ -3373,6 +3453,11 @@ void Map::set_instance_descriptors(DescriptorArray* value,
}
}
ASSERT(!is_shared());
+#ifdef DEBUG
+ if (value != instance_descriptors()) {
+ ZapInstanceDescriptors();
+ }
+#endif
WRITE_FIELD(this, kInstanceDescriptorsOrBitField3Offset, value);
CONDITIONAL_WRITE_BARRIER(
heap, this, kInstanceDescriptorsOrBitField3Offset, value, mode);
@@ -3404,22 +3489,86 @@ void Map::set_bit_field3(int value) {
}
-FixedArray* Map::unchecked_prototype_transitions() {
- return reinterpret_cast<FixedArray*>(
- READ_FIELD(this, kPrototypeTransitionsOffset));
+Object* Map::GetBackPointer() {
+ Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
+ if (object->IsFixedArray()) {
+ return FixedArray::cast(object)->get(kProtoTransitionBackPointerOffset);
+ } else {
+ return object;
+ }
+}
+
+
+Map* Map::elements_transition_map() {
+ return instance_descriptors()->elements_transition_map();
+}
+
+
+void Map::set_elements_transition_map(Map* transitioned_map) {
+ return instance_descriptors()->set_elements_transition_map(transitioned_map);
+}
+
+
+void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
+ Heap* heap = GetHeap();
+ ASSERT(instance_type() >= FIRST_JS_RECEIVER_TYPE);
+ ASSERT((value->IsUndefined() && GetBackPointer()->IsMap()) ||
+ (value->IsMap() && GetBackPointer()->IsUndefined()));
+ Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
+ if (object->IsFixedArray()) {
+ FixedArray::cast(object)->set(
+ kProtoTransitionBackPointerOffset, value, mode);
+ } else {
+ WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, value);
+ CONDITIONAL_WRITE_BARRIER(
+ heap, this, kPrototypeTransitionsOrBackPointerOffset, value, mode);
+ }
+}
+
+
+FixedArray* Map::prototype_transitions() {
+ Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
+ if (object->IsFixedArray()) {
+ return FixedArray::cast(object);
+ } else {
+ return GetHeap()->empty_fixed_array();
+ }
+}
+
+
+void Map::set_prototype_transitions(FixedArray* value, WriteBarrierMode mode) {
+ Heap* heap = GetHeap();
+ ASSERT(value != heap->empty_fixed_array());
+ value->set(kProtoTransitionBackPointerOffset, GetBackPointer());
+#ifdef DEBUG
+ if (value != prototype_transitions()) {
+ ZapPrototypeTransitions();
+ }
+#endif
+ WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, value);
+ CONDITIONAL_WRITE_BARRIER(
+ heap, this, kPrototypeTransitionsOrBackPointerOffset, value, mode);
+}
+
+
+void Map::init_prototype_transitions(Object* undefined) {
+ ASSERT(undefined->IsUndefined());
+ WRITE_FIELD(this, kPrototypeTransitionsOrBackPointerOffset, undefined);
+}
+
+
+HeapObject* Map::unchecked_prototype_transitions() {
+ Object* object = READ_FIELD(this, kPrototypeTransitionsOrBackPointerOffset);
+ return reinterpret_cast<HeapObject*>(object);
}
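
Map now keeps its back pointer and its prototype-transitions array in one overloaded slot: the slot holds either the back pointer itself or a transitions array whose element 0 carries the back pointer. A small sketch of that overload; it uses two discriminating pointers where V8 uses a single tagged field plus an IsFixedArray() check:

#include <cassert>
#include <vector>

// Hypothetical types throughout; the real slot is
// kPrototypeTransitionsOrBackPointerOffset on Map.
struct Map {
  Map* back_pointer_slot = nullptr;            // used while no transitions exist
  std::vector<Map*>* transitions_slot = nullptr;  // element 0 doubles as back pointer

  Map* GetBackPointer() const {
    return transitions_slot ? (*transitions_slot)[0] : back_pointer_slot;
  }
  void SetBackPointer(Map* value) {
    if (transitions_slot) (*transitions_slot)[0] = value;
    else back_pointer_slot = value;
  }
  void set_prototype_transitions(std::vector<Map*>* array) {
    (*array)[0] = GetBackPointer();  // carry the back pointer into slot 0
    transitions_slot = array;
    back_pointer_slot = nullptr;
  }
};

int main() {
  Map parent, child;
  child.SetBackPointer(&parent);
  std::vector<Map*> transitions(4, nullptr);
  child.set_prototype_transitions(&transitions);
  assert(child.GetBackPointer() == &parent);  // survives the slot upgrade
}
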
ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
-ACCESSORS(Map, prototype_transitions, FixedArray, kPrototypeTransitionsOffset)
ACCESSORS(Map, constructor, Object, kConstructorOffset)
ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
-ACCESSORS(JSFunction,
- next_function_link,
- Object,
- kNextFunctionLinkOffset)
+ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)
ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
@@ -3432,6 +3581,8 @@ ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(AccessorInfo, data, Object, kDataOffset)
ACCESSORS(AccessorInfo, name, Object, kNameOffset)
ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
+ACCESSORS(AccessorInfo, expected_receiver_type, Object,
+ kExpectedReceiverTypeOffset)
ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
@@ -3522,7 +3673,7 @@ ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
kThisPropertyAssignmentsOffset)
-SMI_ACCESSORS(SharedFunctionInfo, ic_age, kICAgeOffset)
+SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
@@ -3571,8 +3722,10 @@ SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
kThisPropertyAssignmentsCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
-SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
-SMI_ACCESSORS(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
+SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
+SMI_ACCESSORS(SharedFunctionInfo,
+ stress_deopt_counter,
+ kStressDeoptCounterOffset)
#else
#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
@@ -3624,8 +3777,10 @@ PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
kThisPropertyAssignmentsCountOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset)
-PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
-PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
+PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, counters, kCountersOffset)
+PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
+ stress_deopt_counter,
+ kStressDeoptCounterOffset)
#endif
@@ -3669,6 +3824,12 @@ void SharedFunctionInfo::set_optimization_disabled(bool disable) {
}
+int SharedFunctionInfo::profiler_ticks() {
+ if (code()->kind() != Code::FUNCTION) return 0;
+ return code()->profiler_ticks();
+}
+
+
LanguageMode SharedFunctionInfo::language_mode() {
int hints = compiler_hints();
if (BooleanBit::get(hints, kExtendedModeFunction)) {
@@ -3820,12 +3981,64 @@ void SharedFunctionInfo::set_code_age(int code_age) {
}
+int SharedFunctionInfo::ic_age() {
+ return ICAgeBits::decode(counters());
+}
+
+
+void SharedFunctionInfo::set_ic_age(int ic_age) {
+ set_counters(ICAgeBits::update(counters(), ic_age));
+}
+
+
+int SharedFunctionInfo::deopt_count() {
+ return DeoptCountBits::decode(counters());
+}
+
+
+void SharedFunctionInfo::set_deopt_count(int deopt_count) {
+ set_counters(DeoptCountBits::update(counters(), deopt_count));
+}
+
+
+void SharedFunctionInfo::increment_deopt_count() {
+ int value = counters();
+ int deopt_count = DeoptCountBits::decode(value);
+ deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
+ set_counters(DeoptCountBits::update(value, deopt_count));
+}
+
+
+int SharedFunctionInfo::opt_reenable_tries() {
+ return OptReenableTriesBits::decode(counters());
+}
+
+
+void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
+ set_counters(OptReenableTriesBits::update(counters(), tries));
+}
+
+
bool SharedFunctionInfo::has_deoptimization_support() {
Code* code = this->code();
return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
}
+void SharedFunctionInfo::TryReenableOptimization() {
+ int tries = opt_reenable_tries();
+ set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
+ // We reenable optimization whenever the number of tries is a large
+ // enough power of 2.
+ if (tries >= 16 && (((tries - 1) & tries) == 0)) {
+ set_optimization_disabled(false);
+ set_opt_count(0);
+ set_deopt_count(0);
+ code()->set_optimizable(true);
+ }
+}
+
+
bool JSFunction::IsBuiltin() {
return context()->global()->IsJSBuiltinsObject();
}
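
SharedFunctionInfo now packs ic_age, deopt_count and opt_reenable_tries into a single counters value via bit fields, and TryReenableOptimization gives optimization another chance whenever the try counter reaches a large enough power of two. A self-contained sketch of the packing and the heuristic; the bit widths and field order are made up:

#include <cstdint>
#include <cstdio>

// Illustrative bit layout of a single 'counters' word.
struct BitField {
  int shift, bits;
  uint32_t Max() const { return (1u << bits) - 1; }
  uint32_t decode(uint32_t word) const { return (word >> shift) & Max(); }
  uint32_t update(uint32_t word, uint32_t value) const {
    return (word & ~(Max() << shift)) | ((value & Max()) << shift);
  }
};

const BitField kIcAge            = {0, 8};
const BitField kDeoptCount       = {8, 8};
const BitField kOptReenableTries = {16, 8};

struct SharedInfo {
  uint32_t counters = 0;
  bool optimization_disabled = false;

  void increment_deopt_count() {
    uint32_t n = (kDeoptCount.decode(counters) + 1) & kDeoptCount.Max();
    counters = kDeoptCount.update(counters, n);
  }

  // Bump the try counter; when the previous count is a power of two >= 16,
  // clear the disable flag and the deopt count so optimization gets retried.
  void TryReenableOptimization() {
    uint32_t tries = kOptReenableTries.decode(counters);
    counters = kOptReenableTries.update(counters, (tries + 1) & kOptReenableTries.Max());
    if (tries >= 16 && ((tries - 1) & tries) == 0) {
      optimization_disabled = false;
      counters = kDeoptCount.update(counters, 0);
    }
  }
};

int main() {
  SharedInfo s;
  s.optimization_disabled = true;
  for (int i = 0; i < 17; ++i) s.TryReenableOptimization();
  std::printf("re-enabled once tries hit 16: %d\n", !s.optimization_disabled);
}
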
@@ -3934,27 +4147,29 @@ MaybeObject* JSFunction::set_initial_map_and_cache_transitions(
global_context->get(Context::ARRAY_FUNCTION_INDEX);
if (array_function->IsJSFunction() &&
this == JSFunction::cast(array_function)) {
- ASSERT(initial_map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
-
- MaybeObject* maybe_map = initial_map->CopyDropTransitions();
- Map* new_double_map = NULL;
- if (!maybe_map->To<Map>(&new_double_map)) return maybe_map;
- new_double_map->set_elements_kind(FAST_DOUBLE_ELEMENTS);
- maybe_map = initial_map->AddElementsTransition(FAST_DOUBLE_ELEMENTS,
- new_double_map);
- if (maybe_map->IsFailure()) return maybe_map;
-
- maybe_map = new_double_map->CopyDropTransitions();
- Map* new_object_map = NULL;
- if (!maybe_map->To<Map>(&new_object_map)) return maybe_map;
- new_object_map->set_elements_kind(FAST_ELEMENTS);
- maybe_map = new_double_map->AddElementsTransition(FAST_ELEMENTS,
- new_object_map);
- if (maybe_map->IsFailure()) return maybe_map;
-
- global_context->set_smi_js_array_map(initial_map);
- global_context->set_double_js_array_map(new_double_map);
- global_context->set_object_js_array_map(new_object_map);
+ // Replace all of the cached initial array maps in the global context with
+ // the appropriate transitioned elements kind maps.
+ Heap* heap = GetHeap();
+ MaybeObject* maybe_maps =
+ heap->AllocateFixedArrayWithHoles(kElementsKindCount);
+ FixedArray* maps;
+ if (!maybe_maps->To(&maps)) return maybe_maps;
+
+ Map* current_map = initial_map;
+ ElementsKind kind = current_map->elements_kind();
+ ASSERT(kind == GetInitialFastElementsKind());
+ maps->set(kind, current_map);
+ for (int i = GetSequenceIndexFromFastElementsKind(kind) + 1;
+ i < kFastElementsKindCount; ++i) {
+ Map* new_map;
+ ElementsKind next_kind = GetFastElementsKindFromSequenceIndex(i);
+ MaybeObject* maybe_new_map =
+ current_map->CreateNextElementsTransition(next_kind);
+ if (!maybe_new_map->To(&new_map)) return maybe_new_map;
+ maps->set(next_kind, new_map);
+ current_map = new_map;
+ }
+ global_context->set_js_array_maps(maps);
}
set_initial_map(initial_map);
return this;
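
For the Array constructor, the initial map is now walked through the whole ordered sequence of fast elements kinds up front, creating one transitioned map per kind and caching them all in a context-wide array. A sketch of that setup loop; the kind order, types and storage are illustrative, not V8's:

#include <cstdio>
#include <vector>

// Illustrative ordered sequence of fast kinds; the index in this enum plays
// the role of the sequence index used by the real elements-kind helpers.
enum Kind { SMI, HOLEY_SMI, DOUBLE, HOLEY_DOUBLE, OBJECT, HOLEY_OBJECT, kFastKindCount };

struct Map {
  Kind kind;
  Map* elements_transition;  // next map in the transition chain
};

// Build the full chain starting from the initial map and cache every map.
std::vector<Map*> CacheArrayMaps(Map* initial_map, std::vector<Map>& storage) {
  std::vector<Map*> cache(kFastKindCount, nullptr);
  Map* current = initial_map;
  cache[current->kind] = current;
  for (int i = current->kind + 1; i < kFastKindCount; ++i) {
    storage.push_back(Map{static_cast<Kind>(i), nullptr});
    Map* next = &storage.back();
    current->elements_transition = next;  // link the chain as we go
    cache[i] = next;
    current = next;
  }
  return cache;
}

int main() {
  std::vector<Map> storage;
  storage.reserve(kFastKindCount);  // keep pointers stable while appending
  Map initial{SMI, nullptr};
  std::vector<Map*> cache = CacheArrayMaps(&initial, storage);
  std::printf("cached %d maps, most general kind = %d\n",
              static_cast<int>(cache.size()), cache.back()->kind);
}
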
@@ -3993,6 +4208,7 @@ Object* JSFunction::prototype() {
return instance_prototype();
}
+
bool JSFunction::should_have_prototype() {
return map()->function_with_prototype();
}
@@ -4095,6 +4311,16 @@ void Foreign::set_foreign_address(Address value) {
}
+ACCESSORS(JSModule, context, Object, kContextOffset)
+
+
+JSModule* JSModule::cast(Object* obj) {
+ ASSERT(obj->IsJSModule());
+ ASSERT(HeapObject::cast(obj)->Size() == JSModule::kSize);
+ return reinterpret_cast<JSModule*>(obj);
+}
+
+
ACCESSORS(JSValue, value, Object, kValueOffset)
@@ -4280,18 +4506,18 @@ ElementsKind JSObject::GetElementsKind() {
FixedArrayBase* fixed_array =
reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
Map* map = fixed_array->map();
- ASSERT(((kind == FAST_ELEMENTS || kind == FAST_SMI_ONLY_ELEMENTS) &&
- (map == GetHeap()->fixed_array_map() ||
- map == GetHeap()->fixed_cow_array_map())) ||
- (kind == FAST_DOUBLE_ELEMENTS &&
- (fixed_array->IsFixedDoubleArray() ||
- fixed_array == GetHeap()->empty_fixed_array())) ||
- (kind == DICTIONARY_ELEMENTS &&
+ ASSERT((IsFastSmiOrObjectElementsKind(kind) &&
+ (map == GetHeap()->fixed_array_map() ||
+ map == GetHeap()->fixed_cow_array_map())) ||
+ (IsFastDoubleElementsKind(kind) &&
+ (fixed_array->IsFixedDoubleArray() ||
+ fixed_array == GetHeap()->empty_fixed_array())) ||
+ (kind == DICTIONARY_ELEMENTS &&
fixed_array->IsFixedArray() &&
- fixed_array->IsDictionary()) ||
- (kind > DICTIONARY_ELEMENTS));
- ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) ||
- (elements()->IsFixedArray() && elements()->length() >= 2));
+ fixed_array->IsDictionary()) ||
+ (kind > DICTIONARY_ELEMENTS));
+ ASSERT((kind != NON_STRICT_ARGUMENTS_ELEMENTS) ||
+ (elements()->IsFixedArray() && elements()->length() >= 2));
#endif
return kind;
}
@@ -4302,25 +4528,28 @@ ElementsAccessor* JSObject::GetElementsAccessor() {
}
-bool JSObject::HasFastElements() {
- return GetElementsKind() == FAST_ELEMENTS;
+bool JSObject::HasFastObjectElements() {
+ return IsFastObjectElementsKind(GetElementsKind());
}
-bool JSObject::HasFastSmiOnlyElements() {
- return GetElementsKind() == FAST_SMI_ONLY_ELEMENTS;
+bool JSObject::HasFastSmiElements() {
+ return IsFastSmiElementsKind(GetElementsKind());
}
-bool JSObject::HasFastTypeElements() {
- ElementsKind elements_kind = GetElementsKind();
- return elements_kind == FAST_SMI_ONLY_ELEMENTS ||
- elements_kind == FAST_ELEMENTS;
+bool JSObject::HasFastSmiOrObjectElements() {
+ return IsFastSmiOrObjectElementsKind(GetElementsKind());
}
bool JSObject::HasFastDoubleElements() {
- return GetElementsKind() == FAST_DOUBLE_ELEMENTS;
+ return IsFastDoubleElementsKind(GetElementsKind());
+}
+
+
+bool JSObject::HasFastHoleyElements() {
+ return IsFastHoleyElementsKind(GetElementsKind());
}
@@ -4377,7 +4606,7 @@ bool JSObject::HasIndexedInterceptor() {
MaybeObject* JSObject::EnsureWritableFastElements() {
- ASSERT(HasFastTypeElements());
+ ASSERT(HasFastSmiOrObjectElements());
FixedArray* elems = FixedArray::cast(elements());
Isolate* isolate = GetIsolate();
if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
@@ -4618,6 +4847,13 @@ void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
}
+bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
+ Object* function_template = expected_receiver_type();
+ if (!function_template->IsFunctionTemplateInfo()) return true;
+ return receiver->IsInstanceOf(FunctionTemplateInfo::cast(function_template));
+}
+
+
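
IsCompatibleReceiver lets an AccessorInfo insist that its callback only runs on receivers instantiated from a particular function template; with no expected type recorded, any receiver passes. A tiny sketch of that guard using hypothetical template and receiver types:

#include <cstdio>

// Hypothetical stand-ins: a "template" and objects that remember which
// template (if any) they were instantiated from.
struct FunctionTemplate { int id; };

struct Receiver {
  const FunctionTemplate* created_from;  // may be nullptr
};

struct AccessorInfo {
  const FunctionTemplate* expected_receiver_type;  // nullptr means "any receiver"

  bool IsCompatibleReceiver(const Receiver& r) const {
    if (expected_receiver_type == nullptr) return true;  // no restriction recorded
    return r.created_from == expected_receiver_type;     // cf. an instance-of check
  }
};

int main() {
  FunctionTemplate dom_node{1};
  Receiver good{&dom_node}, bad{nullptr};
  AccessorInfo strict{&dom_node}, lax{nullptr};
  std::printf("%d %d %d\n",
              strict.IsCompatibleReceiver(good),   // 1
              strict.IsCompatibleReceiver(bad),    // 0: caller would throw a TypeError
              lax.IsCompatibleReceiver(bad));      // 1
}
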
template<typename Shape, typename Key>
void Dictionary<Shape, Key>::SetEntry(int entry,
Object* key,
@@ -4735,7 +4971,7 @@ void Map::ClearCodeCache(Heap* heap) {
void JSArray::EnsureSize(int required_size) {
- ASSERT(HasFastTypeElements());
+ ASSERT(HasFastSmiOrObjectElements());
FixedArray* elts = FixedArray::cast(elements());
const int kArraySizeThatFitsComfortablyInNewSpace = 128;
if (elts->length() < required_size) {
@@ -4767,13 +5003,13 @@ bool JSArray::AllowsSetElementsLength() {
MaybeObject* JSArray::SetContent(FixedArrayBase* storage) {
MaybeObject* maybe_result = EnsureCanContainElements(
- storage, ALLOW_COPIED_DOUBLE_ELEMENTS);
+ storage, storage->length(), ALLOW_COPIED_DOUBLE_ELEMENTS);
if (maybe_result->IsFailure()) return maybe_result;
ASSERT((storage->map() == GetHeap()->fixed_double_array_map() &&
- GetElementsKind() == FAST_DOUBLE_ELEMENTS) ||
+ IsFastDoubleElementsKind(GetElementsKind())) ||
((storage->map() != GetHeap()->fixed_double_array_map()) &&
- ((GetElementsKind() == FAST_ELEMENTS) ||
- (GetElementsKind() == FAST_SMI_ONLY_ELEMENTS &&
+ (IsFastObjectElementsKind(GetElementsKind()) ||
+ (IsFastSmiElementsKind(GetElementsKind()) &&
FixedArray::cast(storage)->ContainsOnlySmisOrHoles()))));
set_elements(storage);
set_length(Smi::FromInt(storage->length()));
diff --git a/deps/v8/src/objects-printer.cc b/deps/v8/src/objects-printer.cc
index a12b81388c..b886168991 100644
--- a/deps/v8/src/objects-printer.cc
+++ b/deps/v8/src/objects-printer.cc
@@ -135,6 +135,9 @@ void HeapObject::HeapObjectPrint(FILE* out) {
case ODDBALL_TYPE:
Oddball::cast(this)->to_string()->Print(out);
break;
+ case JS_MODULE_TYPE:
+ JSModule::cast(this)->JSModulePrint(out);
+ break;
case JS_FUNCTION_TYPE:
JSFunction::cast(this)->JSFunctionPrint(out);
break;
@@ -152,7 +155,7 @@ void HeapObject::HeapObjectPrint(FILE* out) {
JSValue::cast(this)->value()->Print(out);
break;
case JS_DATE_TYPE:
- JSDate::cast(this)->value()->Print(out);
+ JSDate::cast(this)->JSDatePrint(out);
break;
case CODE_TYPE:
Code::cast(this)->CodePrint(out);
@@ -270,25 +273,6 @@ void JSObject::PrintProperties(FILE* out) {
descs->GetCallbacksObject(i)->ShortPrint(out);
PrintF(out, " (callback)\n");
break;
- case ELEMENTS_TRANSITION: {
- PrintF(out, "(elements transition to ");
- Object* descriptor_contents = descs->GetValue(i);
- if (descriptor_contents->IsMap()) {
- Map* map = Map::cast(descriptor_contents);
- PrintElementsKind(out, map->elements_kind());
- } else {
- FixedArray* map_array = FixedArray::cast(descriptor_contents);
- for (int i = 0; i < map_array->length(); ++i) {
- Map* map = Map::cast(map_array->get(i));
- if (i != 0) {
- PrintF(out, ", ");
- }
- PrintElementsKind(out, map->elements_kind());
- }
- }
- PrintF(out, ")\n");
- break;
- }
case MAP_TRANSITION:
PrintF(out, "(map transition)\n");
break;
@@ -315,7 +299,9 @@ void JSObject::PrintElements(FILE* out) {
// Don't call GetElementsKind, its validation code can cause the printer to
// fail when debugging.
switch (map()->elements_kind()) {
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
case FAST_ELEMENTS: {
// Print in array notation for non-sparse arrays.
FixedArray* p = FixedArray::cast(elements());
@@ -326,6 +312,7 @@ void JSObject::PrintElements(FILE* out) {
}
break;
}
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case FAST_DOUBLE_ELEMENTS: {
// Print in array notation for non-sparse arrays.
if (elements()->length() > 0) {
@@ -432,6 +419,22 @@ void JSObject::JSObjectPrint(FILE* out) {
PrintF(out,
"]\n - prototype = %p\n",
reinterpret_cast<void*>(GetPrototype()));
+ PrintF(out,
+ " - elements transition to = %p\n",
+ reinterpret_cast<void*>(map()->elements_transition_map()));
+ PrintF(out, " {\n");
+ PrintProperties(out);
+ PrintElements(out);
+ PrintF(out, " }\n");
+}
+
+
+void JSModule::JSModulePrint(FILE* out) {
+ HeapObject::PrintHeader(out, "JSModule");
+ PrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
+ PrintF(out, " - context = ");
+ context()->Print(out);
+ PrintElementsKind(out, this->map()->elements_kind());
PrintF(out, " {\n");
PrintProperties(out);
PrintElements(out);
@@ -485,6 +488,7 @@ static const char* TypeToString(InstanceType type) {
case ODDBALL_TYPE: return "ODDBALL";
case JS_GLOBAL_PROPERTY_CELL_TYPE: return "JS_GLOBAL_PROPERTY_CELL";
case SHARED_FUNCTION_INFO_TYPE: return "SHARED_FUNCTION_INFO";
+ case JS_MODULE_TYPE: return "JS_MODULE";
case JS_FUNCTION_TYPE: return "JS_FUNCTION";
case CODE_TYPE: return "CODE";
case JS_ARRAY_TYPE: return "JS_ARRAY";
@@ -539,6 +543,8 @@ void Map::MapPrint(FILE* out) {
prototype()->ShortPrint(out);
PrintF(out, "\n - constructor: ");
constructor()->ShortPrint(out);
+ PrintF(out, "\n - code cache: ");
+ code_cache()->ShortPrint(out);
PrintF(out, "\n");
}
diff --git a/deps/v8/src/objects-visiting-inl.h b/deps/v8/src/objects-visiting-inl.h
index 627d1bc2ef..8ba92f70c9 100644
--- a/deps/v8/src/objects-visiting-inl.h
+++ b/deps/v8/src/objects-visiting-inl.h
@@ -72,9 +72,7 @@ void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);
- table_.Register(kVisitJSFunction,
- &JSObjectVisitor::
- template VisitSpecialized<JSFunction::kSize>);
+ table_.Register(kVisitJSFunction, &VisitJSFunction);
table_.Register(kVisitFreeSpace, &VisitFreeSpace);
diff --git a/deps/v8/src/objects-visiting.cc b/deps/v8/src/objects-visiting.cc
index c7c8a87895..a2dc43e247 100644
--- a/deps/v8/src/objects-visiting.cc
+++ b/deps/v8/src/objects-visiting.cc
@@ -133,6 +133,7 @@ StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
case JS_OBJECT_TYPE:
case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
+ case JS_MODULE_TYPE:
case JS_VALUE_TYPE:
case JS_DATE_TYPE:
case JS_ARRAY_TYPE:
diff --git a/deps/v8/src/objects-visiting.h b/deps/v8/src/objects-visiting.h
index 26e79ae5ed..b476dfef2e 100644
--- a/deps/v8/src/objects-visiting.h
+++ b/deps/v8/src/objects-visiting.h
@@ -289,6 +289,23 @@ class StaticNewSpaceVisitor : public StaticVisitorBase {
}
private:
+ static inline int VisitJSFunction(Map* map, HeapObject* object) {
+ Heap* heap = map->GetHeap();
+ VisitPointers(heap,
+ HeapObject::RawField(object, JSFunction::kPropertiesOffset),
+ HeapObject::RawField(object, JSFunction::kCodeEntryOffset));
+
+ // Don't visit code entry. We are using this visitor only during scavenges.
+
+ VisitPointers(
+ heap,
+ HeapObject::RawField(object,
+ JSFunction::kCodeEntryOffset + kPointerSize),
+ HeapObject::RawField(object,
+ JSFunction::kNonWeakFieldsEndOffset));
+ return JSFunction::kSize;
+ }
+
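
The scavenger's JSFunction visitor above walks the pointer fields in two ranges so the code-entry slot in between is skipped, because that slot holds a raw entry address rather than a tagged pointer. A small sketch of visiting two disjoint slot ranges around an excluded field, over a made-up layout:

#include <cstdio>

// Made-up layout: slots [0, kCodeEntry) and (kCodeEntry, kSlotCount) hold
// tagged pointers; slot kCodeEntry holds a raw address that must be skipped.
const int kCodeEntry = 2;
const int kSlotCount = 5;

typedef void (*SlotVisitor)(int index, void* value);

void VisitPointers(void** slots, int from, int to, SlotVisitor visit) {
  for (int i = from; i < to; ++i) visit(i, slots[i]);
}

void VisitFunctionLikeObject(void** slots, SlotVisitor visit) {
  VisitPointers(slots, 0, kCodeEntry, visit);               // fields before the entry
  // Deliberately do not visit slots[kCodeEntry]: it is not a tagged pointer.
  VisitPointers(slots, kCodeEntry + 1, kSlotCount, visit);  // fields after the entry
}

int main() {
  int a, b, c, d;
  void* slots[kSlotCount] = {&a, &b, (void*)0xdeadbeef, &c, &d};
  VisitFunctionLikeObject(slots, [](int i, void* v) {
    std::printf("visited slot %d (%p)\n", i, v);
  });
}
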
static inline int VisitByteArray(Map* map, HeapObject* object) {
return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
}
diff --git a/deps/v8/src/objects.cc b/deps/v8/src/objects.cc
index 904cf524cb..d3e6492479 100644
--- a/deps/v8/src/objects.cc
+++ b/deps/v8/src/objects.cc
@@ -56,11 +56,6 @@
namespace v8 {
namespace internal {
-void PrintElementsKind(FILE* out, ElementsKind kind) {
- ElementsAccessor* accessor = ElementsAccessor::ForKind(kind);
- PrintF(out, "%s", accessor->name());
-}
-
MUST_USE_RESULT static MaybeObject* CreateJSValue(JSFunction* constructor,
Object* value) {
@@ -183,6 +178,16 @@ MaybeObject* JSObject::GetPropertyWithCallback(Object* receiver,
// api style callbacks.
if (structure->IsAccessorInfo()) {
AccessorInfo* data = AccessorInfo::cast(structure);
+ if (!data->IsCompatibleReceiver(receiver)) {
+ Handle<Object> name_handle(name);
+ Handle<Object> receiver_handle(receiver);
+ Handle<Object> args[2] = { name_handle, receiver_handle };
+ Handle<Object> error =
+ isolate->factory()->NewTypeError("incompatible_method_receiver",
+ HandleVector(args,
+ ARRAY_SIZE(args)));
+ return isolate->Throw(*error);
+ }
Object* fun_obj = data->getter();
v8::AccessorGetter call_fun = v8::ToCData<v8::AccessorGetter>(fun_obj);
HandleScope scope(isolate);
@@ -253,13 +258,14 @@ MaybeObject* JSProxy::GetElementWithHandler(Object* receiver,
}
-MaybeObject* JSProxy::SetElementWithHandler(uint32_t index,
+MaybeObject* JSProxy::SetElementWithHandler(JSReceiver* receiver,
+ uint32_t index,
Object* value,
StrictModeFlag strict_mode) {
String* name;
MaybeObject* maybe = GetHeap()->Uint32ToString(index);
if (!maybe->To<String>(&name)) return maybe;
- return SetPropertyWithHandler(name, value, NONE, strict_mode);
+ return SetPropertyWithHandler(receiver, name, value, NONE, strict_mode);
}
@@ -543,7 +549,7 @@ bool JSObject::IsDirty() {
// If the object is fully fast case and has the same map it was
// created with then no changes can have been made to it.
return map() != fun->initial_map()
- || !HasFastElements()
+ || !HasFastObjectElements()
|| !HasFastProperties();
}
@@ -633,7 +639,6 @@ MaybeObject* Object::GetProperty(Object* receiver,
recvr, name, attributes);
}
case MAP_TRANSITION:
- case ELEMENTS_TRANSITION:
case CONSTANT_TRANSITION:
case NULL_DESCRIPTOR:
break;
@@ -1067,7 +1072,9 @@ void String::StringShortPrint(StringStream* accumulator) {
void JSObject::JSObjectShortPrint(StringStream* accumulator) {
switch (map()->instance_type()) {
case JS_ARRAY_TYPE: {
- double length = JSArray::cast(this)->length()->Number();
+ double length = JSArray::cast(this)->length()->IsUndefined()
+ ? 0
+ : JSArray::cast(this)->length()->Number();
accumulator->Add("<JS Array[%u]>", static_cast<uint32_t>(length));
break;
}
@@ -1338,6 +1345,7 @@ void HeapObject::IterateBody(InstanceType type, int object_size,
break;
case JS_OBJECT_TYPE:
case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
+ case JS_MODULE_TYPE:
case JS_VALUE_TYPE:
case JS_DATE_TYPE:
case JS_ARRAY_TYPE:
@@ -1515,7 +1523,8 @@ static bool IsIdentifier(UnicodeCache* cache,
MaybeObject* JSObject::AddFastProperty(String* name,
Object* value,
- PropertyAttributes attributes) {
+ PropertyAttributes attributes,
+ StoreFromKeyed store_mode) {
ASSERT(!IsJSGlobalProxy());
// Normalize the object if the name is an actual string (not the
@@ -1553,10 +1562,7 @@ MaybeObject* JSObject::AddFastProperty(String* name,
// Element transitions are stored in the descriptor for property "", which is
// not an identifier and should have forced a switch to slow properties above.
- ASSERT(descriptor_index == DescriptorArray::kNotFound ||
- old_descriptors->GetType(descriptor_index) != ELEMENTS_TRANSITION);
- bool can_insert_transition = descriptor_index == DescriptorArray::kNotFound ||
- old_descriptors->GetType(descriptor_index) == ELEMENTS_TRANSITION;
+ bool can_insert_transition = descriptor_index == DescriptorArray::kNotFound;
bool allow_map_transition =
can_insert_transition &&
(isolate->context()->global_context()->object_function()->map() != map());
@@ -1581,7 +1587,7 @@ MaybeObject* JSObject::AddFastProperty(String* name,
}
if (map()->unused_property_fields() == 0) {
- if (properties()->length() > MaxFastProperties()) {
+ if (TooManyFastProperties(properties()->length(), store_mode)) {
Object* obj;
{ MaybeObject* maybe_obj =
NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
@@ -1602,7 +1608,10 @@ MaybeObject* JSObject::AddFastProperty(String* name,
}
// We have now allocated all the necessary objects.
// All the changes can be applied at once, so they are atomic.
- map()->set_instance_descriptors(old_descriptors);
+ if (allow_map_transition) {
+ map()->set_instance_descriptors(old_descriptors);
+ }
+ new_map->SetBackPointer(map());
new_map->set_instance_descriptors(DescriptorArray::cast(new_descriptors));
set_map(new_map);
return FastPropertyAtPut(index, value);
@@ -1663,6 +1672,7 @@ MaybeObject* JSObject::AddConstantFunctionProperty(
}
}
old_map->set_instance_descriptors(DescriptorArray::cast(new_descriptors));
+ Map::cast(new_map)->SetBackPointer(old_map);
return function;
}
@@ -1709,7 +1719,8 @@ MaybeObject* JSObject::AddSlowProperty(String* name,
MaybeObject* JSObject::AddProperty(String* name,
Object* value,
PropertyAttributes attributes,
- StrictModeFlag strict_mode) {
+ StrictModeFlag strict_mode,
+ JSReceiver::StoreFromKeyed store_mode) {
ASSERT(!IsJSGlobalProxy());
Map* map_of_this = map();
Heap* heap = GetHeap();
@@ -1732,7 +1743,7 @@ MaybeObject* JSObject::AddProperty(String* name,
JSFunction::cast(value),
attributes);
} else {
- return AddFastProperty(name, value, attributes);
+ return AddFastProperty(name, value, attributes, store_mode);
}
} else {
// Normalize the object to prevent very large instance descriptors.
@@ -1761,14 +1772,11 @@ MaybeObject* JSObject::SetPropertyPostInterceptor(
// found. Use set property to handle all these cases.
return SetProperty(&result, name, value, attributes, strict_mode);
}
- bool found = false;
+ bool done = false;
MaybeObject* result_object;
- result_object = SetPropertyWithCallbackSetterInPrototypes(name,
- value,
- attributes,
- &found,
- strict_mode);
- if (found) return result_object;
+ result_object =
+ SetPropertyViaPrototypes(name, value, attributes, strict_mode, &done);
+ if (done) return result_object;
// Add a new real property.
return AddProperty(name, value, attributes, strict_mode);
}
@@ -1823,6 +1831,7 @@ MaybeObject* JSObject::ConvertDescriptorToFieldAndMapTransition(
}
}
old_map->set_instance_descriptors(DescriptorArray::cast(new_descriptors));
+ map()->SetBackPointer(old_map);
return result;
}
@@ -1831,7 +1840,7 @@ MaybeObject* JSObject::ConvertDescriptorToField(String* name,
Object* new_value,
PropertyAttributes attributes) {
if (map()->unused_property_fields() == 0 &&
- properties()->length() > MaxFastProperties()) {
+ TooManyFastProperties(properties()->length(), MAY_BE_STORE_FROM_KEYED)) {
Object* obj;
{ MaybeObject* maybe_obj =
NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
@@ -1946,10 +1955,11 @@ Handle<Object> JSReceiver::SetProperty(Handle<JSReceiver> object,
MaybeObject* JSReceiver::SetProperty(String* name,
Object* value,
PropertyAttributes attributes,
- StrictModeFlag strict_mode) {
+ StrictModeFlag strict_mode,
+ JSReceiver::StoreFromKeyed store_mode) {
LookupResult result(GetIsolate());
LocalLookup(name, &result);
- return SetProperty(&result, name, value, attributes, strict_mode);
+ return SetProperty(&result, name, value, attributes, strict_mode, store_mode);
}
@@ -1982,6 +1992,16 @@ MaybeObject* JSObject::SetPropertyWithCallback(Object* structure,
if (structure->IsAccessorInfo()) {
// api style callbacks
AccessorInfo* data = AccessorInfo::cast(structure);
+ if (!data->IsCompatibleReceiver(this)) {
+ Handle<Object> name_handle(name);
+ Handle<Object> receiver_handle(this);
+ Handle<Object> args[2] = { name_handle, receiver_handle };
+ Handle<Object> error =
+ isolate->factory()->NewTypeError("incompatible_method_receiver",
+ HandleVector(args,
+ ARRAY_SIZE(args)));
+ return isolate->Throw(*error);
+ }
Object* call_obj = data->setter();
v8::AccessorSetter call_fun = v8::ToCData<v8::AccessorSetter>(call_obj);
if (call_fun == NULL) return value;
@@ -2047,26 +2067,6 @@ MaybeObject* JSReceiver::SetPropertyWithDefinedSetter(JSReceiver* setter,
}
-void JSObject::LookupCallbackSetterInPrototypes(String* name,
- LookupResult* result) {
- Heap* heap = GetHeap();
- for (Object* pt = GetPrototype();
- pt != heap->null_value();
- pt = pt->GetPrototype()) {
- if (pt->IsJSProxy()) {
- return result->HandlerResult(JSProxy::cast(pt));
- }
- JSObject::cast(pt)->LocalLookupRealNamedProperty(name, result);
- if (result->IsProperty()) {
- if (result->type() == CALLBACKS && !result->IsReadOnly()) return;
- // Found non-callback or read-only callback, stop looking.
- break;
- }
- }
- result->NotFound();
-}
-
-
MaybeObject* JSObject::SetElementWithCallbackSetterInPrototypes(
uint32_t index,
Object* value,
@@ -2083,8 +2083,8 @@ MaybeObject* JSObject::SetElementWithCallbackSetterInPrototypes(
*found = true; // Force abort
return maybe;
}
- return JSProxy::cast(pt)->SetPropertyWithHandlerIfDefiningSetter(
- name, value, NONE, strict_mode, found);
+ return JSProxy::cast(pt)->SetPropertyViaPrototypesWithHandler(
+ this, name, value, NONE, strict_mode, found);
}
if (!JSObject::cast(pt)->HasDictionaryElements()) {
continue;
@@ -2108,45 +2108,60 @@ MaybeObject* JSObject::SetElementWithCallbackSetterInPrototypes(
return heap->the_hole_value();
}
-MaybeObject* JSObject::SetPropertyWithCallbackSetterInPrototypes(
+MaybeObject* JSObject::SetPropertyViaPrototypes(
String* name,
Object* value,
PropertyAttributes attributes,
- bool* found,
- StrictModeFlag strict_mode) {
+ StrictModeFlag strict_mode,
+ bool* done) {
Heap* heap = GetHeap();
+ Isolate* isolate = heap->isolate();
+
+ *done = false;
// We could not find a local property so let's check whether there is an
- // accessor that wants to handle the property.
- LookupResult accessor_result(heap->isolate());
- LookupCallbackSetterInPrototypes(name, &accessor_result);
- if (accessor_result.IsFound()) {
- *found = true;
- if (accessor_result.type() == CALLBACKS) {
- return SetPropertyWithCallback(accessor_result.GetCallbackObject(),
- name,
- value,
- accessor_result.holder(),
- strict_mode);
- } else if (accessor_result.type() == HANDLER) {
- // There is a proxy in the prototype chain. Invoke its
- // getPropertyDescriptor trap.
- bool found = false;
- // SetPropertyWithHandlerIfDefiningSetter can cause GC,
- // make sure to use the handlified references after calling
- // the function.
- Handle<JSObject> self(this);
- Handle<String> hname(name);
- Handle<Object> hvalue(value);
- MaybeObject* result =
- accessor_result.proxy()->SetPropertyWithHandlerIfDefiningSetter(
- name, value, attributes, strict_mode, &found);
- if (found) return result;
- // The proxy does not define the property as an accessor.
- // Consequently, it has no effect on setting the receiver.
- return self->AddProperty(*hname, *hvalue, attributes, strict_mode);
+ // accessor that wants to handle the property, or whether the property is
+ // read-only on the prototype chain.
+ LookupResult result(isolate);
+ LookupRealNamedPropertyInPrototypes(name, &result);
+ if (result.IsFound()) {
+ switch (result.type()) {
+ case NORMAL:
+ case FIELD:
+ case CONSTANT_FUNCTION:
+ *done = result.IsReadOnly();
+ break;
+ case INTERCEPTOR: {
+ PropertyAttributes attr =
+ result.holder()->GetPropertyAttributeWithInterceptor(
+ this, name, true);
+ *done = !!(attr & READ_ONLY);
+ break;
+ }
+ case CALLBACKS: {
+ if (!FLAG_es5_readonly && result.IsReadOnly()) break;
+ *done = true;
+ return SetPropertyWithCallback(result.GetCallbackObject(),
+ name, value, result.holder(), strict_mode);
+ }
+ case HANDLER: {
+ return result.proxy()->SetPropertyViaPrototypesWithHandler(
+ this, name, value, attributes, strict_mode, done);
+ }
+ case MAP_TRANSITION:
+ case CONSTANT_TRANSITION:
+ case NULL_DESCRIPTOR:
+ break;
}
}
- *found = false;
+
+ // If we get here with *done true, we have encountered a read-only property.
+ if (!FLAG_es5_readonly) *done = false;
+ if (*done) {
+ if (strict_mode == kNonStrictMode) return value;
+ Handle<Object> args[] = { Handle<Object>(name), Handle<Object>(this)};
+ return isolate->Throw(*isolate->factory()->NewTypeError(
+ "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args))));
+ }
return heap->the_hole_value();
}
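
SetPropertyViaPrototypes replaces the old setter-only prototype lookup: one walk over the chain decides whether the store is handled by an accessor or proxy, rejected because a read-only property shadows it (a strict-mode TypeError), or allowed to fall through and create an own property. A condensed sketch of that three-way decision with hypothetical property records:

#include <cstdio>
#include <map>
#include <string>
#include <vector>

// Hypothetical property record on a prototype.
struct Property {
  bool read_only;
  bool is_accessor;  // has a setter callback
};

struct Proto { std::map<std::string, Property> props; };

enum Outcome { ADD_OWN_PROPERTY, CALL_ACCESSOR, REJECT_READ_ONLY };

// Walk the chain; the first prototype that knows the name decides the outcome.
Outcome StoreViaPrototypes(const std::vector<Proto*>& chain, const std::string& name) {
  for (const Proto* p : chain) {
    std::map<std::string, Property>::const_iterator it = p->props.find(name);
    if (it == p->props.end()) continue;
    if (it->second.is_accessor && !it->second.read_only) return CALL_ACCESSOR;
    if (it->second.read_only) return REJECT_READ_ONLY;  // strict mode: TypeError
    return ADD_OWN_PROPERTY;  // plain writable data property: shadow it
  }
  return ADD_OWN_PROPERTY;    // nothing on the chain objects to the store
}

int main() {
  Proto proto;
  proto.props["x"] = Property{true, false};  // read-only data property
  proto.props["y"] = Property{false, true};  // accessor with a setter
  std::vector<Proto*> chain;
  chain.push_back(&proto);
  std::printf("%d %d %d\n",
              StoreViaPrototypes(chain, "x"),   // 2: rejected
              StoreViaPrototypes(chain, "y"),   // 1: setter runs
              StoreViaPrototypes(chain, "z"));  // 0: add own property
}
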
@@ -2198,216 +2213,90 @@ static Handle<T> MaybeNull(T* p) {
Handle<Map> Map::FindTransitionedMap(MapHandleList* candidates) {
- ElementsKind elms_kind = elements_kind();
- if (elms_kind == FAST_DOUBLE_ELEMENTS) {
- bool dummy = true;
- Handle<Map> fast_map =
- MaybeNull(LookupElementsTransitionMap(FAST_ELEMENTS, &dummy));
- if (!fast_map.is_null() && ContainsMap(candidates, fast_map)) {
- return fast_map;
- }
- return Handle<Map>::null();
- }
- if (elms_kind == FAST_SMI_ONLY_ELEMENTS) {
- bool dummy = true;
- Handle<Map> double_map =
- MaybeNull(LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, &dummy));
- // In the current implementation, if the DOUBLE map doesn't exist, the
- // FAST map can't exist either.
- if (double_map.is_null()) return Handle<Map>::null();
- Handle<Map> fast_map =
- MaybeNull(double_map->LookupElementsTransitionMap(FAST_ELEMENTS,
- &dummy));
- if (!fast_map.is_null() && ContainsMap(candidates, fast_map)) {
- return fast_map;
- }
- if (ContainsMap(candidates, double_map)) return double_map;
- }
- return Handle<Map>::null();
-}
-
-static Map* GetElementsTransitionMapFromDescriptor(Object* descriptor_contents,
- ElementsKind elements_kind) {
- if (descriptor_contents->IsMap()) {
- Map* map = Map::cast(descriptor_contents);
- if (map->elements_kind() == elements_kind) {
- return map;
- }
- return NULL;
- }
-
- FixedArray* map_array = FixedArray::cast(descriptor_contents);
- for (int i = 0; i < map_array->length(); ++i) {
- Object* current = map_array->get(i);
- // Skip undefined slots, they are sentinels for reclaimed maps.
- if (!current->IsUndefined()) {
- Map* current_map = Map::cast(map_array->get(i));
- if (current_map->elements_kind() == elements_kind) {
- return current_map;
+ ElementsKind kind = elements_kind();
+ Handle<Map> transitioned_map = Handle<Map>::null();
+ Handle<Map> current_map(this);
+ bool packed = IsFastPackedElementsKind(kind);
+ if (IsTransitionableFastElementsKind(kind)) {
+ while (CanTransitionToMoreGeneralFastElementsKind(kind, false)) {
+ kind = GetNextMoreGeneralFastElementsKind(kind, false);
+ Handle<Map> maybe_transitioned_map =
+ MaybeNull(current_map->LookupElementsTransitionMap(kind));
+ if (maybe_transitioned_map.is_null()) break;
+ if (ContainsMap(candidates, maybe_transitioned_map) &&
+ (packed || !IsFastPackedElementsKind(kind))) {
+ transitioned_map = maybe_transitioned_map;
+ if (!IsFastPackedElementsKind(kind)) packed = false;
}
+ current_map = maybe_transitioned_map;
}
}
-
- return NULL;
+ return transitioned_map;
}
-static MaybeObject* AddElementsTransitionMapToDescriptor(
- Object* descriptor_contents,
- Map* new_map) {
- // Nothing was in the descriptor for an ELEMENTS_TRANSITION,
- // simply add the map.
- if (descriptor_contents == NULL) {
- return new_map;
- }
-
- // There was already a map in the descriptor, create a 2-element FixedArray
- // to contain the existing map plus the new one.
- FixedArray* new_array;
- Heap* heap = new_map->GetHeap();
- if (descriptor_contents->IsMap()) {
- // Must tenure, DescriptorArray expects no new-space objects.
- MaybeObject* maybe_new_array = heap->AllocateFixedArray(2, TENURED);
- if (!maybe_new_array->To<FixedArray>(&new_array)) {
- return maybe_new_array;
+static Map* FindClosestElementsTransition(Map* map, ElementsKind to_kind) {
+ Map* current_map = map;
+ int index = GetSequenceIndexFromFastElementsKind(map->elements_kind());
+ int to_index = GetSequenceIndexFromFastElementsKind(to_kind);
+ for (; index < to_index; ++index) {
+ Map* next_map = current_map->elements_transition_map();
+ if (next_map == NULL) {
+ return current_map;
}
- new_array->set(0, descriptor_contents);
- new_array->set(1, new_map);
- return new_array;
+ current_map = next_map;
}
+ ASSERT(current_map->elements_kind() == to_kind);
+ return current_map;
+}
- // The descriptor already contained a list of maps for different ElementKinds
- // of ELEMENTS_TRANSITION, first check the existing array for an undefined
- // slot, and if that's not available, create a FixedArray to hold the existing
- // maps plus the new one and fill it in.
- FixedArray* array = FixedArray::cast(descriptor_contents);
- for (int i = 0; i < array->length(); ++i) {
- if (array->get(i)->IsUndefined()) {
- array->set(i, new_map);
- return array;
- }
- }
- // Must tenure, DescriptorArray expects no new-space objects.
- MaybeObject* maybe_new_array =
- heap->AllocateFixedArray(array->length() + 1, TENURED);
- if (!maybe_new_array->To<FixedArray>(&new_array)) {
- return maybe_new_array;
- }
- int i = 0;
- while (i < array->length()) {
- new_array->set(i, array->get(i));
- ++i;
+Map* Map::LookupElementsTransitionMap(ElementsKind to_kind) {
+ if (this->instance_descriptors()->MayContainTransitions() &&
+ IsMoreGeneralElementsKindTransition(this->elements_kind(), to_kind)) {
+ Map* to_map = FindClosestElementsTransition(this, to_kind);
+ if (to_map->elements_kind() == to_kind) {
+ return to_map;
+ }
}
- new_array->set(i, new_map);
- return new_array;
+ return NULL;
}
-String* Map::elements_transition_sentinel_name() {
- return GetHeap()->empty_symbol();
-}
+MaybeObject* Map::CreateNextElementsTransition(ElementsKind next_kind) {
+ ASSERT(elements_transition_map() == NULL);
+ ASSERT(GetSequenceIndexFromFastElementsKind(elements_kind()) ==
+ (GetSequenceIndexFromFastElementsKind(next_kind) - 1));
+ Map* next_map;
+ MaybeObject* maybe_next_map =
+ this->CopyDropTransitions(DescriptorArray::CANNOT_BE_SHARED);
+ if (!maybe_next_map->To(&next_map)) return maybe_next_map;
-Object* Map::GetDescriptorContents(String* sentinel_name,
- bool* safe_to_add_transition) {
- // Get the cached index for the descriptors lookup, or find and cache it.
- DescriptorArray* descriptors = instance_descriptors();
- DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
- int index = cache->Lookup(descriptors, sentinel_name);
- if (index == DescriptorLookupCache::kAbsent) {
- index = descriptors->Search(sentinel_name);
- cache->Update(descriptors, sentinel_name, index);
- }
- // If the transition already exists, return its descriptor.
- if (index != DescriptorArray::kNotFound) {
- PropertyDetails details(descriptors->GetDetails(index));
- if (details.type() == ELEMENTS_TRANSITION) {
- return descriptors->GetValue(index);
- } else {
- if (safe_to_add_transition != NULL) {
- *safe_to_add_transition = false;
- }
- }
- }
- return NULL;
+ next_map->set_elements_kind(next_kind);
+ next_map->SetBackPointer(this);
+ this->set_elements_transition_map(next_map);
+ return next_map;
}
-Map* Map::LookupElementsTransitionMap(ElementsKind elements_kind,
- bool* safe_to_add_transition) {
- // Special case: indirect SMI->FAST transition (cf. comment in
- // AddElementsTransition()).
- if (this->elements_kind() == FAST_SMI_ONLY_ELEMENTS &&
- elements_kind == FAST_ELEMENTS) {
- Map* double_map = this->LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS,
- safe_to_add_transition);
- if (double_map == NULL) return double_map;
- return double_map->LookupElementsTransitionMap(FAST_ELEMENTS,
- safe_to_add_transition);
- }
- Object* descriptor_contents = GetDescriptorContents(
- elements_transition_sentinel_name(), safe_to_add_transition);
- if (descriptor_contents != NULL) {
- Map* maybe_transition_map =
- GetElementsTransitionMapFromDescriptor(descriptor_contents,
- elements_kind);
- ASSERT(maybe_transition_map == NULL || maybe_transition_map->IsMap());
- return maybe_transition_map;
- }
- return NULL;
-}
+static MaybeObject* AddMissingElementsTransitions(Map* map,
+ ElementsKind to_kind) {
+ int index = GetSequenceIndexFromFastElementsKind(map->elements_kind()) + 1;
+ int to_index = GetSequenceIndexFromFastElementsKind(to_kind);
+ ASSERT(index <= to_index);
+ Map* current_map = map;
-MaybeObject* Map::AddElementsTransition(ElementsKind elements_kind,
- Map* transitioned_map) {
- // The map transition graph should be a tree, therefore the transition
- // from SMI to FAST elements is not done directly, but by going through
- // DOUBLE elements first.
- if (this->elements_kind() == FAST_SMI_ONLY_ELEMENTS &&
- elements_kind == FAST_ELEMENTS) {
- bool safe_to_add = true;
- Map* double_map = this->LookupElementsTransitionMap(
- FAST_DOUBLE_ELEMENTS, &safe_to_add);
- // This method is only called when safe_to_add_transition has been found
- // to be true earlier.
- ASSERT(safe_to_add);
-
- if (double_map == NULL) {
- MaybeObject* maybe_map = this->CopyDropTransitions();
- if (!maybe_map->To(&double_map)) return maybe_map;
- double_map->set_elements_kind(FAST_DOUBLE_ELEMENTS);
- MaybeObject* maybe_double_transition = this->AddElementsTransition(
- FAST_DOUBLE_ELEMENTS, double_map);
- if (maybe_double_transition->IsFailure()) return maybe_double_transition;
- }
- return double_map->AddElementsTransition(FAST_ELEMENTS, transitioned_map);
- }
-
- bool safe_to_add_transition = true;
- Object* descriptor_contents = GetDescriptorContents(
- elements_transition_sentinel_name(), &safe_to_add_transition);
- // This method is only called when safe_to_add_transition has been found
- // to be true earlier.
- ASSERT(safe_to_add_transition);
- MaybeObject* maybe_new_contents =
- AddElementsTransitionMapToDescriptor(descriptor_contents,
- transitioned_map);
- Object* new_contents;
- if (!maybe_new_contents->ToObject(&new_contents)) {
- return maybe_new_contents;
- }
-
- ElementsTransitionDescriptor desc(elements_transition_sentinel_name(),
- new_contents);
- Object* new_descriptors;
- MaybeObject* maybe_new_descriptors =
- instance_descriptors()->CopyInsert(&desc, KEEP_TRANSITIONS);
- if (!maybe_new_descriptors->ToObject(&new_descriptors)) {
- return maybe_new_descriptors;
+ for (; index <= to_index; ++index) {
+ ElementsKind next_kind = GetFastElementsKindFromSequenceIndex(index);
+ MaybeObject* maybe_next_map =
+ current_map->CreateNextElementsTransition(next_kind);
+ if (!maybe_next_map->To(&current_map)) return maybe_next_map;
}
- set_instance_descriptors(DescriptorArray::cast(new_descriptors));
- return this;
+
+ ASSERT(current_map->elements_kind() == to_kind);
+ return current_map;
}
@@ -2420,57 +2309,60 @@ Handle<Map> JSObject::GetElementsTransitionMap(Handle<JSObject> object,
}
-MaybeObject* JSObject::GetElementsTransitionMapSlow(ElementsKind to_kind) {
- Map* current_map = map();
- ElementsKind from_kind = current_map->elements_kind();
+// If the map is using the empty descriptor array, install a new empty
+// descriptor array that will contain an element transition.
+// TODO(verwaest) Goes away once the descriptor array is immutable.
+static MaybeObject* EnsureMayContainTransitions(Map* map) {
+ if (map->instance_descriptors()->MayContainTransitions()) return map;
+ DescriptorArray* descriptor_array;
+ MaybeObject* maybe_descriptor_array =
+ DescriptorArray::Allocate(0, DescriptorArray::CANNOT_BE_SHARED);
+ if (!maybe_descriptor_array->To(&descriptor_array)) {
+ return maybe_descriptor_array;
+ }
+ map->set_instance_descriptors(descriptor_array);
+ return map;
+}
- if (from_kind == to_kind) return current_map;
- // Only objects with FastProperties can have DescriptorArrays and can track
- // element-related maps. Also don't add descriptors to maps that are shared.
- bool safe_to_add_transition = HasFastProperties() &&
- !current_map->IsUndefined() &&
- !current_map->is_shared();
+MaybeObject* JSObject::GetElementsTransitionMapSlow(ElementsKind to_kind) {
+ Map* start_map = map();
+ ElementsKind from_kind = start_map->elements_kind();
- // Prevent long chains of DICTIONARY -> FAST_ELEMENTS maps caused by objects
- // with elements that switch back and forth between dictionary and fast
- // element mode.
- if (from_kind == DICTIONARY_ELEMENTS && to_kind == FAST_ELEMENTS) {
- safe_to_add_transition = false;
+ if (from_kind == to_kind) {
+ return start_map;
}
- if (safe_to_add_transition) {
- // It's only safe to manipulate the descriptor array if it would be
- // safe to add a transition.
- Map* maybe_transition_map = current_map->LookupElementsTransitionMap(
- to_kind, &safe_to_add_transition);
- if (maybe_transition_map != NULL) {
- return maybe_transition_map;
- }
+ Context* global_context = GetIsolate()->context()->global_context();
+ bool allow_store_transition =
+ // Only remember the map transition if the object's map is NOT equal to
+ // the global object_function's map and there is not an already existing
+ // non-matching element transition.
+ (global_context->object_function()->map() != map()) &&
+ !start_map->IsUndefined() && !start_map->is_shared() &&
+ // Only store fast element maps in ascending generality.
+ IsTransitionableFastElementsKind(from_kind) &&
+ IsFastElementsKind(to_kind) &&
+ IsMoreGeneralElementsKindTransition(from_kind, to_kind);
+
+ if (!allow_store_transition) {
+ // Create a new free-floating map only if we are not allowed to store it.
+ Map* new_map = NULL;
+ MaybeObject* maybe_new_map =
+ start_map->CopyDropTransitions(DescriptorArray::MAY_BE_SHARED);
+ if (!maybe_new_map->To(&new_map)) return maybe_new_map;
+ new_map->set_elements_kind(to_kind);
+ return new_map;
}
- Map* new_map = NULL;
+  MaybeObject* maybe_transitions = EnsureMayContainTransitions(start_map);
+  if (maybe_transitions->IsFailure()) return maybe_transitions;
+ Map* closest_map = FindClosestElementsTransition(start_map, to_kind);
- // No transition to an existing map for the given ElementsKind. Make a new
- // one.
- { MaybeObject* maybe_map = current_map->CopyDropTransitions();
- if (!maybe_map->To(&new_map)) return maybe_map;
+ if (closest_map->elements_kind() == to_kind) {
+ return closest_map;
}
- new_map->set_elements_kind(to_kind);
-
- // Only remember the map transition if the object's map is NOT equal to the
- // global object_function's map and there is not an already existing
- // non-matching element transition.
- Context* global_context = GetIsolate()->context()->global_context();
- bool allow_map_transition = safe_to_add_transition &&
- (global_context->object_function()->map() != map());
- if (allow_map_transition) {
- MaybeObject* maybe_transition =
- current_map->AddElementsTransition(to_kind, new_map);
- if (maybe_transition->IsFailure()) return maybe_transition;
- }
- return new_map;
+ return AddMissingElementsTransitions(closest_map, to_kind);
}
@@ -2536,9 +2428,13 @@ void JSObject::LookupRealNamedPropertyInPrototypes(String* name,
Heap* heap = GetHeap();
for (Object* pt = GetPrototype();
pt != heap->null_value();
- pt = JSObject::cast(pt)->GetPrototype()) {
+ pt = pt->GetPrototype()) {
+ if (pt->IsJSProxy()) {
+ return result->HandlerResult(JSProxy::cast(pt));
+ }
JSObject::cast(pt)->LocalLookupRealNamedProperty(name, result);
- if (result->IsProperty() && (result->type() != INTERCEPTOR)) return;
+ ASSERT(!(result->IsProperty() && result->type() == INTERCEPTOR));
+ if (result->IsProperty()) return;
}
result->NotFound();
}
@@ -2552,7 +2448,7 @@ MaybeObject* JSObject::SetPropertyWithFailedAccessCheck(
bool check_prototype,
StrictModeFlag strict_mode) {
if (check_prototype && !result->IsProperty()) {
- LookupCallbackSetterInPrototypes(name, result);
+ LookupRealNamedPropertyInPrototypes(name, result);
}
if (result->IsProperty()) {
@@ -2605,13 +2501,14 @@ MaybeObject* JSReceiver::SetProperty(LookupResult* result,
String* key,
Object* value,
PropertyAttributes attributes,
- StrictModeFlag strict_mode) {
+ StrictModeFlag strict_mode,
+ JSReceiver::StoreFromKeyed store_mode) {
if (result->IsFound() && result->type() == HANDLER) {
return result->proxy()->SetPropertyWithHandler(
- key, value, attributes, strict_mode);
+ this, key, value, attributes, strict_mode);
} else {
return JSObject::cast(this)->SetPropertyForResult(
- result, key, value, attributes, strict_mode);
+ result, key, value, attributes, strict_mode, store_mode);
}
}
@@ -2625,20 +2522,21 @@ bool JSProxy::HasPropertyWithHandler(String* name_raw) {
Handle<Object> args[] = { name };
Handle<Object> result = CallTrap(
"has", isolate->derived_has_trap(), ARRAY_SIZE(args), args);
- if (isolate->has_pending_exception()) return Failure::Exception();
+ if (isolate->has_pending_exception()) return false;
return result->ToBoolean()->IsTrue();
}
MUST_USE_RESULT MaybeObject* JSProxy::SetPropertyWithHandler(
+ JSReceiver* receiver_raw,
String* name_raw,
Object* value_raw,
PropertyAttributes attributes,
StrictModeFlag strict_mode) {
Isolate* isolate = GetIsolate();
HandleScope scope(isolate);
- Handle<Object> receiver(this);
+ Handle<JSReceiver> receiver(receiver_raw);
Handle<Object> name(name_raw);
Handle<Object> value(value_raw);
@@ -2650,77 +2548,92 @@ MUST_USE_RESULT MaybeObject* JSProxy::SetPropertyWithHandler(
}
-MUST_USE_RESULT MaybeObject* JSProxy::SetPropertyWithHandlerIfDefiningSetter(
+MUST_USE_RESULT MaybeObject* JSProxy::SetPropertyViaPrototypesWithHandler(
+ JSReceiver* receiver_raw,
String* name_raw,
Object* value_raw,
PropertyAttributes attributes,
StrictModeFlag strict_mode,
- bool* found) {
- *found = true; // except where defined otherwise...
- Isolate* isolate = GetHeap()->isolate();
+ bool* done) {
+ Isolate* isolate = GetIsolate();
Handle<JSProxy> proxy(this);
- Handle<Object> handler(this->handler()); // Trap might morph proxy.
+ Handle<JSReceiver> receiver(receiver_raw);
Handle<String> name(name_raw);
Handle<Object> value(value_raw);
+ Handle<Object> handler(this->handler()); // Trap might morph proxy.
+
+ *done = true; // except where redefined...
Handle<Object> args[] = { name };
Handle<Object> result = proxy->CallTrap(
"getPropertyDescriptor", Handle<Object>(), ARRAY_SIZE(args), args);
if (isolate->has_pending_exception()) return Failure::Exception();
- if (!result->IsUndefined()) {
- // The proxy handler cares about this property.
- // Check whether it is virtualized as an accessor.
- // Emulate [[GetProperty]] semantics for proxies.
- bool has_pending_exception;
- Handle<Object> argv[] = { result };
- Handle<Object> desc =
- Execution::Call(isolate->to_complete_property_descriptor(), result,
- ARRAY_SIZE(argv), argv, &has_pending_exception);
- if (has_pending_exception) return Failure::Exception();
-
- Handle<String> conf_name =
- isolate->factory()->LookupAsciiSymbol("configurable_");
- Handle<Object> configurable(v8::internal::GetProperty(desc, conf_name));
- ASSERT(!isolate->has_pending_exception());
- if (configurable->IsFalse()) {
- Handle<String> trap =
- isolate->factory()->LookupAsciiSymbol("getPropertyDescriptor");
- Handle<Object> args[] = { handler, trap, name };
- Handle<Object> error = isolate->factory()->NewTypeError(
- "proxy_prop_not_configurable", HandleVector(args, ARRAY_SIZE(args)));
- return isolate->Throw(*error);
- }
- ASSERT(configurable->IsTrue());
+ if (result->IsUndefined()) {
+ *done = false;
+ return GetHeap()->the_hole_value();
+ }
- // Check for AccessorDescriptor.
- Handle<String> set_name = isolate->factory()->LookupAsciiSymbol("set_");
- Handle<Object> setter(v8::internal::GetProperty(desc, set_name));
+ // Emulate [[GetProperty]] semantics for proxies.
+ bool has_pending_exception;
+ Handle<Object> argv[] = { result };
+ Handle<Object> desc =
+ Execution::Call(isolate->to_complete_property_descriptor(), result,
+ ARRAY_SIZE(argv), argv, &has_pending_exception);
+ if (has_pending_exception) return Failure::Exception();
+
+  // [[GetProperty]] requires checking that all properties are configurable.
+ Handle<String> configurable_name =
+ isolate->factory()->LookupAsciiSymbol("configurable_");
+ Handle<Object> configurable(
+ v8::internal::GetProperty(desc, configurable_name));
+ ASSERT(!isolate->has_pending_exception());
+ ASSERT(configurable->IsTrue() || configurable->IsFalse());
+ if (configurable->IsFalse()) {
+ Handle<String> trap =
+ isolate->factory()->LookupAsciiSymbol("getPropertyDescriptor");
+ Handle<Object> args[] = { handler, trap, name };
+ Handle<Object> error = isolate->factory()->NewTypeError(
+ "proxy_prop_not_configurable", HandleVector(args, ARRAY_SIZE(args)));
+ return isolate->Throw(*error);
+ }
+ ASSERT(configurable->IsTrue());
+
+ // Check for DataDescriptor.
+ Handle<String> hasWritable_name =
+ isolate->factory()->LookupAsciiSymbol("hasWritable_");
+ Handle<Object> hasWritable(v8::internal::GetProperty(desc, hasWritable_name));
+ ASSERT(!isolate->has_pending_exception());
+ ASSERT(hasWritable->IsTrue() || hasWritable->IsFalse());
+ if (hasWritable->IsTrue()) {
+ Handle<String> writable_name =
+ isolate->factory()->LookupAsciiSymbol("writable_");
+ Handle<Object> writable(v8::internal::GetProperty(desc, writable_name));
ASSERT(!isolate->has_pending_exception());
- if (!setter->IsUndefined()) {
- // We have a setter -- invoke it.
- // TODO(rossberg): nicer would be to cast to some JSCallable here...
- return proxy->SetPropertyWithDefinedSetter(
- JSReceiver::cast(*setter), *value);
- } else {
- Handle<String> get_name = isolate->factory()->LookupAsciiSymbol("get_");
- Handle<Object> getter(v8::internal::GetProperty(desc, get_name));
- ASSERT(!isolate->has_pending_exception());
- if (!getter->IsUndefined()) {
- // We have a getter but no setter -- the property may not be
- // written. In strict mode, throw an error.
- if (strict_mode == kNonStrictMode) return *value;
- Handle<Object> args[] = { name, proxy };
- Handle<Object> error = isolate->factory()->NewTypeError(
- "no_setter_in_callback", HandleVector(args, ARRAY_SIZE(args)));
- return isolate->Throw(*error);
- }
- }
- // Fall-through.
+ ASSERT(writable->IsTrue() || writable->IsFalse());
+ *done = writable->IsFalse();
+ if (!*done) return GetHeap()->the_hole_value();
+ if (strict_mode == kNonStrictMode) return *value;
+ Handle<Object> args[] = { name, receiver };
+ Handle<Object> error = isolate->factory()->NewTypeError(
+ "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args)));
+ return isolate->Throw(*error);
}
- // The proxy does not define the property as an accessor.
- *found = false;
- return *value;
+ // We have an AccessorDescriptor.
+ Handle<String> set_name = isolate->factory()->LookupAsciiSymbol("set_");
+ Handle<Object> setter(v8::internal::GetProperty(desc, set_name));
+ ASSERT(!isolate->has_pending_exception());
+ if (!setter->IsUndefined()) {
+ // TODO(rossberg): nicer would be to cast to some JSCallable here...
+ return receiver->SetPropertyWithDefinedSetter(
+ JSReceiver::cast(*setter), *value);
+ }
+
+ if (strict_mode == kNonStrictMode) return *value;
+ Handle<Object> args2[] = { name, proxy };
+ Handle<Object> error = isolate->factory()->NewTypeError(
+ "no_setter_in_callback", HandleVector(args2, ARRAY_SIZE(args2)));
+ return isolate->Throw(*error);
}
@@ -2878,7 +2791,8 @@ MaybeObject* JSObject::SetPropertyForResult(LookupResult* result,
String* name,
Object* value,
PropertyAttributes attributes,
- StrictModeFlag strict_mode) {
+ StrictModeFlag strict_mode,
+ StoreFromKeyed store_mode) {
Heap* heap = GetHeap();
// Make sure that the top context does not change when doing callbacks or
// interceptor calls.
@@ -2909,26 +2823,19 @@ MaybeObject* JSObject::SetPropertyForResult(LookupResult* result,
if (proto->IsNull()) return value;
ASSERT(proto->IsJSGlobalObject());
return JSObject::cast(proto)->SetPropertyForResult(
- result, name, value, attributes, strict_mode);
+ result, name, value, attributes, strict_mode, store_mode);
}
if (!result->IsProperty() && !IsJSContextExtensionObject()) {
- bool found = false;
- MaybeObject* result_object;
- result_object = SetPropertyWithCallbackSetterInPrototypes(name,
- value,
- attributes,
- &found,
- strict_mode);
- if (found) return result_object;
+ bool done = false;
+ MaybeObject* result_object =
+ SetPropertyViaPrototypes(name, value, attributes, strict_mode, &done);
+ if (done) return result_object;
}
- // At this point, no GC should have happened, as this would invalidate
- // 'result', which we cannot handlify!
-
if (!result->IsFound()) {
// Neither properties nor transitions found.
- return AddProperty(name, value, attributes, strict_mode);
+ return AddProperty(name, value, attributes, strict_mode, store_mode);
}
if (result->IsReadOnly() && result->IsProperty()) {
if (strict_mode == kStrictMode) {
@@ -2962,12 +2869,18 @@ MaybeObject* JSObject::SetPropertyForResult(LookupResult* result,
// Preserve the attributes of this existing property.
attributes = result->GetAttributes();
return ConvertDescriptorToField(name, value, attributes);
- case CALLBACKS:
- return SetPropertyWithCallback(result->GetCallbackObject(),
+ case CALLBACKS: {
+ Object* callback_object = result->GetCallbackObject();
+ if (callback_object->IsAccessorPair() &&
+ !AccessorPair::cast(callback_object)->ContainsAccessor()) {
+ return ConvertDescriptorToField(name, value, attributes);
+ }
+ return SetPropertyWithCallback(callback_object,
name,
value,
result->holder(),
strict_mode);
+ }
case INTERCEPTOR:
return SetPropertyWithInterceptor(name, value, attributes, strict_mode);
case CONSTANT_TRANSITION: {
@@ -2989,7 +2902,6 @@ MaybeObject* JSObject::SetPropertyForResult(LookupResult* result,
return ConvertDescriptorToFieldAndMapTransition(name, value, attributes);
}
case NULL_DESCRIPTOR:
- case ELEMENTS_TRANSITION:
return ConvertDescriptorToFieldAndMapTransition(name, value, attributes);
case HANDLER:
UNREACHABLE();
@@ -3025,7 +2937,6 @@ MaybeObject* JSObject::SetLocalPropertyIgnoreAttributes(
String* name,
Object* value,
PropertyAttributes attributes) {
-
// Make sure that the top context does not change when doing callbacks or
// interceptor calls.
AssertNoContextChange ncc;
@@ -3088,13 +2999,10 @@ MaybeObject* JSObject::SetLocalPropertyIgnoreAttributes(
case CONSTANT_TRANSITION:
// Replace with a MAP_TRANSITION to a new map with a FIELD, even
// if the value is a function.
- return ConvertDescriptorToFieldAndMapTransition(name, value, attributes);
case NULL_DESCRIPTOR:
- case ELEMENTS_TRANSITION:
return ConvertDescriptorToFieldAndMapTransition(name, value, attributes);
case HANDLER:
UNREACHABLE();
- return value;
}
UNREACHABLE(); // keep the compiler happy
return value;
@@ -3250,14 +3158,20 @@ MaybeObject* NormalizedMapCache::Get(JSObject* obj,
Map::cast(result)->SharedMapVerify();
}
if (FLAG_enable_slow_asserts) {
- // The cached map should match newly created normalized map bit-by-bit.
+ // The cached map should match newly created normalized map bit-by-bit,
+ // except for the code cache, which can contain some ics which can be
+ // applied to the shared map.
Object* fresh;
{ MaybeObject* maybe_fresh =
fast->CopyNormalized(mode, SHARED_NORMALIZED_MAP);
if (maybe_fresh->ToObject(&fresh)) {
ASSERT(memcmp(Map::cast(fresh)->address(),
Map::cast(result)->address(),
- Map::kSize) == 0);
+ Map::kCodeCacheOffset) == 0);
+ int offset = Map::kCodeCacheOffset + kPointerSize;
+ ASSERT(memcmp(Map::cast(fresh)->address() + offset,
+ Map::cast(result)->address() + offset,
+ Map::kSize - offset) == 0);
}
}
}
@@ -3345,7 +3259,7 @@ MaybeObject* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
DescriptorArray* descs = map_of_this->instance_descriptors();
for (int i = 0; i < descs->number_of_descriptors(); i++) {
- PropertyDetails details(descs->GetDetails(i));
+ PropertyDetails details = descs->GetDetails(i);
switch (details.type()) {
case CONSTANT_FUNCTION: {
PropertyDetails d =
@@ -3382,7 +3296,6 @@ MaybeObject* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
case CONSTANT_TRANSITION:
case NULL_DESCRIPTOR:
case INTERCEPTOR:
- case ELEMENTS_TRANSITION:
break;
case HANDLER:
case NORMAL:
@@ -3473,8 +3386,7 @@ MaybeObject* JSObject::NormalizeElements() {
}
if (array->IsDictionary()) return array;
- ASSERT(HasFastElements() ||
- HasFastSmiOnlyElements() ||
+ ASSERT(HasFastSmiOrObjectElements() ||
HasFastDoubleElements() ||
HasFastArgumentsElements());
// Compute the effective length and allocate a new backing store.
@@ -3509,8 +3421,7 @@ MaybeObject* JSObject::NormalizeElements() {
if (!maybe_value_object->ToObject(&value)) return maybe_value_object;
}
} else {
- ASSERT(old_map->has_fast_elements() ||
- old_map->has_fast_smi_only_elements());
+ ASSERT(old_map->has_fast_smi_or_object_elements());
value = FixedArray::cast(array)->get(i);
}
PropertyDetails details = PropertyDetails(NONE, NORMAL);
@@ -3753,13 +3664,11 @@ MaybeObject* JSObject::GetHiddenPropertiesDictionary(bool create_if_absent) {
MaybeObject* dict_alloc = StringDictionary::Allocate(kInitialSize);
StringDictionary* dictionary;
if (!dict_alloc->To<StringDictionary>(&dictionary)) return dict_alloc;
- MaybeObject* store_result =
- SetPropertyPostInterceptor(GetHeap()->hidden_symbol(),
- dictionary,
- DONT_ENUM,
- kNonStrictMode);
- if (store_result->IsFailure()) return store_result;
- return dictionary;
+ // Using AddProperty or SetPropertyPostInterceptor here could fail, because
+  // the object might be non-extensible.
+ return HasFastProperties()
+ ? AddFastProperty(GetHeap()->hidden_symbol(), dictionary, DONT_ENUM)
+ : AddSlowProperty(GetHeap()->hidden_symbol(), dictionary, DONT_ENUM);
}
@@ -3999,9 +3908,9 @@ MaybeObject* JSReceiver::DeleteProperty(String* name, DeleteMode mode) {
bool JSObject::ReferencesObjectFromElements(FixedArray* elements,
ElementsKind kind,
Object* object) {
- ASSERT(kind == FAST_ELEMENTS ||
+ ASSERT(IsFastObjectElementsKind(kind) ||
kind == DICTIONARY_ELEMENTS);
- if (kind == FAST_ELEMENTS) {
+ if (IsFastObjectElementsKind(kind)) {
int length = IsJSArray()
? Smi::cast(JSArray::cast(this)->length())->value()
: elements->length();
@@ -4053,12 +3962,15 @@ bool JSObject::ReferencesObject(Object* obj) {
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
// Raw pixels and external arrays do not reference other
// objects.
break;
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
break;
case FAST_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
case DICTIONARY_ELEMENTS: {
FixedArray* elements = FixedArray::cast(this->elements());
if (ReferencesObjectFromElements(elements, kind, obj)) return true;
@@ -4074,7 +3986,8 @@ bool JSObject::ReferencesObject(Object* obj) {
}
// Check the arguments.
FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
- kind = arguments->IsDictionary() ? DICTIONARY_ELEMENTS : FAST_ELEMENTS;
+ kind = arguments->IsDictionary() ? DICTIONARY_ELEMENTS :
+ FAST_HOLEY_ELEMENTS;
if (ReferencesObjectFromElements(arguments, kind, obj)) return true;
break;
}
@@ -4168,7 +4081,8 @@ MaybeObject* JSObject::PreventExtensions() {
// Do a map transition, other objects with this map may still
// be extensible.
Map* new_map;
- { MaybeObject* maybe = map()->CopyDropTransitions();
+ { MaybeObject* maybe =
+ map()->CopyDropTransitions(DescriptorArray::MAY_BE_SHARED);
if (!maybe->To<Map>(&new_map)) return maybe;
}
new_map->set_is_extensible(false);
@@ -4209,7 +4123,7 @@ int Map::NumberOfDescribedProperties(PropertyAttributes filter) {
int result = 0;
DescriptorArray* descs = instance_descriptors();
for (int i = 0; i < descs->number_of_descriptors(); i++) {
- PropertyDetails details(descs->GetDetails(i));
+ PropertyDetails details = descs->GetDetails(i);
if (descs->IsProperty(i) && (details.attributes() & filter) == 0) {
result++;
}
@@ -4308,7 +4222,7 @@ void JSReceiver::Lookup(String* name, LookupResult* result) {
}
-// Search object and it's prototype chain for callback properties.
+// Search object and its prototype chain for callback properties.
void JSObject::LookupCallback(String* name, LookupResult* result) {
Heap* heap = GetHeap();
for (Object* current = this;
@@ -4352,9 +4266,12 @@ MaybeObject* JSObject::DefineElementAccessor(uint32_t index,
Object* setter,
PropertyAttributes attributes) {
switch (GetElementsKind()) {
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
break;
case EXTERNAL_PIXEL_ELEMENTS:
case EXTERNAL_BYTE_ELEMENTS:
@@ -4412,37 +4329,56 @@ MaybeObject* JSObject::DefineElementAccessor(uint32_t index,
}
+MaybeObject* JSObject::CreateAccessorPairFor(String* name) {
+ LookupResult result(GetHeap()->isolate());
+ LocalLookupRealNamedProperty(name, &result);
+ if (result.IsProperty() && result.type() == CALLBACKS) {
+ // Note that the result can actually have IsDontDelete() == true when we
+ // e.g. have to fall back to the slow case while adding a setter after
+ // successfully reusing a map transition for a getter. Nevertheless, this is
+ // OK, because the assertion only holds for the whole addition of both
+ // accessors, not for the addition of each part. See first comment in
+ // DefinePropertyAccessor below.
+ Object* obj = result.GetCallbackObject();
+ if (obj->IsAccessorPair()) {
+ return AccessorPair::cast(obj)->CopyWithoutTransitions();
+ }
+ }
+ return GetHeap()->AllocateAccessorPair();
+}
+
+
MaybeObject* JSObject::DefinePropertyAccessor(String* name,
Object* getter,
Object* setter,
PropertyAttributes attributes) {
- // Lookup the name.
- LookupResult result(GetHeap()->isolate());
- LocalLookupRealNamedProperty(name, &result);
- if (result.IsFound()) {
- if (result.type() == CALLBACKS) {
- ASSERT(!result.IsDontDelete());
- Object* obj = result.GetCallbackObject();
- // Need to preserve old getters/setters.
- if (obj->IsAccessorPair()) {
- AccessorPair* copy;
- { MaybeObject* maybe_copy =
- AccessorPair::cast(obj)->CopyWithoutTransitions();
- if (!maybe_copy->To(&copy)) return maybe_copy;
- }
- copy->SetComponents(getter, setter);
- // Use set to update attributes.
- return SetPropertyCallback(name, copy, attributes);
- }
+ // We could assert that the property is configurable here, but we would need
+ // to do a lookup, which seems to be a bit of overkill.
+ Heap* heap = GetHeap();
+ bool only_attribute_changes = getter->IsNull() && setter->IsNull();
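+  // Try the fast path first. DefineFastAccessor returns null_value() as a
+  // sentinel meaning the fast case does not apply; any other non-failure
+  // result means the fast path succeeded.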
+ if (HasFastProperties() && !only_attribute_changes) {
+ MaybeObject* getterOk = heap->undefined_value();
+ if (!getter->IsNull()) {
+ getterOk = DefineFastAccessor(name, ACCESSOR_GETTER, getter, attributes);
+ if (getterOk->IsFailure()) return getterOk;
+ }
+
+ MaybeObject* setterOk = heap->undefined_value();
+ if (getterOk != heap->null_value() && !setter->IsNull()) {
+ setterOk = DefineFastAccessor(name, ACCESSOR_SETTER, setter, attributes);
+ if (setterOk->IsFailure()) return setterOk;
+ }
+
+ if (getterOk != heap->null_value() && setterOk != heap->null_value()) {
+ return heap->undefined_value();
}
}
AccessorPair* accessors;
- { MaybeObject* maybe_accessors = GetHeap()->AllocateAccessorPair();
+ { MaybeObject* maybe_accessors = CreateAccessorPairFor(name);
if (!maybe_accessors->To(&accessors)) return maybe_accessors;
}
accessors->SetComponents(getter, setter);
-
return SetPropertyCallback(name, accessors, attributes);
}
@@ -4452,7 +4388,7 @@ bool JSObject::CanSetCallback(String* name) {
GetIsolate()->MayNamedAccess(this, name, v8::ACCESS_SET));
// Check if there is an API defined callback object which prohibits
- // callback overwriting in this object or it's prototype chain.
+ // callback overwriting in this object or its prototype chain.
// This mechanism is needed for instance in a browser setting, where
// certain accessors such as window.location should not be allowed
// to be overwritten because allowing overwriting could potentially
@@ -4587,6 +4523,159 @@ MaybeObject* JSObject::DefineAccessor(String* name,
}
+static MaybeObject* CreateFreshAccessor(JSObject* obj,
+ String* name,
+ AccessorComponent component,
+ Object* accessor,
+ PropertyAttributes attributes) {
+ // step 1: create a new getter/setter pair with only the accessor in it
+ Heap* heap = obj->GetHeap();
+ AccessorPair* accessors2;
+ { MaybeObject* maybe_accessors2 = heap->AllocateAccessorPair();
+ if (!maybe_accessors2->To(&accessors2)) return maybe_accessors2;
+ }
+ accessors2->set(component, accessor);
+
+ // step 2: create a copy of the descriptors, incl. the new getter/setter pair
+ Map* map1 = obj->map();
+ CallbacksDescriptor callbacks_descr2(name, accessors2, attributes);
+ DescriptorArray* descriptors2;
+ { MaybeObject* maybe_descriptors2 =
+ map1->instance_descriptors()->CopyInsert(&callbacks_descr2,
+ REMOVE_TRANSITIONS);
+ if (!maybe_descriptors2->To(&descriptors2)) return maybe_descriptors2;
+ }
+
+ // step 3: create a new map with the new descriptors
+ Map* map2;
+ { MaybeObject* maybe_map2 = map1->CopyDropDescriptors();
+ if (!maybe_map2->To(&map2)) return maybe_map2;
+ }
+ map2->set_instance_descriptors(descriptors2);
+
+ // step 4: create a new getter/setter pair with a transition to the new map
+ AccessorPair* accessors1;
+ { MaybeObject* maybe_accessors1 = heap->AllocateAccessorPair();
+ if (!maybe_accessors1->To(&accessors1)) return maybe_accessors1;
+ }
+ accessors1->set(component, map2);
+
+ // step 5: create a copy of the descriptors, incl. the new getter/setter pair
+ // with the transition
+ CallbacksDescriptor callbacks_descr1(name, accessors1, attributes);
+ DescriptorArray* descriptors1;
+ { MaybeObject* maybe_descriptors1 =
+ map1->instance_descriptors()->CopyInsert(&callbacks_descr1,
+ KEEP_TRANSITIONS);
+ if (!maybe_descriptors1->To(&descriptors1)) return maybe_descriptors1;
+ }
+
+ // step 6: everything went well so far, so we make our changes visible
+ obj->set_map(map2);
+ map1->set_instance_descriptors(descriptors1);
+ map2->SetBackPointer(map1);
+ return obj;
+}
+
+
+static bool TransitionToSameAccessor(Object* map,
+ String* name,
+ AccessorComponent component,
+ Object* accessor,
+                                     PropertyAttributes attributes) {
+ DescriptorArray* descs = Map::cast(map)->instance_descriptors();
+ int number = descs->SearchWithCache(name);
+ ASSERT(number != DescriptorArray::kNotFound);
+ Object* target_accessor =
+ AccessorPair::cast(descs->GetCallbacksObject(number))->get(component);
+ PropertyAttributes target_attributes = descs->GetDetails(number).attributes();
+ return target_accessor == accessor && target_attributes == attributes;
+}
+
+
+static MaybeObject* NewCallbackTransition(JSObject* obj,
+ String* name,
+ AccessorComponent component,
+ Object* accessor,
+ PropertyAttributes attributes,
+ AccessorPair* accessors2) {
+ // step 1: copy the old getter/setter pair and set the new accessor
+ AccessorPair* accessors3;
+ { MaybeObject* maybe_accessors3 = accessors2->CopyWithoutTransitions();
+ if (!maybe_accessors3->To(&accessors3)) return maybe_accessors3;
+ }
+ accessors3->set(component, accessor);
+
+ // step 2: create a copy of the descriptors, incl. the new getter/setter pair
+ Map* map2 = obj->map();
+ CallbacksDescriptor callbacks_descr3(name, accessors3, attributes);
+ DescriptorArray* descriptors3;
+ { MaybeObject* maybe_descriptors3 =
+ map2->instance_descriptors()->CopyInsert(&callbacks_descr3,
+ REMOVE_TRANSITIONS);
+ if (!maybe_descriptors3->To(&descriptors3)) return maybe_descriptors3;
+ }
+
+ // step 3: create a new map with the new descriptors
+ Map* map3;
+ { MaybeObject* maybe_map3 = map2->CopyDropDescriptors();
+ if (!maybe_map3->To(&map3)) return maybe_map3;
+ }
+ map3->set_instance_descriptors(descriptors3);
+
+ // step 4: everything went well so far, so we make our changes visible
+ obj->set_map(map3);
+ accessors2->set(component, map3);
+ map3->SetBackPointer(map2);
+ return obj;
+}
+
+
+MaybeObject* JSObject::DefineFastAccessor(String* name,
+ AccessorComponent component,
+ Object* accessor,
+ PropertyAttributes attributes) {
+ ASSERT(accessor->IsSpecFunction() || accessor->IsUndefined());
+ LookupResult result(GetIsolate());
+ LocalLookup(name, &result);
+
+ // If we have a new property, create a fresh accessor plus a transition to it.
+ if (!result.IsFound()) {
+ return CreateFreshAccessor(this, name, component, accessor, attributes);
+ }
+
+ // If the property is not a JavaScript accessor, fall back to the slow case.
+ if (result.type() != CALLBACKS) return GetHeap()->null_value();
+ Object* callback_value = result.GetCallbackObject();
+ if (!callback_value->IsAccessorPair()) return GetHeap()->null_value();
+ AccessorPair* accessors = AccessorPair::cast(callback_value);
+
+ // Follow a callback transition, if there is a fitting one.
+ Object* entry = accessors->get(component);
+ if (entry->IsMap() &&
+ TransitionToSameAccessor(entry, name, component, accessor, attributes)) {
+ set_map(Map::cast(entry));
+ return this;
+ }
+
+ // When we re-add the same accessor again, there is nothing to do.
+ if (entry == accessor && result.GetAttributes() == attributes) return this;
+
+ // Only the other accessor has been set so far, create a new transition.
+ if (entry->IsTheHole()) {
+ return NewCallbackTransition(this,
+ name,
+ component,
+ accessor,
+ attributes,
+ accessors);
+ }
+
+ // Nothing from the above worked, so we have to fall back to the slow case.
+ return GetHeap()->null_value();
+}
+
+
MaybeObject* JSObject::DefineAccessor(AccessorInfo* info) {
Isolate* isolate = GetIsolate();
String* name = String::cast(info->name());
@@ -4623,9 +4712,12 @@ MaybeObject* JSObject::DefineAccessor(AccessorInfo* info) {
// Accessors overwrite previous callbacks (cf. with getters/setters).
switch (GetElementsKind()) {
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
break;
case EXTERNAL_PIXEL_ELEMENTS:
case EXTERNAL_BYTE_ELEMENTS:
@@ -4770,7 +4862,8 @@ MaybeObject* Map::CopyDropDescriptors() {
JSFunction* ctor = JSFunction::cast(constructor());
Object* descriptors;
{ MaybeObject* maybe_descriptors =
- ctor->initial_map()->instance_descriptors()->RemoveTransitions();
+ ctor->initial_map()->instance_descriptors()->RemoveTransitions(
+ DescriptorArray::MAY_BE_SHARED);
if (!maybe_descriptors->ToObject(&descriptors)) return maybe_descriptors;
}
Map::cast(result)->set_instance_descriptors(
@@ -4810,6 +4903,7 @@ MaybeObject* Map::CopyNormalized(PropertyNormalizationMode mode,
Map::cast(result)->set_bit_field(bit_field());
Map::cast(result)->set_bit_field2(bit_field2());
Map::cast(result)->set_bit_field3(bit_field3());
+ Map::cast(result)->set_code_cache(code_cache());
Map::cast(result)->set_is_shared(sharing == SHARED_NORMALIZED_MAP);
@@ -4823,20 +4917,22 @@ MaybeObject* Map::CopyNormalized(PropertyNormalizationMode mode,
}
-MaybeObject* Map::CopyDropTransitions() {
+MaybeObject* Map::CopyDropTransitions(
+ DescriptorArray::SharedMode shared_mode) {
Object* new_map;
{ MaybeObject* maybe_new_map = CopyDropDescriptors();
if (!maybe_new_map->ToObject(&new_map)) return maybe_new_map;
}
Object* descriptors;
{ MaybeObject* maybe_descriptors =
- instance_descriptors()->RemoveTransitions();
+ instance_descriptors()->RemoveTransitions(shared_mode);
if (!maybe_descriptors->ToObject(&descriptors)) return maybe_descriptors;
}
cast(new_map)->set_instance_descriptors(DescriptorArray::cast(descriptors));
return new_map;
}
+
void Map::UpdateCodeCache(Handle<Map> map,
Handle<String> name,
Handle<Code> code) {
@@ -4846,6 +4942,8 @@ void Map::UpdateCodeCache(Handle<Map> map,
}
MaybeObject* Map::UpdateCodeCache(String* name, Code* code) {
+ ASSERT(!is_shared() || code->allowed_in_shared_map_code_cache());
+
// Allocate the code cache if not present.
if (code_cache()->IsFixedArray()) {
Object* result;
@@ -4896,39 +4994,47 @@ class IntrusiveMapTransitionIterator {
void Start() {
ASSERT(!IsIterating());
- if (HasContentArray()) *ContentHeader() = Smi::FromInt(0);
+ if (descriptor_array_->MayContainTransitions())
+ *DescriptorArrayHeader() = Smi::FromInt(0);
}
bool IsIterating() {
- return HasContentArray() && (*ContentHeader())->IsSmi();
+ return descriptor_array_->MayContainTransitions() &&
+ (*DescriptorArrayHeader())->IsSmi();
}
Map* Next() {
ASSERT(IsIterating());
- FixedArray* contents = ContentArray();
- // Attention, tricky index manipulation ahead: Every entry in the contents
- // array consists of a value/details pair, so the index is typically even.
- // An exception is made for CALLBACKS entries: An even index means we look
- // at its getter, and an odd index means we look at its setter.
- int index = Smi::cast(*ContentHeader())->value();
- while (index < contents->length()) {
- PropertyDetails details(Smi::cast(contents->get(index | 1)));
+ // Attention, tricky index manipulation ahead: Two consecutive indices are
+ // assigned to each descriptor. Most descriptors directly advance to the
+ // next descriptor by adding 2 to the index. The exceptions are the
+    // CALLBACKS entries: An even index means we look at its setter, and an odd
+    // index means we look at its getter.
+ int raw_index = Smi::cast(*DescriptorArrayHeader())->value();
+ int index = raw_index / 2;
+ int number_of_descriptors = descriptor_array_->number_of_descriptors();
+ while (index < number_of_descriptors) {
+ PropertyDetails details(descriptor_array_->GetDetails(index));
switch (details.type()) {
case MAP_TRANSITION:
case CONSTANT_TRANSITION:
- case ELEMENTS_TRANSITION:
// We definitely have a map transition.
- *ContentHeader() = Smi::FromInt(index + 2);
- return static_cast<Map*>(contents->get(index));
+ *DescriptorArrayHeader() = Smi::FromInt(raw_index + 2);
+ return static_cast<Map*>(descriptor_array_->GetValue(index));
case CALLBACKS: {
// We might have a map transition in a getter or in a setter.
AccessorPair* accessors =
- static_cast<AccessorPair*>(contents->get(index & ~1));
- Object* accessor =
- ((index & 1) == 0) ? accessors->getter() : accessors->setter();
- index++;
+ static_cast<AccessorPair*>(descriptor_array_->GetValue(index));
+ Object* accessor;
+ if ((raw_index & 1) == 0) {
+ accessor = accessors->setter();
+ } else {
+ ++index;
+ accessor = accessors->getter();
+ }
+ ++raw_index;
if (accessor->IsMap()) {
- *ContentHeader() = Smi::FromInt(index);
+ *DescriptorArrayHeader() = Smi::FromInt(raw_index);
return static_cast<Map*>(accessor);
}
break;
@@ -4940,26 +5046,25 @@ class IntrusiveMapTransitionIterator {
case INTERCEPTOR:
case NULL_DESCRIPTOR:
// We definitely have no map transition.
- index += 2;
+ raw_index += 2;
+ ++index;
break;
}
}
- *ContentHeader() = descriptor_array_->GetHeap()->fixed_array_map();
+ if (index == descriptor_array_->number_of_descriptors()) {
+ Map* elements_transition = descriptor_array_->elements_transition_map();
+ if (elements_transition != NULL) {
+ *DescriptorArrayHeader() = Smi::FromInt(index + 1);
+ return elements_transition;
+ }
+ }
+ *DescriptorArrayHeader() = descriptor_array_->GetHeap()->fixed_array_map();
return NULL;
}
private:
- bool HasContentArray() {
- return descriptor_array_-> length() > DescriptorArray::kContentArrayIndex;
- }
-
- FixedArray* ContentArray() {
- Object* array = descriptor_array_->get(DescriptorArray::kContentArrayIndex);
- return static_cast<FixedArray*>(array);
- }
-
- Object** ContentHeader() {
- return HeapObject::RawField(ContentArray(), DescriptorArray::kMapOffset);
+ Object** DescriptorArrayHeader() {
+ return HeapObject::RawField(descriptor_array_, DescriptorArray::kMapOffset);
}
DescriptorArray* descriptor_array_;
@@ -4970,7 +5075,7 @@ class IntrusiveMapTransitionIterator {
// underlying array while it is running.
class IntrusivePrototypeTransitionIterator {
public:
- explicit IntrusivePrototypeTransitionIterator(FixedArray* proto_trans)
+ explicit IntrusivePrototypeTransitionIterator(HeapObject* proto_trans)
: proto_trans_(proto_trans) { }
void Start() {
@@ -4995,7 +5100,7 @@ class IntrusivePrototypeTransitionIterator {
private:
bool HasTransitions() {
- return proto_trans_->length() >= Map::kProtoTransitionHeaderSize;
+ return proto_trans_->map()->IsSmi() || proto_trans_->IsFixedArray();
}
Object** Header() {
@@ -5003,12 +5108,16 @@ class IntrusivePrototypeTransitionIterator {
}
int NumberOfTransitions() {
- Object* num = proto_trans_->get(Map::kProtoTransitionNumberOfEntriesOffset);
+ ASSERT(HasTransitions());
+ FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
+ Object* num = proto_trans->get(Map::kProtoTransitionNumberOfEntriesOffset);
return Smi::cast(num)->value();
}
Map* GetTransition(int transitionNumber) {
- return Map::cast(proto_trans_->get(IndexFor(transitionNumber)));
+ ASSERT(HasTransitions());
+ FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
+ return Map::cast(proto_trans->get(IndexFor(transitionNumber)));
}
int IndexFor(int transitionNumber) {
@@ -5017,7 +5126,7 @@ class IntrusivePrototypeTransitionIterator {
transitionNumber * Map::kProtoTransitionElementsPerEntry;
}
- FixedArray* proto_trans_;
+ HeapObject* proto_trans_;
};
@@ -5056,6 +5165,20 @@ class TraversableMap : public Map {
return old_parent;
}
+ // Can either be Smi (no instance descriptors), or a descriptor array with the
+ // header overwritten as a Smi (thus iterating).
+ DescriptorArray* MutatedInstanceDescriptors() {
+ Object* object =
+ *HeapObject::RawField(this, kInstanceDescriptorsOrBitField3Offset);
+ if (object->IsSmi()) {
+ return GetHeap()->empty_descriptor_array();
+ } else {
+ DescriptorArray* descriptor_array =
+ static_cast<DescriptorArray*>(object);
+ return descriptor_array;
+ }
+ }
+
// Start iterating over this map's children, possibly destroying a FixedArray
// map (see explanation above).
void ChildIteratorStart() {
@@ -5067,17 +5190,18 @@ class TraversableMap : public Map {
// If we have an unvisited child map, return that one and advance. If we have
// none, return NULL and reset any destroyed FixedArray maps.
TraversableMap* ChildIteratorNext() {
- IntrusiveMapTransitionIterator descriptor_iterator(instance_descriptors());
- if (descriptor_iterator.IsIterating()) {
- Map* next = descriptor_iterator.Next();
- if (next != NULL) return static_cast<TraversableMap*>(next);
- }
IntrusivePrototypeTransitionIterator
proto_iterator(unchecked_prototype_transitions());
if (proto_iterator.IsIterating()) {
Map* next = proto_iterator.Next();
if (next != NULL) return static_cast<TraversableMap*>(next);
}
+ IntrusiveMapTransitionIterator
+ descriptor_iterator(MutatedInstanceDescriptors());
+ if (descriptor_iterator.IsIterating()) {
+ Map* next = descriptor_iterator.Next();
+ if (next != NULL) return static_cast<TraversableMap*>(next);
+ }
return NULL;
}
};
@@ -5627,29 +5751,30 @@ bool FixedArray::IsEqualTo(FixedArray* other) {
#endif
-MaybeObject* DescriptorArray::Allocate(int number_of_descriptors) {
+MaybeObject* DescriptorArray::Allocate(int number_of_descriptors,
+ SharedMode shared_mode) {
Heap* heap = Isolate::Current()->heap();
- if (number_of_descriptors == 0) {
- return heap->empty_descriptor_array();
- }
- // Allocate the array of keys.
- Object* array;
- { MaybeObject* maybe_array =
- heap->AllocateFixedArray(ToKeyIndex(number_of_descriptors));
- if (!maybe_array->ToObject(&array)) return maybe_array;
- }
// Do not use DescriptorArray::cast on incomplete object.
- FixedArray* result = FixedArray::cast(array);
-
- // Allocate the content array and set it in the descriptor array.
- { MaybeObject* maybe_array =
- heap->AllocateFixedArray(number_of_descriptors << 1);
- if (!maybe_array->ToObject(&array)) return maybe_array;
+ FixedArray* result;
+ if (number_of_descriptors == 0) {
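+    // A non-shared empty descriptor array still needs a real allocation so
+    // that it can later hold transition information of its own.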
+ if (shared_mode == MAY_BE_SHARED) {
+ return heap->empty_descriptor_array();
+ }
+ { MaybeObject* maybe_array =
+ heap->AllocateFixedArray(kTransitionsIndex + 1);
+ if (!maybe_array->To(&result)) return maybe_array;
+ }
+ } else {
+ // Allocate the array of keys.
+ { MaybeObject* maybe_array =
+ heap->AllocateFixedArray(ToKeyIndex(number_of_descriptors));
+ if (!maybe_array->To(&result)) return maybe_array;
+ }
+ result->set(kEnumerationIndexIndex,
+ Smi::FromInt(PropertyDetails::kInitialIndex));
}
result->set(kBitField3StorageIndex, Smi::FromInt(0));
- result->set(kContentArrayIndex, array);
- result->set(kEnumerationIndexIndex,
- Smi::FromInt(PropertyDetails::kInitialIndex));
+ result->set(kTransitionsIndex, Smi::FromInt(0));
return result;
}
@@ -5698,7 +5823,7 @@ MaybeObject* DescriptorArray::CopyFrom(int dst_index,
int src_index,
const WhitenessWitness& witness) {
Object* value = src->GetValue(src_index);
- PropertyDetails details(src->GetDetails(src_index));
+ PropertyDetails details = src->GetDetails(src_index);
if (details.type() == CALLBACKS && value->IsAccessorPair()) {
MaybeObject* maybe_copy =
AccessorPair::cast(value)->CopyWithoutTransitions();
@@ -5738,26 +5863,21 @@ MaybeObject* DescriptorArray::CopyInsert(Descriptor* descriptor,
int index = Search(descriptor->GetKey());
const bool replacing = (index != kNotFound);
bool keep_enumeration_index = false;
- if (replacing) {
- // We are replacing an existing descriptor. We keep the enumeration
- // index of a visible property.
- PropertyType t = PropertyDetails(GetDetails(index)).type();
- if (t == CONSTANT_FUNCTION ||
- t == FIELD ||
- t == CALLBACKS ||
- t == INTERCEPTOR) {
- keep_enumeration_index = true;
- } else if (remove_transitions) {
- // Replaced descriptor has been counted as removed if it is
- // a transition that will be replaced. Adjust count in this case.
- ++new_size;
- }
- } else {
+ if (!replacing) {
+ ++new_size;
+ } else if (!IsTransitionOnly(index)) {
+ // We are replacing an existing descriptor. We keep the enumeration index
+ // of a visible property.
+ keep_enumeration_index = true;
+ } else if (remove_transitions) {
+ // Replaced descriptor has been counted as removed if it is a transition
+ // that will be replaced. Adjust count in this case.
++new_size;
}
DescriptorArray* new_descriptors;
- { MaybeObject* maybe_result = Allocate(new_size);
+ { SharedMode mode = remove_transitions ? MAY_BE_SHARED : CANNOT_BE_SHARED;
+ MaybeObject* maybe_result = Allocate(new_size, mode);
if (!maybe_result->To(&new_descriptors)) return maybe_result;
}
@@ -5768,13 +5888,16 @@ MaybeObject* DescriptorArray::CopyInsert(Descriptor* descriptor,
int enumeration_index = NextEnumerationIndex();
if (!descriptor->ContainsTransition()) {
if (keep_enumeration_index) {
- descriptor->SetEnumerationIndex(
- PropertyDetails(GetDetails(index)).index());
+ descriptor->SetEnumerationIndex(GetDetails(index).index());
} else {
descriptor->SetEnumerationIndex(enumeration_index);
++enumeration_index;
}
}
+ Map* old_elements_transition = elements_transition_map();
+ if ((!remove_transitions) && (old_elements_transition != NULL)) {
+ new_descriptors->set_elements_transition_map(old_elements_transition);
+ }
new_descriptors->SetNextEnumerationIndex(enumeration_index);
// Copy the descriptors, filtering out transitions and null descriptors,
@@ -5798,6 +5921,8 @@ MaybeObject* DescriptorArray::CopyInsert(Descriptor* descriptor,
}
}
if (insertion_index < 0) insertion_index = to_index++;
+
+ ASSERT(insertion_index < new_descriptors->number_of_descriptors());
new_descriptors->Set(insertion_index, descriptor, witness);
ASSERT(to_index == new_descriptors->number_of_descriptors());
@@ -5807,14 +5932,15 @@ MaybeObject* DescriptorArray::CopyInsert(Descriptor* descriptor,
}
-MaybeObject* DescriptorArray::RemoveTransitions() {
+MaybeObject* DescriptorArray::RemoveTransitions(SharedMode shared_mode) {
// Allocate the new descriptor array.
int new_number_of_descriptors = 0;
for (int i = 0; i < number_of_descriptors(); i++) {
if (IsProperty(i)) new_number_of_descriptors++;
}
DescriptorArray* new_descriptors;
- { MaybeObject* maybe_result = Allocate(new_number_of_descriptors);
+ { MaybeObject* maybe_result = Allocate(new_number_of_descriptors,
+ shared_mode);
if (!maybe_result->To(&new_descriptors)) return maybe_result;
}
@@ -5829,11 +5955,15 @@ MaybeObject* DescriptorArray::RemoveTransitions() {
}
}
ASSERT(next_descriptor == new_descriptors->number_of_descriptors());
+ new_descriptors->SetNextEnumerationIndex(NextEnumerationIndex());
return new_descriptors;
}
-
+// We need the whiteness witness since sort will reshuffle the entries in the
+// descriptor array. If the descriptor array were to be black, the shuffling
+// would move a slot that was already recorded as pointing into an evacuation
+// candidate. This would result in missing updates upon evacuation.
void DescriptorArray::SortUnchecked(const WhitenessWitness& witness) {
// In-place heap sort.
int len = number_of_descriptors();
@@ -5895,42 +6025,37 @@ void DescriptorArray::Sort(const WhitenessWitness& witness) {
int DescriptorArray::BinarySearch(String* name, int low, int high) {
uint32_t hash = name->Hash();
+ int limit = high;
+
+ ASSERT(low <= high);
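+  // Lower-bound search: narrow [low, high) to the first entry whose hash is
+  // greater than or equal to the requested hash, then scan the run of equal
+  // hashes for an exact name match.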
- while (low <= high) {
+ while (low != high) {
int mid = (low + high) / 2;
String* mid_name = GetKey(mid);
uint32_t mid_hash = mid_name->Hash();
- if (mid_hash > hash) {
- high = mid - 1;
- continue;
- }
- if (mid_hash < hash) {
+ if (mid_hash >= hash) {
+ high = mid;
+ } else {
low = mid + 1;
- continue;
- }
- // Found an element with the same hash-code.
- ASSERT(hash == mid_hash);
- // There might be more, so we find the first one and
- // check them all to see if we have a match.
- if (name == mid_name && !is_null_descriptor(mid)) return mid;
- while ((mid > low) && (GetKey(mid - 1)->Hash() == hash)) mid--;
- for (; (mid <= high) && (GetKey(mid)->Hash() == hash); mid++) {
- if (GetKey(mid)->Equals(name) && !is_null_descriptor(mid)) return mid;
}
- break;
}
+
+ for (; low <= limit && GetKey(low)->Hash() == hash; ++low) {
+ if (GetKey(low)->Equals(name) && !IsNullDescriptor(low))
+ return low;
+ }
+
return kNotFound;
}
-int DescriptorArray::LinearSearch(String* name, int len) {
+int DescriptorArray::LinearSearch(SearchMode mode, String* name, int len) {
uint32_t hash = name->Hash();
for (int number = 0; number < len; number++) {
String* entry = GetKey(number);
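+    // In EXPECT_SORTED mode the keys are ordered by hash, so the search can
+    // stop as soon as it has passed the requested hash.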
- if ((entry->Hash() == hash) &&
- name->Equals(entry) &&
- !is_null_descriptor(number)) {
+ if (mode == EXPECT_SORTED && entry->Hash() > hash) break;
+ if (name->Equals(entry) && !IsNullDescriptor(number)) {
return number;
}
}
@@ -5951,8 +6076,8 @@ MaybeObject* AccessorPair::CopyWithoutTransitions() {
Object* AccessorPair::GetComponent(AccessorComponent component) {
- Object* accessor = (component == ACCESSOR_GETTER) ? getter() : setter();
- return accessor->IsTheHole() ? GetHeap()->undefined_value() : accessor;
+ Object* accessor = get(component);
+ return accessor->IsTheHole() ? GetHeap()->undefined_value() : accessor;
}
@@ -5978,9 +6103,9 @@ bool DescriptorArray::IsEqualTo(DescriptorArray* other) {
if (other->IsEmpty()) return false;
if (length() != other->length()) return false;
for (int i = 0; i < length(); ++i) {
- if (get(i) != other->get(i) && i != kContentArrayIndex) return false;
+ if (get(i) != other->get(i)) return false;
}
- return GetContentArray()->IsEqualTo(other->GetContentArray());
+ return true;
}
#endif
@@ -7180,134 +7305,58 @@ void String::PrintOn(FILE* file) {
}
-void Map::CreateOneBackPointer(Object* transition_target) {
- if (!transition_target->IsMap()) return;
- Map* target = Map::cast(transition_target);
-#ifdef DEBUG
- // Verify target.
- Object* source_prototype = prototype();
- Object* target_prototype = target->prototype();
- ASSERT(source_prototype->IsJSReceiver() ||
- source_prototype->IsMap() ||
- source_prototype->IsNull());
- ASSERT(target_prototype->IsJSReceiver() ||
- target_prototype->IsNull());
- ASSERT(source_prototype->IsMap() ||
- source_prototype == target_prototype);
-#endif
- // Point target back to source. set_prototype() will not let us set
- // the prototype to a map, as we do here.
- *RawField(target, kPrototypeOffset) = this;
-}
-
-
-void Map::CreateBackPointers() {
- DescriptorArray* descriptors = instance_descriptors();
- for (int i = 0; i < descriptors->number_of_descriptors(); i++) {
- switch (descriptors->GetType(i)) {
- case MAP_TRANSITION:
- case CONSTANT_TRANSITION:
- CreateOneBackPointer(descriptors->GetValue(i));
- break;
- case ELEMENTS_TRANSITION: {
- Object* object = descriptors->GetValue(i);
- if (object->IsMap()) {
- CreateOneBackPointer(object);
- } else {
- FixedArray* array = FixedArray::cast(object);
- for (int i = 0; i < array->length(); ++i) {
- CreateOneBackPointer(array->get(i));
- }
- }
- break;
- }
- case CALLBACKS: {
- Object* object = descriptors->GetValue(i);
- if (object->IsAccessorPair()) {
- AccessorPair* accessors = AccessorPair::cast(object);
- CreateOneBackPointer(accessors->getter());
- CreateOneBackPointer(accessors->setter());
- }
- break;
- }
- case NORMAL:
- case FIELD:
- case CONSTANT_FUNCTION:
- case HANDLER:
- case INTERCEPTOR:
- case NULL_DESCRIPTOR:
- break;
- }
- }
-}
-
-
-bool Map::RestoreOneBackPointer(Object* object,
- Object* real_prototype,
- bool* keep_entry) {
- if (!object->IsMap()) return false;
- Map* map = Map::cast(object);
- if (Marking::MarkBitFrom(map).Get()) {
- *keep_entry = true;
- return false;
- }
- ASSERT(map->prototype() == this || map->prototype() == real_prototype);
- // Getter prototype() is read-only, set_prototype() has side effects.
- *RawField(map, Map::kPrototypeOffset) = real_prototype;
+// Clear a possible back pointer in case the transition leads to a dead map.
+// Returns true if a back pointer was cleared and false otherwise.
+static bool ClearBackPointer(Heap* heap, Object* target) {
+ ASSERT(target->IsMap());
+ Map* map = Map::cast(target);
+ if (Marking::MarkBitFrom(map).Get()) return false;
+ map->SetBackPointer(heap->undefined_value(), SKIP_WRITE_BARRIER);
return true;
}
-void Map::ClearNonLiveTransitions(Heap* heap, Object* real_prototype) {
+void Map::ClearNonLiveTransitions(Heap* heap) {
DescriptorArray* d = DescriptorArray::cast(
*RawField(this, Map::kInstanceDescriptorsOrBitField3Offset));
if (d->IsEmpty()) return;
Smi* NullDescriptorDetails =
PropertyDetails(NONE, NULL_DESCRIPTOR).AsSmi();
- FixedArray* contents = FixedArray::cast(
- d->get(DescriptorArray::kContentArrayIndex));
- ASSERT(contents->length() >= 2);
- for (int i = 0; i < contents->length(); i += 2) {
+ for (int i = 0; i < d->number_of_descriptors(); ++i) {
// If the pair (value, details) is a map transition, check if the target is
// live. If not, null the descriptor. Also drop the back pointer for that
// map transition, so that this map is not reached again by following a back
- // pointer from a non-live object.
+ // pointer from that non-live map.
bool keep_entry = false;
- PropertyDetails details(Smi::cast(contents->get(i + 1)));
+ PropertyDetails details(d->GetDetails(i));
switch (details.type()) {
case MAP_TRANSITION:
case CONSTANT_TRANSITION:
- RestoreOneBackPointer(contents->get(i), real_prototype, &keep_entry);
- break;
- case ELEMENTS_TRANSITION: {
- Object* object = contents->get(i);
- if (object->IsMap()) {
- RestoreOneBackPointer(object, real_prototype, &keep_entry);
- } else {
- FixedArray* array = FixedArray::cast(object);
- for (int j = 0; j < array->length(); ++j) {
- if (RestoreOneBackPointer(array->get(j),
- real_prototype,
- &keep_entry)) {
- array->set_undefined(j);
- }
- }
- }
+ keep_entry = !ClearBackPointer(heap, d->GetValue(i));
break;
- }
case CALLBACKS: {
- Object* object = contents->get(i);
+ Object* object = d->GetValue(i);
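+        // Getter and setter slots may each hold a map transition. Dead
+        // targets get their back pointer cleared and the slot holed out;
+        // a live target or a real accessor forces the entry to be kept.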
if (object->IsAccessorPair()) {
AccessorPair* accessors = AccessorPair::cast(object);
- if (RestoreOneBackPointer(accessors->getter(),
- real_prototype,
- &keep_entry)) {
- accessors->set_getter(heap->the_hole_value());
+ Object* getter = accessors->getter();
+ if (getter->IsMap()) {
+ if (ClearBackPointer(heap, getter)) {
+ accessors->set_getter(heap->the_hole_value());
+ } else {
+ keep_entry = true;
+ }
+ } else if (!getter->IsTheHole()) {
+ keep_entry = true;
}
- if (RestoreOneBackPointer(accessors->setter(),
- real_prototype,
- &keep_entry)) {
- accessors->set_setter(heap->the_hole_value());
+ Object* setter = accessors->setter();
+ if (setter->IsMap()) {
+ if (ClearBackPointer(heap, setter)) {
+ accessors->set_setter(heap->the_hole_value());
+ } else {
+ keep_entry = true;
+ }
+          } else if (!setter->IsTheHole()) {
+ keep_entry = true;
}
} else {
keep_entry = true;
@@ -7327,8 +7376,8 @@ void Map::ClearNonLiveTransitions(Heap* heap, Object* real_prototype) {
// What we *really* want to do here is removing this entry completely, but
// for technical reasons we can't do this, so we zero it out instead.
if (!keep_entry) {
- contents->set_unchecked(i + 1, NullDescriptorDetails);
- contents->set_null_unchecked(heap, i);
+ d->SetDetailsUnchecked(i, NullDescriptorDetails);
+ d->SetNullValueUnchecked(i, heap);
}
}
}
@@ -7451,14 +7500,64 @@ bool JSFunction::IsInlineable() {
}
+MaybeObject* JSObject::OptimizeAsPrototype() {
+ if (IsGlobalObject()) return this;
+
+ // Make sure prototypes are fast objects and their maps have the bit set
+ // so they remain fast.
+ Map* proto_map = map();
+ if (!proto_map->used_for_prototype()) {
+ if (!HasFastProperties()) {
+ MaybeObject* new_proto = TransformToFastProperties(0);
+ if (new_proto->IsFailure()) return new_proto;
+ ASSERT(new_proto == this);
+ proto_map = map();
+ if (!proto_map->is_shared()) {
+ proto_map->set_used_for_prototype(true);
+ }
+ } else {
+ Heap* heap = GetHeap();
+ // We use the hole value as a singleton key in the prototype transition
+ // map so that we don't multiply the number of maps unnecessarily.
+ Map* new_map =
+ proto_map->GetPrototypeTransition(heap->the_hole_value());
+ if (new_map == NULL) {
+ MaybeObject* maybe_new_map =
+ proto_map->CopyDropTransitions(DescriptorArray::MAY_BE_SHARED);
+ if (!maybe_new_map->To<Map>(&new_map)) return maybe_new_map;
+ new_map->set_used_for_prototype(true);
+ MaybeObject* ok =
+ proto_map->PutPrototypeTransition(heap->the_hole_value(),
+ new_map);
+ if (ok->IsFailure()) return ok;
+ }
+ ASSERT(!proto_map->is_shared() && !new_map->is_shared());
+ set_map(new_map);
+ }
+ }
+ return this;
+}
+
+
MaybeObject* JSFunction::SetInstancePrototype(Object* value) {
ASSERT(value->IsJSReceiver());
Heap* heap = GetHeap();
+
+ // First some logic for the map of the prototype to make sure the
+ // used_for_prototype flag is set.
+ if (value->IsJSObject()) {
+ MaybeObject* ok = JSObject::cast(value)->OptimizeAsPrototype();
+ if (ok->IsFailure()) return ok;
+ }
+
+ // Now some logic for the maps of the objects that are created by using this
+ // function as a constructor.
if (has_initial_map()) {
// If the function has allocated the initial map
// replace it with a copy containing the new prototype.
Map* new_map;
- MaybeObject* maybe_new_map = initial_map()->CopyDropTransitions();
+ MaybeObject* maybe_new_map =
+ initial_map()->CopyDropTransitions(DescriptorArray::MAY_BE_SHARED);
if (!maybe_new_map->To(&new_map)) return maybe_new_map;
new_map->set_prototype(value);
MaybeObject* maybe_object =
@@ -7488,7 +7587,8 @@ MaybeObject* JSFunction::SetPrototype(Object* value) {
// Remove map transitions because they point to maps with a
// different prototype.
Map* new_map;
- { MaybeObject* maybe_new_map = map()->CopyDropTransitions();
+ { MaybeObject* maybe_new_map =
+ map()->CopyDropTransitions(DescriptorArray::MAY_BE_SHARED);
if (!maybe_new_map->To(&new_map)) return maybe_new_map;
}
Heap* heap = new_map->GetHeap();
@@ -7883,6 +7983,7 @@ void SharedFunctionInfo::ResetForNewContext(int new_ic_age) {
code()->set_optimizable(true);
}
set_opt_count(0);
+ set_deopt_count(0);
}
}
@@ -8154,6 +8255,25 @@ void Code::ClearInlineCaches() {
}
+void Code::ClearTypeFeedbackCells(Heap* heap) {
+ Object* raw_info = type_feedback_info();
+ if (raw_info->IsTypeFeedbackInfo()) {
+ TypeFeedbackCells* type_feedback_cells =
+ TypeFeedbackInfo::cast(raw_info)->type_feedback_cells();
+ for (int i = 0; i < type_feedback_cells->CellCount(); i++) {
+ ASSERT(type_feedback_cells->AstId(i)->IsSmi());
+ JSGlobalPropertyCell* cell = type_feedback_cells->Cell(i);
+ cell->set_value(TypeFeedbackCells::RawUninitializedSentinel(heap));
+ }
+ }
+}
+
+
+bool Code::allowed_in_shared_map_code_cache() {
+ return is_keyed_load_stub() || is_keyed_store_stub();
+}
+
+
#ifdef ENABLE_DISASSEMBLER
void DeoptimizationInputData::DeoptimizationInputDataPrint(FILE* out) {
@@ -8341,7 +8461,6 @@ const char* Code::PropertyType2String(PropertyType type) {
case HANDLER: return "HANDLER";
case INTERCEPTOR: return "INTERCEPTOR";
case MAP_TRANSITION: return "MAP_TRANSITION";
- case ELEMENTS_TRANSITION: return "ELEMENTS_TRANSITION";
case CONSTANT_TRANSITION: return "CONSTANT_TRANSITION";
case NULL_DESCRIPTOR: return "NULL_DESCRIPTOR";
}
@@ -8386,6 +8505,14 @@ void Code::Disassemble(const char* name, FILE* out) {
if (is_call_stub() || is_keyed_call_stub()) {
PrintF(out, "argc = %d\n", arguments_count());
}
+ if (is_compare_ic_stub()) {
+ CompareIC::State state = CompareIC::ComputeState(this);
+ PrintF(out, "compare_state = %s\n", CompareIC::GetStateName(state));
+ }
+ if (is_compare_ic_stub() && major_key() == CodeStub::CompareIC) {
+ Token::Value op = CompareIC::ComputeOperation(this);
+ PrintF(out, "compare_operation = %s\n", Token::Name(op));
+ }
}
if ((name != NULL) && (name[0] != '\0')) {
PrintF(out, "name = %s\n", name);
@@ -8457,7 +8584,7 @@ void Code::Disassemble(const char* name, FILE* out) {
MaybeObject* JSObject::SetFastElementsCapacityAndLength(
int capacity,
int length,
- SetFastElementsCapacityMode set_capacity_mode) {
+ SetFastElementsCapacitySmiMode smi_mode) {
Heap* heap = GetHeap();
// We should never end in here with a pixel or external array.
ASSERT(!HasExternalArrayElements());
@@ -8468,32 +8595,40 @@ MaybeObject* JSObject::SetFastElementsCapacityAndLength(
if (!maybe->To(&new_elements)) return maybe;
}
- // Find the new map to use for this object if there is a map change.
- Map* new_map = NULL;
- if (elements()->map() != heap->non_strict_arguments_elements_map()) {
- // The resized array has FAST_SMI_ONLY_ELEMENTS if the capacity mode forces
- // it, or if it's allowed and the old elements array contained only SMIs.
- bool has_fast_smi_only_elements =
- (set_capacity_mode == kForceSmiOnlyElements) ||
- ((set_capacity_mode == kAllowSmiOnlyElements) &&
- (elements()->map()->has_fast_smi_only_elements() ||
- elements() == heap->empty_fixed_array()));
- ElementsKind elements_kind = has_fast_smi_only_elements
- ? FAST_SMI_ONLY_ELEMENTS
- : FAST_ELEMENTS;
- MaybeObject* maybe = GetElementsTransitionMap(GetIsolate(), elements_kind);
- if (!maybe->To(&new_map)) return maybe;
+ ElementsKind elements_kind = GetElementsKind();
+ ElementsKind new_elements_kind;
+ // The resized array has FAST_*_SMI_ELEMENTS if the capacity mode forces it,
+ // or if it's allowed and the old elements array contained only SMIs.
+ bool has_fast_smi_elements =
+ (smi_mode == kForceSmiElements) ||
+ ((smi_mode == kAllowSmiElements) && HasFastSmiElements());
+ if (has_fast_smi_elements) {
+ if (IsHoleyElementsKind(elements_kind)) {
+ new_elements_kind = FAST_HOLEY_SMI_ELEMENTS;
+ } else {
+ new_elements_kind = FAST_SMI_ELEMENTS;
+ }
+ } else {
+ if (IsHoleyElementsKind(elements_kind)) {
+ new_elements_kind = FAST_HOLEY_ELEMENTS;
+ } else {
+ new_elements_kind = FAST_ELEMENTS;
+ }
}
-
FixedArrayBase* old_elements = elements();
- ElementsKind elements_kind = GetElementsKind();
ElementsAccessor* accessor = ElementsAccessor::ForKind(elements_kind);
- ElementsKind to_kind = (elements_kind == FAST_SMI_ONLY_ELEMENTS)
- ? FAST_SMI_ONLY_ELEMENTS
- : FAST_ELEMENTS;
- // int copy_size = Min(old_elements_raw->length(), new_elements->length());
- accessor->CopyElements(this, new_elements, to_kind);
+ { MaybeObject* maybe_obj =
+ accessor->CopyElements(this, new_elements, new_elements_kind);
+ if (maybe_obj->IsFailure()) return maybe_obj;
+ }
if (elements_kind != NON_STRICT_ARGUMENTS_ELEMENTS) {
+ Map* new_map = map();
+ if (new_elements_kind != elements_kind) {
+ MaybeObject* maybe =
+ GetElementsTransitionMap(GetIsolate(), new_elements_kind);
+ if (!maybe->To(&new_map)) return maybe;
+ }
+ ValidateElements();
set_map_and_elements(new_map, new_elements);
} else {
FixedArray* parameter_map = FixedArray::cast(old_elements);
@@ -8505,11 +8640,9 @@ MaybeObject* JSObject::SetFastElementsCapacityAndLength(
GetElementsKind(), new_elements);
}
- // Update the length if necessary.
if (IsJSArray()) {
JSArray::cast(this)->set_length(Smi::FromInt(length));
}
-
return new_elements;
}
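// Illustrative sketch, not from the patch: the ElementsKind selection performed
// at the top of SetFastElementsCapacityAndLength, reduced to a standalone
// function over the four fast tagged kinds used by this change. The enum and
// the ChooseKind/keep_smi_only/was_holey names are hypothetical; the real code
// derives them from smi_mode and IsHoleyElementsKind(elements_kind).
enum SketchElementsKind {
  SKETCH_FAST_SMI_ELEMENTS,
  SKETCH_FAST_HOLEY_SMI_ELEMENTS,
  SKETCH_FAST_ELEMENTS,
  SKETCH_FAST_HOLEY_ELEMENTS
};

static SketchElementsKind ChooseKind(bool keep_smi_only, bool was_holey) {
  if (keep_smi_only) {
    // kForceSmiElements, or kAllowSmiElements on an object that already holds
    // only Smis: keep the Smi-only kind, preserving holeyness.
    return was_holey ? SKETCH_FAST_HOLEY_SMI_ELEMENTS
                     : SKETCH_FAST_SMI_ELEMENTS;
  }
  // Otherwise fall back to the generic tagged kind, again preserving holeyness.
  return was_holey ? SKETCH_FAST_HOLEY_ELEMENTS : SKETCH_FAST_ELEMENTS;
}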
@@ -8527,17 +8660,28 @@ MaybeObject* JSObject::SetFastDoubleElementsCapacityAndLength(
if (!maybe_obj->To(&elems)) return maybe_obj;
}
+ ElementsKind elements_kind = GetElementsKind();
+ ElementsKind new_elements_kind = elements_kind;
+ if (IsHoleyElementsKind(elements_kind)) {
+ new_elements_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
+ } else {
+ new_elements_kind = FAST_DOUBLE_ELEMENTS;
+ }
+
Map* new_map;
{ MaybeObject* maybe_obj =
- GetElementsTransitionMap(heap->isolate(), FAST_DOUBLE_ELEMENTS);
+ GetElementsTransitionMap(heap->isolate(), new_elements_kind);
if (!maybe_obj->To(&new_map)) return maybe_obj;
}
FixedArrayBase* old_elements = elements();
- ElementsKind elements_kind = GetElementsKind();
ElementsAccessor* accessor = ElementsAccessor::ForKind(elements_kind);
- accessor->CopyElements(this, elems, FAST_DOUBLE_ELEMENTS);
+ { MaybeObject* maybe_obj =
+ accessor->CopyElements(this, elems, FAST_DOUBLE_ELEMENTS);
+ if (maybe_obj->IsFailure()) return maybe_obj;
+ }
if (elements_kind != NON_STRICT_ARGUMENTS_ELEMENTS) {
+ ValidateElements();
set_map_and_elements(new_map, elems);
} else {
FixedArray* parameter_map = FixedArray::cast(old_elements);
@@ -8546,7 +8690,7 @@ MaybeObject* JSObject::SetFastDoubleElementsCapacityAndLength(
if (FLAG_trace_elements_transitions) {
PrintElementsTransition(stdout, elements_kind, old_elements,
- FAST_DOUBLE_ELEMENTS, elems);
+ GetElementsKind(), elems);
}
if (IsJSArray()) {
@@ -8586,7 +8730,7 @@ MaybeObject* JSArray::SetElementsLength(Object* len) {
}
-Object* Map::GetPrototypeTransition(Object* prototype) {
+Map* Map::GetPrototypeTransition(Object* prototype) {
FixedArray* cache = prototype_transitions();
int number_of_transitions = NumberOfProtoTransitions();
const int proto_offset =
@@ -8596,8 +8740,7 @@ Object* Map::GetPrototypeTransition(Object* prototype) {
for (int i = 0; i < number_of_transitions; i++) {
if (cache->get(proto_offset + i * step) == prototype) {
Object* map = cache->get(map_offset + i * step);
- ASSERT(map->IsMap());
- return map;
+ return Map::cast(map);
}
}
return NULL;
@@ -8705,21 +8848,27 @@ MaybeObject* JSReceiver::SetPrototype(Object* value,
// Nothing to do if prototype is already set.
if (map->prototype() == value) return value;
- Object* new_map = map->GetPrototypeTransition(value);
+ if (value->IsJSObject()) {
+ MaybeObject* ok = JSObject::cast(value)->OptimizeAsPrototype();
+ if (ok->IsFailure()) return ok;
+ }
+
+ Map* new_map = map->GetPrototypeTransition(value);
if (new_map == NULL) {
- { MaybeObject* maybe_new_map = map->CopyDropTransitions();
- if (!maybe_new_map->ToObject(&new_map)) return maybe_new_map;
+ { MaybeObject* maybe_new_map =
+ map->CopyDropTransitions(DescriptorArray::MAY_BE_SHARED);
+ if (!maybe_new_map->To(&new_map)) return maybe_new_map;
}
{ MaybeObject* maybe_new_cache =
- map->PutPrototypeTransition(value, Map::cast(new_map));
+ map->PutPrototypeTransition(value, new_map);
if (maybe_new_cache->IsFailure()) return maybe_new_cache;
}
- Map::cast(new_map)->set_prototype(value);
+ new_map->set_prototype(value);
}
- ASSERT(Map::cast(new_map)->prototype() == value);
- real_receiver->set_map(Map::cast(new_map));
+ ASSERT(new_map->prototype() == value);
+ real_receiver->set_map(new_map);
heap->ClearInstanceofCache();
ASSERT(size == Size());
@@ -8826,8 +8975,10 @@ JSObject::LocalElementType JSObject::HasLocalElement(uint32_t index) {
}
switch (GetElementsKind()) {
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS: {
+ case FAST_SMI_ELEMENTS:
+ case FAST_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS: {
uint32_t length = IsJSArray() ?
static_cast<uint32_t>
(Smi::cast(JSArray::cast(this)->length())->value()) :
@@ -8838,7 +8989,8 @@ JSObject::LocalElementType JSObject::HasLocalElement(uint32_t index) {
}
break;
}
- case FAST_DOUBLE_ELEMENTS: {
+ case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS: {
uint32_t length = IsJSArray() ?
static_cast<uint32_t>
(Smi::cast(JSArray::cast(this)->length())->value()) :
@@ -9122,7 +9274,7 @@ MaybeObject* JSObject::SetFastElement(uint32_t index,
Object* value,
StrictModeFlag strict_mode,
bool check_prototype) {
- ASSERT(HasFastTypeElements() ||
+ ASSERT(HasFastSmiOrObjectElements() ||
HasFastArgumentsElements());
FixedArray* backing_store = FixedArray::cast(elements());
@@ -9148,13 +9300,29 @@ MaybeObject* JSObject::SetFastElement(uint32_t index,
// Check if the length property of this object needs to be updated.
uint32_t array_length = 0;
bool must_update_array_length = false;
+ bool introduces_holes = true;
if (IsJSArray()) {
CHECK(JSArray::cast(this)->length()->ToArrayIndex(&array_length));
+ introduces_holes = index > array_length;
if (index >= array_length) {
must_update_array_length = true;
array_length = index + 1;
}
+ } else {
+ introduces_holes = index >= capacity;
}
+
+ // If the array is growing, and it's not growth by a single element at the
+ // end, make sure that the ElementsKind is HOLEY.
+ ElementsKind elements_kind = GetElementsKind();
+ if (introduces_holes &&
+ IsFastElementsKind(elements_kind) &&
+ !IsFastHoleyElementsKind(elements_kind)) {
+ ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind);
+ MaybeObject* maybe = TransitionElementsKind(transitioned_kind);
+ if (maybe->IsFailure()) return maybe;
+ }
+
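  // Worked example of the rule above, not from the patch: for a packed
  // FAST_SMI_ELEMENTS JSArray of length 3, a store to index 3 merely appends
  // one element and the kind stays packed, whereas a store to index 5 would
  // leave indices 3 and 4 as holes, so the object first transitions to
  // FAST_HOLEY_SMI_ELEMENTS.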
// Check if the capacity of the backing store needs to be increased, or if
// a transition to slow elements is necessary.
if (index >= capacity) {
@@ -9174,42 +9342,44 @@ MaybeObject* JSObject::SetFastElement(uint32_t index,
}
}
// Convert to fast double elements if appropriate.
- if (HasFastSmiOnlyElements() && !value->IsSmi() && value->IsNumber()) {
+ if (HasFastSmiElements() && !value->IsSmi() && value->IsNumber()) {
MaybeObject* maybe =
SetFastDoubleElementsCapacityAndLength(new_capacity, array_length);
if (maybe->IsFailure()) return maybe;
FixedDoubleArray::cast(elements())->set(index, value->Number());
+ ValidateElements();
return value;
}
- // Change elements kind from SMI_ONLY to generic FAST if necessary.
- if (HasFastSmiOnlyElements() && !value->IsSmi()) {
+ // Change elements kind from Smi-only to generic FAST if necessary.
+ if (HasFastSmiElements() && !value->IsSmi()) {
Map* new_map;
- { MaybeObject* maybe_new_map = GetElementsTransitionMap(GetIsolate(),
- FAST_ELEMENTS);
- if (!maybe_new_map->To(&new_map)) return maybe_new_map;
- }
+ ElementsKind kind = HasFastHoleyElements()
+ ? FAST_HOLEY_ELEMENTS
+ : FAST_ELEMENTS;
+ MaybeObject* maybe_new_map = GetElementsTransitionMap(GetIsolate(),
+ kind);
+ if (!maybe_new_map->To(&new_map)) return maybe_new_map;
+
set_map(new_map);
- if (FLAG_trace_elements_transitions) {
- PrintElementsTransition(stdout, FAST_SMI_ONLY_ELEMENTS, elements(),
- FAST_ELEMENTS, elements());
- }
}
// Increase backing store capacity if that's been decided previously.
if (new_capacity != capacity) {
FixedArray* new_elements;
- SetFastElementsCapacityMode set_capacity_mode =
- value->IsSmi() && HasFastSmiOnlyElements()
- ? kAllowSmiOnlyElements
- : kDontAllowSmiOnlyElements;
+ SetFastElementsCapacitySmiMode smi_mode =
+ value->IsSmi() && HasFastSmiElements()
+ ? kAllowSmiElements
+ : kDontAllowSmiElements;
{ MaybeObject* maybe =
SetFastElementsCapacityAndLength(new_capacity,
array_length,
- set_capacity_mode);
+ smi_mode);
if (!maybe->To(&new_elements)) return maybe;
}
new_elements->set(index, value);
+ ValidateElements();
return value;
}
+
// Finally, set the new element and length.
ASSERT(elements()->IsFixedArray());
backing_store->set(index, value);
@@ -9333,20 +9503,21 @@ MaybeObject* JSObject::SetDictionaryElement(uint32_t index,
} else {
new_length = dictionary->max_number_key() + 1;
}
- SetFastElementsCapacityMode set_capacity_mode = FLAG_smi_only_arrays
- ? kAllowSmiOnlyElements
- : kDontAllowSmiOnlyElements;
+ SetFastElementsCapacitySmiMode smi_mode = FLAG_smi_only_arrays
+ ? kAllowSmiElements
+ : kDontAllowSmiElements;
bool has_smi_only_elements = false;
bool should_convert_to_fast_double_elements =
ShouldConvertToFastDoubleElements(&has_smi_only_elements);
if (has_smi_only_elements) {
- set_capacity_mode = kForceSmiOnlyElements;
+ smi_mode = kForceSmiElements;
}
MaybeObject* result = should_convert_to_fast_double_elements
? SetFastDoubleElementsCapacityAndLength(new_length, new_length)
: SetFastElementsCapacityAndLength(new_length,
new_length,
- set_capacity_mode);
+ smi_mode);
+ ValidateElements();
if (result->IsFailure()) return result;
#ifdef DEBUG
if (FLAG_trace_normalization) {
@@ -9385,27 +9556,40 @@ MUST_USE_RESULT MaybeObject* JSObject::SetFastDoubleElement(
// If the value object is not a heap number, switch to fast elements and try
// again.
bool value_is_smi = value->IsSmi();
+ bool introduces_holes = true;
+ uint32_t length = elms_length;
+ if (IsJSArray()) {
+ CHECK(JSArray::cast(this)->length()->ToArrayIndex(&length));
+ introduces_holes = index > length;
+ } else {
+ introduces_holes = index >= elms_length;
+ }
+
if (!value->IsNumber()) {
- Object* obj;
- uint32_t length = elms_length;
- if (IsJSArray()) {
- CHECK(JSArray::cast(this)->length()->ToArrayIndex(&length));
- }
MaybeObject* maybe_obj = SetFastElementsCapacityAndLength(
elms_length,
length,
- kDontAllowSmiOnlyElements);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
- return SetFastElement(index,
- value,
- strict_mode,
- check_prototype);
+ kDontAllowSmiElements);
+ if (maybe_obj->IsFailure()) return maybe_obj;
+ maybe_obj = SetFastElement(index, value, strict_mode, check_prototype);
+ if (maybe_obj->IsFailure()) return maybe_obj;
+ ValidateElements();
+ return maybe_obj;
}
double double_value = value_is_smi
? static_cast<double>(Smi::cast(value)->value())
: HeapNumber::cast(value)->value();
+ // If the array is growing, and it's not growth by a single element at the
+ // end, make sure that the ElementsKind is HOLEY.
+ ElementsKind elements_kind = GetElementsKind();
+ if (introduces_holes && !IsFastHoleyElementsKind(elements_kind)) {
+ ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind);
+ MaybeObject* maybe = TransitionElementsKind(transitioned_kind);
+ if (maybe->IsFailure()) return maybe;
+ }
+
// Check whether there is extra space in the fixed array.
if (index < elms_length) {
FixedDoubleArray* elms = FixedDoubleArray::cast(elements());
@@ -9427,13 +9611,11 @@ MUST_USE_RESULT MaybeObject* JSObject::SetFastDoubleElement(
int new_capacity = NewElementsCapacity(index+1);
if (!ShouldConvertToSlowElements(new_capacity)) {
ASSERT(static_cast<uint32_t>(new_capacity) > index);
- Object* obj;
- { MaybeObject* maybe_obj =
- SetFastDoubleElementsCapacityAndLength(new_capacity,
- index + 1);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
- }
+ MaybeObject* maybe_obj =
+ SetFastDoubleElementsCapacityAndLength(new_capacity, index + 1);
+ if (maybe_obj->IsFailure()) return maybe_obj;
FixedDoubleArray::cast(elements())->set(index, double_value);
+ ValidateElements();
return value;
}
}
@@ -9458,7 +9640,7 @@ MaybeObject* JSReceiver::SetElement(uint32_t index,
bool check_proto) {
if (IsJSProxy()) {
return JSProxy::cast(this)->SetElementWithHandler(
- index, value, strict_mode);
+ this, index, value, strict_mode);
} else {
return JSObject::cast(this)->SetElement(
index, value, attributes, strict_mode, check_proto);
@@ -9577,10 +9759,13 @@ MaybeObject* JSObject::SetElementWithoutInterceptor(uint32_t index,
(attr & (DONT_DELETE | DONT_ENUM | READ_ONLY)) == 0);
Isolate* isolate = GetIsolate();
switch (GetElementsKind()) {
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
return SetFastElement(index, value, strict_mode, check_prototype);
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
return SetFastDoubleElement(index, value, strict_mode, check_prototype);
case EXTERNAL_PIXEL_ELEMENTS: {
ExternalPixelArray* pixels = ExternalPixelArray::cast(elements());
@@ -9671,11 +9856,19 @@ Handle<Object> JSObject::TransitionElementsKind(Handle<JSObject> object,
MaybeObject* JSObject::TransitionElementsKind(ElementsKind to_kind) {
ElementsKind from_kind = map()->elements_kind();
+ if (IsFastHoleyElementsKind(from_kind)) {
+ to_kind = GetHoleyElementsKind(to_kind);
+ }
+
Isolate* isolate = GetIsolate();
- if ((from_kind == FAST_SMI_ONLY_ELEMENTS ||
- elements() == isolate->heap()->empty_fixed_array()) &&
- to_kind == FAST_ELEMENTS) {
- ASSERT(from_kind != FAST_ELEMENTS);
+ if (elements() == isolate->heap()->empty_fixed_array() ||
+ (IsFastSmiOrObjectElementsKind(from_kind) &&
+ IsFastSmiOrObjectElementsKind(to_kind)) ||
+ (from_kind == FAST_DOUBLE_ELEMENTS &&
+ to_kind == FAST_HOLEY_DOUBLE_ELEMENTS)) {
+ ASSERT(from_kind != TERMINAL_FAST_ELEMENTS_KIND);
+    // No change is needed to the elements() buffer; the transition
+ // only requires a map change.
MaybeObject* maybe_new_map = GetElementsTransitionMap(isolate, to_kind);
Map* new_map;
if (!maybe_new_map->To(&new_map)) return maybe_new_map;
@@ -9702,18 +9895,21 @@ MaybeObject* JSObject::TransitionElementsKind(ElementsKind to_kind) {
}
}
- if (from_kind == FAST_SMI_ONLY_ELEMENTS &&
- to_kind == FAST_DOUBLE_ELEMENTS) {
+ if (IsFastSmiElementsKind(from_kind) &&
+ IsFastDoubleElementsKind(to_kind)) {
MaybeObject* maybe_result =
SetFastDoubleElementsCapacityAndLength(capacity, length);
if (maybe_result->IsFailure()) return maybe_result;
+ ValidateElements();
return this;
}
- if (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS) {
+ if (IsFastDoubleElementsKind(from_kind) &&
+ IsFastObjectElementsKind(to_kind)) {
MaybeObject* maybe_result = SetFastElementsCapacityAndLength(
- capacity, length, kDontAllowSmiOnlyElements);
+ capacity, length, kDontAllowSmiElements);
if (maybe_result->IsFailure()) return maybe_result;
+ ValidateElements();
return this;
}
@@ -9727,10 +9923,14 @@ MaybeObject* JSObject::TransitionElementsKind(ElementsKind to_kind) {
// static
bool Map::IsValidElementsTransition(ElementsKind from_kind,
ElementsKind to_kind) {
- return
- (from_kind == FAST_SMI_ONLY_ELEMENTS &&
- (to_kind == FAST_DOUBLE_ELEMENTS || to_kind == FAST_ELEMENTS)) ||
- (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS);
+ // Transitions can't go backwards.
+ if (!IsMoreGeneralElementsKindTransition(from_kind, to_kind)) {
+ return false;
+ }
+
+ // Transitions from HOLEY -> PACKED are not allowed.
+ return !IsFastHoleyElementsKind(from_kind) ||
+ IsFastHoleyElementsKind(to_kind);
}
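// Concrete cases for the two rules above (illustration, not from the patch):
//   FAST_SMI_ELEMENTS          -> FAST_HOLEY_SMI_ELEMENTS  allowed (adds holes)
//   FAST_SMI_ELEMENTS          -> FAST_DOUBLE_ELEMENTS     allowed (more general)
//   FAST_HOLEY_SMI_ELEMENTS    -> FAST_SMI_ELEMENTS        rejected (holey -> packed)
//   FAST_HOLEY_DOUBLE_ELEMENTS -> FAST_ELEMENTS            rejected (holey -> packed)
//   FAST_HOLEY_DOUBLE_ELEMENTS -> FAST_HOLEY_ELEMENTS      allowed (more general, stays holey)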
@@ -9821,8 +10021,16 @@ void JSObject::GetElementsCapacityAndUsage(int* capacity, int* used) {
break;
}
// Fall through.
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_ELEMENTS:
+ if (IsJSArray()) {
+ *capacity = backing_store_base->length();
+ *used = Smi::cast(JSArray::cast(this)->length())->value();
+ break;
+ }
+ // Fall through if packing is not guaranteed.
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
backing_store = FixedArray::cast(backing_store_base);
*capacity = backing_store->length();
for (int i = 0; i < *capacity; ++i) {
@@ -9836,7 +10044,14 @@ void JSObject::GetElementsCapacityAndUsage(int* capacity, int* used) {
*used = dictionary->NumberOfElements();
break;
}
- case FAST_DOUBLE_ELEMENTS: {
+ case FAST_DOUBLE_ELEMENTS:
+ if (IsJSArray()) {
+ *capacity = backing_store_base->length();
+ *used = Smi::cast(JSArray::cast(this)->length())->value();
+ break;
+ }
+ // Fall through if packing is not guaranteed.
+ case FAST_HOLEY_DOUBLE_ELEMENTS: {
FixedDoubleArray* elms = FixedDoubleArray::cast(elements());
*capacity = elms->length();
for (int i = 0; i < *capacity; i++) {
@@ -10106,16 +10321,19 @@ bool JSObject::HasRealElementProperty(uint32_t index) {
if (this->IsStringObjectWithCharacterAt(index)) return true;
switch (GetElementsKind()) {
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS: {
- uint32_t length = IsJSArray() ?
+ case FAST_SMI_ELEMENTS:
+ case FAST_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS: {
+ uint32_t length = IsJSArray() ?
static_cast<uint32_t>(
Smi::cast(JSArray::cast(this)->length())->value()) :
static_cast<uint32_t>(FixedArray::cast(elements())->length());
return (index < length) &&
!FixedArray::cast(elements())->get(index)->IsTheHole();
}
- case FAST_DOUBLE_ELEMENTS: {
+ case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS: {
uint32_t length = IsJSArray() ?
static_cast<uint32_t>(
Smi::cast(JSArray::cast(this)->length())->value()) :
@@ -10315,7 +10533,7 @@ int JSObject::NumberOfLocalElements(PropertyAttributes filter) {
int JSObject::NumberOfEnumElements() {
// Fast case for objects with no elements.
- if (!IsJSValue() && HasFastElements()) {
+ if (!IsJSValue() && HasFastObjectElements()) {
uint32_t length = IsJSArray() ?
static_cast<uint32_t>(
Smi::cast(JSArray::cast(this)->length())->value()) :
@@ -10331,8 +10549,10 @@ int JSObject::GetLocalElementKeys(FixedArray* storage,
PropertyAttributes filter) {
int counter = 0;
switch (GetElementsKind()) {
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS: {
+ case FAST_SMI_ELEMENTS:
+ case FAST_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS: {
int length = IsJSArray() ?
Smi::cast(JSArray::cast(this)->length())->value() :
FixedArray::cast(elements())->length();
@@ -10347,7 +10567,8 @@ int JSObject::GetLocalElementKeys(FixedArray* storage,
ASSERT(!storage || storage->length() >= counter);
break;
}
- case FAST_DOUBLE_ELEMENTS: {
+ case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS: {
int length = IsJSArray() ?
Smi::cast(JSArray::cast(this)->length())->value() :
FixedDoubleArray::cast(elements())->length();
@@ -10928,7 +11149,6 @@ bool StringDictionary::ContainsTransition(int entry) {
switch (DetailsAt(entry).type()) {
case MAP_TRANSITION:
case CONSTANT_TRANSITION:
- case ELEMENTS_TRANSITION:
return true;
case CALLBACKS: {
Object* value = ValueAt(entry);
@@ -11280,10 +11500,9 @@ MaybeObject* JSObject::PrepareElementsForSort(uint32_t limit) {
// Convert to fast elements.
Object* obj;
- { MaybeObject* maybe_obj = GetElementsTransitionMap(GetIsolate(),
- FAST_ELEMENTS);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
- }
+ MaybeObject* maybe_obj = GetElementsTransitionMap(GetIsolate(),
+ FAST_HOLEY_ELEMENTS);
+ if (!maybe_obj->ToObject(&obj)) return maybe_obj;
Map* new_map = Map::cast(obj);
PretenureFlag tenure = heap->InNewSpace(this) ? NOT_TENURED: TENURED;
@@ -11294,9 +11513,9 @@ MaybeObject* JSObject::PrepareElementsForSort(uint32_t limit) {
}
FixedArray* fast_elements = FixedArray::cast(new_array);
dict->CopyValuesTo(fast_elements);
+ ValidateElements();
- set_map(new_map);
- set_elements(fast_elements);
+ set_map_and_elements(new_map, fast_elements);
} else if (HasExternalArrayElements()) {
// External arrays cannot have holes or undefined elements.
return Smi::FromInt(ExternalArray::cast(elements())->length());
@@ -11306,7 +11525,7 @@ MaybeObject* JSObject::PrepareElementsForSort(uint32_t limit) {
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
}
- ASSERT(HasFastTypeElements() || HasFastDoubleElements());
+ ASSERT(HasFastSmiOrObjectElements() || HasFastDoubleElements());
// Collect holes at the end, undefined before that and the rest at the
// start, and return the number of non-hole, non-undefined values.
@@ -12379,7 +12598,8 @@ MaybeObject* StringDictionary::TransformPropertiesToFastFor(
// Allocate the instance descriptor.
DescriptorArray* descriptors;
{ MaybeObject* maybe_descriptors =
- DescriptorArray::Allocate(instance_descriptor_length);
+ DescriptorArray::Allocate(instance_descriptor_length,
+ DescriptorArray::MAY_BE_SHARED);
if (!maybe_descriptors->To<DescriptorArray>(&descriptors)) {
return maybe_descriptors;
}
@@ -12853,7 +13073,7 @@ int BreakPointInfo::GetBreakPointCount() {
#endif // ENABLE_DEBUGGER_SUPPORT
-MaybeObject* JSDate::GetField(Object* object, Smi* index) {
+Object* JSDate::GetField(Object* object, Smi* index) {
return JSDate::cast(object)->DoGetField(
static_cast<FieldIndex>(index->value()));
}
diff --git a/deps/v8/src/objects.h b/deps/v8/src/objects.h
index 7469142f5b..9aac37fcce 100644
--- a/deps/v8/src/objects.h
+++ b/deps/v8/src/objects.h
@@ -30,6 +30,7 @@
#include "allocation.h"
#include "builtins.h"
+#include "elements-kind.h"
#include "list.h"
#include "property-details.h"
#include "smart-array-pointer.h"
@@ -40,6 +41,7 @@
#include "mips/constants-mips.h"
#endif
#include "v8checks.h"
+#include "zone.h"
//
@@ -59,6 +61,7 @@
// - JSWeakMap
// - JSRegExp
// - JSFunction
+// - JSModule
// - GlobalObject
// - JSGlobalObject
// - JSBuiltinsObject
@@ -130,40 +133,6 @@
namespace v8 {
namespace internal {
-enum ElementsKind {
- // The "fast" kind for elements that only contain SMI values. Must be first
- // to make it possible to efficiently check maps for this kind.
- FAST_SMI_ONLY_ELEMENTS,
-
- // The "fast" kind for tagged values. Must be second to make it possible to
- // efficiently check maps for this and the FAST_SMI_ONLY_ELEMENTS kind
- // together at once.
- FAST_ELEMENTS,
-
- // The "fast" kind for unwrapped, non-tagged double values.
- FAST_DOUBLE_ELEMENTS,
-
- // The "slow" kind.
- DICTIONARY_ELEMENTS,
- NON_STRICT_ARGUMENTS_ELEMENTS,
- // The "fast" kind for external arrays
- EXTERNAL_BYTE_ELEMENTS,
- EXTERNAL_UNSIGNED_BYTE_ELEMENTS,
- EXTERNAL_SHORT_ELEMENTS,
- EXTERNAL_UNSIGNED_SHORT_ELEMENTS,
- EXTERNAL_INT_ELEMENTS,
- EXTERNAL_UNSIGNED_INT_ELEMENTS,
- EXTERNAL_FLOAT_ELEMENTS,
- EXTERNAL_DOUBLE_ELEMENTS,
- EXTERNAL_PIXEL_ELEMENTS,
-
- // Derived constants from ElementsKind
- FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND = EXTERNAL_BYTE_ELEMENTS,
- LAST_EXTERNAL_ARRAY_ELEMENTS_KIND = EXTERNAL_PIXEL_ELEMENTS,
- FIRST_ELEMENTS_KIND = FAST_SMI_ONLY_ELEMENTS,
- LAST_ELEMENTS_KIND = EXTERNAL_PIXEL_ELEMENTS
-};
-
enum CompareMapMode {
REQUIRE_EXACT_MAP,
ALLOW_ELEMENT_TRANSITION_MAPS
@@ -174,13 +143,6 @@ enum KeyedAccessGrowMode {
ALLOW_JSARRAY_GROWTH
};
-const int kElementsKindCount = LAST_ELEMENTS_KIND - FIRST_ELEMENTS_KIND + 1;
-
-void PrintElementsKind(FILE* out, ElementsKind kind);
-
-inline bool IsMoreGeneralElementsKindTransition(ElementsKind from_kind,
- ElementsKind to_kind);
-
// Setter that skips the write barrier if mode is SKIP_WRITE_BARRIER.
enum WriteBarrierMode { SKIP_WRITE_BARRIER, UPDATE_WRITE_BARRIER };
@@ -208,6 +170,14 @@ enum CreationFlag {
};
+// Indicates whether the search function should expect a sorted or an unsorted
+// array as input.
+enum SearchMode {
+ EXPECT_SORTED,
+ EXPECT_UNSORTED
+};
+
+
// Instance size sentinel for objects of variable size.
const int kVariableSizeSentinel = 0;
@@ -306,6 +276,7 @@ const int kVariableSizeSentinel = 0;
V(JS_DATE_TYPE) \
V(JS_OBJECT_TYPE) \
V(JS_CONTEXT_EXTENSION_OBJECT_TYPE) \
+ V(JS_MODULE_TYPE) \
V(JS_GLOBAL_OBJECT_TYPE) \
V(JS_BUILTINS_OBJECT_TYPE) \
V(JS_GLOBAL_PROXY_TYPE) \
@@ -626,6 +597,7 @@ enum InstanceType {
JS_DATE_TYPE,
JS_OBJECT_TYPE,
JS_CONTEXT_EXTENSION_OBJECT_TYPE,
+ JS_MODULE_TYPE,
JS_GLOBAL_OBJECT_TYPE,
JS_BUILTINS_OBJECT_TYPE,
JS_GLOBAL_PROXY_TYPE,
@@ -677,6 +649,7 @@ const int kExternalArrayTypeCount =
STATIC_CHECK(JS_OBJECT_TYPE == Internals::kJSObjectType);
STATIC_CHECK(FIRST_NONSTRING_TYPE == Internals::kFirstNonstringType);
+STATIC_CHECK(ODDBALL_TYPE == Internals::kOddballType);
STATIC_CHECK(FOREIGN_TYPE == Internals::kForeignType);
@@ -700,12 +673,13 @@ enum CompareResult {
WriteBarrierMode mode = UPDATE_WRITE_BARRIER); \
+class AccessorPair;
class DictionaryElementsAccessor;
class ElementsAccessor;
+class Failure;
class FixedArrayBase;
class ObjectVisitor;
class StringStream;
-class Failure;
struct ValueInfo : public Malloced {
ValueInfo() : type(FIRST_TYPE), ptr(NULL), str(NULL), number(0) { }
@@ -803,6 +777,7 @@ class MaybeObject BASE_EMBEDDED {
V(JSReceiver) \
V(JSObject) \
V(JSContextExtensionObject) \
+ V(JSModule) \
V(Map) \
V(DescriptorArray) \
V(DeoptimizationInputData) \
@@ -812,6 +787,7 @@ class MaybeObject BASE_EMBEDDED {
V(FixedDoubleArray) \
V(Context) \
V(GlobalContext) \
+ V(ModuleContext) \
V(ScopeInfo) \
V(JSFunction) \
V(Code) \
@@ -1386,6 +1362,13 @@ class JSReceiver: public HeapObject {
FORCE_DELETION
};
+ // A non-keyed store is of the form a.x = foo or a["x"] = foo whereas
+ // a keyed store is of the form a[expression] = foo.
+ enum StoreFromKeyed {
+ MAY_BE_STORE_FROM_KEYED,
+ CERTAINLY_NOT_STORE_FROM_KEYED
+ };
+
// Casting.
static inline JSReceiver* cast(Object* obj);
@@ -1395,15 +1378,19 @@ class JSReceiver: public HeapObject {
PropertyAttributes attributes,
StrictModeFlag strict_mode);
// Can cause GC.
- MUST_USE_RESULT MaybeObject* SetProperty(String* key,
- Object* value,
- PropertyAttributes attributes,
- StrictModeFlag strict_mode);
- MUST_USE_RESULT MaybeObject* SetProperty(LookupResult* result,
- String* key,
- Object* value,
- PropertyAttributes attributes,
- StrictModeFlag strict_mode);
+ MUST_USE_RESULT MaybeObject* SetProperty(
+ String* key,
+ Object* value,
+ PropertyAttributes attributes,
+ StrictModeFlag strict_mode,
+ StoreFromKeyed store_from_keyed = MAY_BE_STORE_FROM_KEYED);
+ MUST_USE_RESULT MaybeObject* SetProperty(
+ LookupResult* result,
+ String* key,
+ Object* value,
+ PropertyAttributes attributes,
+ StrictModeFlag strict_mode,
+ StoreFromKeyed store_from_keyed = MAY_BE_STORE_FROM_KEYED);
MUST_USE_RESULT MaybeObject* SetPropertyWithDefinedSetter(JSReceiver* setter,
Object* value);
@@ -1503,13 +1490,19 @@ class JSObject: public JSReceiver {
MUST_USE_RESULT inline MaybeObject* ResetElements();
inline ElementsKind GetElementsKind();
inline ElementsAccessor* GetElementsAccessor();
- inline bool HasFastSmiOnlyElements();
- inline bool HasFastElements();
- // Returns if an object has either FAST_ELEMENT or FAST_SMI_ONLY_ELEMENT
- // elements. TODO(danno): Rename HasFastTypeElements to HasFastElements() and
- // HasFastElements to HasFastObjectElements.
- inline bool HasFastTypeElements();
+ // Returns true if an object has elements of FAST_SMI_ELEMENTS ElementsKind.
+ inline bool HasFastSmiElements();
+ // Returns true if an object has elements of FAST_ELEMENTS ElementsKind.
+ inline bool HasFastObjectElements();
+ // Returns true if an object has elements of FAST_ELEMENTS or
+  // FAST_SMI_ELEMENTS.
+ inline bool HasFastSmiOrObjectElements();
+ // Returns true if an object has elements of FAST_DOUBLE_ELEMENTS
+ // ElementsKind.
inline bool HasFastDoubleElements();
+ // Returns true if an object has elements of FAST_HOLEY_*_ELEMENTS
+ // ElementsKind.
+ inline bool HasFastHoleyElements();
inline bool HasNonStrictArgumentsElements();
inline bool HasDictionaryElements();
inline bool HasExternalPixelElements();
@@ -1551,7 +1544,8 @@ class JSObject: public JSReceiver {
String* key,
Object* value,
PropertyAttributes attributes,
- StrictModeFlag strict_mode);
+ StrictModeFlag strict_mode,
+ StoreFromKeyed store_mode);
MUST_USE_RESULT MaybeObject* SetPropertyWithFailedAccessCheck(
LookupResult* result,
String* name,
@@ -1610,6 +1604,8 @@ class JSObject: public JSReceiver {
MUST_USE_RESULT MaybeObject* DeleteNormalizedProperty(String* name,
DeleteMode mode);
+ MUST_USE_RESULT MaybeObject* OptimizeAsPrototype();
+
// Retrieve interceptors.
InterceptorInfo* GetNamedInterceptor();
InterceptorInfo* GetIndexedInterceptor();
@@ -1636,6 +1632,14 @@ class JSObject: public JSReceiver {
Object* getter,
Object* setter,
PropertyAttributes attributes);
+ // Try to define a single accessor paying attention to map transitions.
+ // Returns a JavaScript null if this was not possible and we have to use the
+ // slow case. Note that we can fail due to allocations, too.
+ MUST_USE_RESULT MaybeObject* DefineFastAccessor(
+ String* name,
+ AccessorComponent component,
+ Object* accessor,
+ PropertyAttributes attributes);
Object* LookupAccessor(String* name, AccessorComponent component);
MUST_USE_RESULT MaybeObject* DefineAccessor(AccessorInfo* info);
@@ -1704,7 +1708,7 @@ class JSObject: public JSReceiver {
static Handle<Object> DeleteElement(Handle<JSObject> obj, uint32_t index);
MUST_USE_RESULT MaybeObject* DeleteElement(uint32_t index, DeleteMode mode);
- inline void ValidateSmiOnlyElements();
+ inline void ValidateElements();
// Makes sure that this object can contain HeapObject as elements.
MUST_USE_RESULT inline MaybeObject* EnsureCanContainHeapObjectElements();
@@ -1716,6 +1720,7 @@ class JSObject: public JSReceiver {
EnsureElementsMode mode);
MUST_USE_RESULT inline MaybeObject* EnsureCanContainElements(
FixedArrayBase* elements,
+ uint32_t length,
EnsureElementsMode mode);
MUST_USE_RESULT MaybeObject* EnsureCanContainElements(
Arguments* arguments,
@@ -1814,10 +1819,10 @@ class JSObject: public JSReceiver {
MUST_USE_RESULT MaybeObject* GetElementWithInterceptor(Object* receiver,
uint32_t index);
- enum SetFastElementsCapacityMode {
- kAllowSmiOnlyElements,
- kForceSmiOnlyElements,
- kDontAllowSmiOnlyElements
+ enum SetFastElementsCapacitySmiMode {
+ kAllowSmiElements,
+ kForceSmiElements,
+ kDontAllowSmiElements
};
// Replace the elements' backing store with fast elements of the given
@@ -1826,7 +1831,7 @@ class JSObject: public JSReceiver {
MUST_USE_RESULT MaybeObject* SetFastElementsCapacityAndLength(
int capacity,
int length,
- SetFastElementsCapacityMode set_capacity_mode);
+ SetFastElementsCapacitySmiMode smi_mode);
MUST_USE_RESULT MaybeObject* SetFastDoubleElementsCapacityAndLength(
int capacity,
int length);
@@ -1855,7 +1860,6 @@ class JSObject: public JSReceiver {
void LocalLookupRealNamedProperty(String* name, LookupResult* result);
void LookupRealNamedProperty(String* name, LookupResult* result);
void LookupRealNamedPropertyInPrototypes(String* name, LookupResult* result);
- void LookupCallbackSetterInPrototypes(String* name, LookupResult* result);
MUST_USE_RESULT MaybeObject* SetElementWithCallbackSetterInPrototypes(
uint32_t index, Object* value, bool* found, StrictModeFlag strict_mode);
void LookupCallback(String* name, LookupResult* result);
@@ -1938,9 +1942,11 @@ class JSObject: public JSReceiver {
PropertyAttributes attributes);
// Add a property to a fast-case object.
- MUST_USE_RESULT MaybeObject* AddFastProperty(String* name,
- Object* value,
- PropertyAttributes attributes);
+ MUST_USE_RESULT MaybeObject* AddFastProperty(
+ String* name,
+ Object* value,
+ PropertyAttributes attributes,
+ StoreFromKeyed store_mode = MAY_BE_STORE_FROM_KEYED);
// Add a property to a slow-case object.
MUST_USE_RESULT MaybeObject* AddSlowProperty(String* name,
@@ -1948,10 +1954,12 @@ class JSObject: public JSReceiver {
PropertyAttributes attributes);
// Add a property to an object.
- MUST_USE_RESULT MaybeObject* AddProperty(String* name,
- Object* value,
- PropertyAttributes attributes,
- StrictModeFlag strict_mode);
+ MUST_USE_RESULT MaybeObject* AddProperty(
+ String* name,
+ Object* value,
+ PropertyAttributes attributes,
+ StrictModeFlag strict_mode,
+ StoreFromKeyed store_mode = MAY_BE_STORE_FROM_KEYED);
// Convert the object to use the canonical dictionary
// representation. If the object is expected to have additional properties
@@ -2071,7 +2079,7 @@ class JSObject: public JSReceiver {
// Maximal number of fast properties for the JSObject. Used to
// restrict the number of map transitions to avoid an explosion in
// the number of maps for objects used as dictionaries.
- inline int MaxFastProperties();
+ inline bool TooManyFastProperties(int properties, StoreFromKeyed store_mode);
// Maximal number of elements (numbered 0 .. kMaxElementCount - 1).
// Also maximal value of JSArray's length property.
@@ -2093,7 +2101,8 @@ class JSObject: public JSReceiver {
static const int kMaxUncheckedOldFastElementsLength = 500;
static const int kInitialMaxFastElementArray = 100000;
- static const int kMaxFastProperties = 12;
+ static const int kFastPropertiesSoftLimit = 12;
+ static const int kMaxFastProperties = 64;
static const int kMaxInstanceSize = 255 * kPointerSize;
// When extending the backing storage for property values, we increase
// its size by more than the 1 entry necessary, so sequentially adding fields
@@ -2140,17 +2149,16 @@ class JSObject: public JSReceiver {
bool check_prototype,
SetPropertyMode set_mode);
- // Searches the prototype chain for a callback setter and sets the property
- // with the setter if it finds one. The '*found' flag indicates whether
- // a setter was found or not.
- // This function can cause GC and can return a failure result with
- // '*found==true'.
- MUST_USE_RESULT MaybeObject* SetPropertyWithCallbackSetterInPrototypes(
+ // Searches the prototype chain for property 'name'. If it is found and
+ // has a setter, invoke it and set '*done' to true. If it is found and is
+ // read-only, reject and set '*done' to true. Otherwise, set '*done' to
+ // false. Can cause GC and can return a failure result with '*done==true'.
+ MUST_USE_RESULT MaybeObject* SetPropertyViaPrototypes(
String* name,
Object* value,
PropertyAttributes attributes,
- bool* found,
- StrictModeFlag strict_mode);
+ StrictModeFlag strict_mode,
+ bool* done);
MUST_USE_RESULT MaybeObject* DeletePropertyPostInterceptor(String* name,
DeleteMode mode);
@@ -2186,6 +2194,7 @@ class JSObject: public JSReceiver {
Object* getter,
Object* setter,
PropertyAttributes attributes);
+ MUST_USE_RESULT MaybeObject* CreateAccessorPairFor(String* name);
MUST_USE_RESULT MaybeObject* DefinePropertyAccessor(
String* name,
Object* getter,
@@ -2417,12 +2426,17 @@ class DescriptorArray: public FixedArray {
// map uses to encode additional bit fields when the descriptor array is not
// yet used.
inline bool IsEmpty();
+ inline bool MayContainTransitions();
+
+ DECL_ACCESSORS(elements_transition_map, Map)
// Returns the number of descriptors in the array.
int number_of_descriptors() {
- ASSERT(length() > kFirstIndex || IsEmpty());
+ ASSERT(length() > kFirstIndex ||
+ length() == kTransitionsIndex ||
+ IsEmpty());
int len = length();
- return len <= kFirstIndex ? 0 : len - kFirstIndex;
+ return len <= kFirstIndex ? 0 : (len - kFirstIndex) / kDescriptorSize;
}
int NextEnumerationIndex() {
@@ -2452,6 +2466,18 @@ class DescriptorArray: public FixedArray {
return bridge->get(kEnumCacheBridgeCacheIndex);
}
+ Object** GetEnumCacheSlot() {
+ ASSERT(HasEnumCache());
+ return HeapObject::RawField(reinterpret_cast<HeapObject*>(this),
+ kEnumerationIndexOffset);
+ }
+
+ Object** GetTransitionsSlot() {
+ ASSERT(elements_transition_map() != NULL);
+ return HeapObject::RawField(reinterpret_cast<HeapObject*>(this),
+ kTransitionsOffset);
+ }
+
// TODO(1399): It should be possible to make room for bit_field3 in the map
// without overloading the instance descriptors field in the map
// (and storing it in the DescriptorArray when the map has one).
@@ -2466,8 +2492,12 @@ class DescriptorArray: public FixedArray {
// Accessors for fetching instance descriptor at descriptor number.
inline String* GetKey(int descriptor_number);
+ inline Object** GetKeySlot(int descriptor_number);
inline Object* GetValue(int descriptor_number);
- inline Smi* GetDetails(int descriptor_number);
+ inline Object** GetValueSlot(int descriptor_number);
+ inline void SetNullValueUnchecked(int descriptor_number, Heap* heap);
+ inline PropertyDetails GetDetails(int descriptor_number);
+ inline void SetDetailsUnchecked(int descriptor_number, Smi* value);
inline PropertyType GetType(int descriptor_number);
inline int GetFieldIndex(int descriptor_number);
inline JSFunction* GetConstantFunction(int descriptor_number);
@@ -2476,8 +2506,15 @@ class DescriptorArray: public FixedArray {
inline bool IsProperty(int descriptor_number);
inline bool IsTransitionOnly(int descriptor_number);
inline bool IsNullDescriptor(int descriptor_number);
- inline bool IsDontEnum(int descriptor_number);
+ // WhitenessWitness is used to prove that a specific descriptor array is white
+ // (unmarked), so incremental write barriers can be skipped because the
+ // marking invariant cannot be broken and slots pointing into evacuation
+ // candidates will be discovered when the object is scanned. A witness is
+ // always stack-allocated right after creating a descriptor array. By
+ // allocating a witness, incremental marking is globally disabled. The witness
+ // is then passed along wherever needed to statically prove that the
+ // descriptor array is known to be white.
class WhitenessWitness {
public:
inline explicit WhitenessWitness(DescriptorArray* array);
@@ -2517,9 +2554,16 @@ class DescriptorArray: public FixedArray {
MUST_USE_RESULT MaybeObject* CopyInsert(Descriptor* descriptor,
TransitionFlag transition_flag);
+  // Indicates whether a descriptor array may be shared between several maps.
+ enum SharedMode {
+ MAY_BE_SHARED,
+ CANNOT_BE_SHARED
+ };
+
// Return a copy of the array with all transitions and null descriptors
// removed. Return a Failure object in case of an allocation failure.
- MUST_USE_RESULT MaybeObject* RemoveTransitions();
+ MUST_USE_RESULT MaybeObject* RemoveTransitions(SharedMode shared_mode);
// Sort the instance descriptors by the hash codes of their keys.
// Does not check for duplicates.
@@ -2547,12 +2591,13 @@ class DescriptorArray: public FixedArray {
// Perform a linear search in the instance descriptors represented
// by this fixed array. len is the number of descriptor indices that are
- // valid. Does not require the descriptors to be sorted.
- int LinearSearch(String* name, int len);
+ // valid.
+ int LinearSearch(SearchMode mode, String* name, int len);
// Allocates a DescriptorArray, but returns the singleton
// empty descriptor array object if number_of_descriptors is 0.
- MUST_USE_RESULT static MaybeObject* Allocate(int number_of_descriptors);
+ MUST_USE_RESULT static MaybeObject* Allocate(int number_of_descriptors,
+ SharedMode shared_mode);
// Casting.
static inline DescriptorArray* cast(Object* obj);
@@ -2561,7 +2606,7 @@ class DescriptorArray: public FixedArray {
static const int kNotFound = -1;
static const int kBitField3StorageIndex = 0;
- static const int kContentArrayIndex = 1;
+ static const int kTransitionsIndex = 1;
static const int kEnumerationIndexIndex = 2;
static const int kFirstIndex = 3;
@@ -2573,8 +2618,8 @@ class DescriptorArray: public FixedArray {
// Layout description.
static const int kBitField3StorageOffset = FixedArray::kHeaderSize;
- static const int kContentArrayOffset = kBitField3StorageOffset + kPointerSize;
- static const int kEnumerationIndexOffset = kContentArrayOffset + kPointerSize;
+ static const int kTransitionsOffset = kBitField3StorageOffset + kPointerSize;
+ static const int kEnumerationIndexOffset = kTransitionsOffset + kPointerSize;
static const int kFirstOffset = kEnumerationIndexOffset + kPointerSize;
// Layout description for the bridge array.
@@ -2582,6 +2627,12 @@ class DescriptorArray: public FixedArray {
static const int kEnumCacheBridgeCacheOffset =
kEnumCacheBridgeEnumOffset + kPointerSize;
+ // Layout of descriptor.
+ static const int kDescriptorKey = 0;
+ static const int kDescriptorDetails = 1;
+ static const int kDescriptorValue = 2;
+ static const int kDescriptorSize = 3;
+
#ifdef OBJECT_PRINT
// Print all the descriptors.
inline void PrintDescriptors() {
@@ -2594,6 +2645,9 @@ class DescriptorArray: public FixedArray {
// Is the descriptor array sorted and without duplicates?
bool IsSortedNoDuplicates();
+ // Is the descriptor array consistent with the back pointers in targets?
+ bool IsConsistentWithBackPointers(Map* current_map);
+
// Are two DescriptorArrays equal?
bool IsEqualTo(DescriptorArray* other);
#endif
@@ -2619,21 +2673,23 @@ class DescriptorArray: public FixedArray {
// Conversion from descriptor number to array indices.
static int ToKeyIndex(int descriptor_number) {
- return descriptor_number+kFirstIndex;
+ return kFirstIndex +
+ (descriptor_number * kDescriptorSize) +
+ kDescriptorKey;
}
static int ToDetailsIndex(int descriptor_number) {
- return (descriptor_number << 1) + 1;
+ return kFirstIndex +
+ (descriptor_number * kDescriptorSize) +
+ kDescriptorDetails;
}
static int ToValueIndex(int descriptor_number) {
- return descriptor_number << 1;
+ return kFirstIndex +
+ (descriptor_number * kDescriptorSize) +
+ kDescriptorValue;
}
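  // Worked example of the layout above (illustration, not from the patch):
  // with kFirstIndex == 3 and kDescriptorSize == 3, descriptor 0 occupies
  // slots 3..5 (key, details, value) and descriptor 1 occupies slots 6..8, so
  // an array holding two descriptors has length 3 + 2 * 3 == 9 and
  // number_of_descriptors() returns (9 - 3) / 3 == 2.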
- bool is_null_descriptor(int descriptor_number) {
- return PropertyDetails(GetDetails(descriptor_number)).type() ==
- NULL_DESCRIPTOR;
- }
// Swap operation on FixedArray without using write barriers.
static inline void NoIncrementalWriteBarrierSwap(
FixedArray* array, int first, int second);
@@ -2642,9 +2698,6 @@ class DescriptorArray: public FixedArray {
inline void NoIncrementalWriteBarrierSwapDescriptors(
int first, int second);
- FixedArray* GetContentArray() {
- return FixedArray::cast(get(kContentArrayIndex));
- }
DISALLOW_IMPLICIT_CONSTRUCTORS(DescriptorArray);
};
@@ -3413,13 +3466,13 @@ class ScopeInfo : public FixedArray {
// otherwise returns a value < 0. The name must be a symbol (canonicalized).
int ParameterIndex(String* name);
- // Lookup support for serialized scope info. Returns the
- // function context slot index if the function name is present (named
+ // Lookup support for serialized scope info. Returns the function context
+ // slot index if the function name is present and context-allocated (named
// function expressions, only), otherwise returns a value < 0. The name
// must be a symbol (canonicalized).
int FunctionContextSlotIndex(String* name, VariableMode* mode);
- static Handle<ScopeInfo> Create(Scope* scope);
+ static Handle<ScopeInfo> Create(Scope* scope, Zone* zone);
// Serializes empty scope info.
static ScopeInfo* Empty();
@@ -4243,11 +4296,6 @@ class Code: public HeapObject {
inline bool is_compiled_optimizable();
inline void set_compiled_optimizable(bool value);
- // [has_self_optimization_header]: For FUNCTION kind, tells if it has
- // a self-optimization header.
- inline bool has_self_optimization_header();
- inline void set_self_optimization_header(bool value);
-
// [allow_osr_at_loop_nesting_level]: For FUNCTION kind, tells for
// how long the function has been marked for OSR and therefore which
// level of loop nesting we are willing to do on-stack replacement
@@ -4294,6 +4342,11 @@ class Code: public HeapObject {
inline byte compare_state();
inline void set_compare_state(byte value);
+ // [compare_operation]: For kind COMPARE_IC tells what compare operation the
+ // stub was generated for.
+ inline byte compare_operation();
+ inline void set_compare_operation(byte value);
+
// [to_boolean_foo]: For kind TO_BOOLEAN_IC tells what state the stub is in.
inline byte to_boolean_state();
inline void set_to_boolean_state(byte value);
@@ -4303,6 +4356,8 @@ class Code: public HeapObject {
inline bool has_function_cache();
inline void set_has_function_cache(bool flag);
+ bool allowed_in_shared_map_code_cache();
+
// Get the safepoint entry for the given pc.
SafepointEntry GetSafepointEntry(Address pc);
@@ -4429,6 +4484,7 @@ class Code: public HeapObject {
void CodeVerify();
#endif
void ClearInlineCaches();
+ void ClearTypeFeedbackCells(Heap* heap);
// Max loop nesting marker used to postpose OSR. We don't take loop
// nesting that is deeper than 5 levels into account.
@@ -4474,10 +4530,11 @@ class Code: public HeapObject {
public BitField<bool, 0, 1> {}; // NOLINT
class FullCodeFlagsHasDebugBreakSlotsField: public BitField<bool, 1, 1> {};
class FullCodeFlagsIsCompiledOptimizable: public BitField<bool, 2, 1> {};
- class FullCodeFlagsHasSelfOptimizationHeader: public BitField<bool, 3, 1> {};
static const int kBinaryOpReturnTypeOffset = kBinaryOpTypeOffset + 1;
+ static const int kCompareOperationOffset = kCompareStateOffset + 1;
+
static const int kAllowOSRAtLoopNestingLevelOffset = kFullCodeFlags + 1;
static const int kProfilerTicksOffset = kAllowOSRAtLoopNestingLevelOffset + 1;
@@ -4631,17 +4688,21 @@ class Map: public HeapObject {
}
// Tells whether the instance has fast elements that are only Smis.
- inline bool has_fast_smi_only_elements() {
- return elements_kind() == FAST_SMI_ONLY_ELEMENTS;
+ inline bool has_fast_smi_elements() {
+ return IsFastSmiElementsKind(elements_kind());
}
// Tells whether the instance has fast elements.
- inline bool has_fast_elements() {
- return elements_kind() == FAST_ELEMENTS;
+ inline bool has_fast_object_elements() {
+ return IsFastObjectElementsKind(elements_kind());
+ }
+
+ inline bool has_fast_smi_or_object_elements() {
+ return IsFastSmiOrObjectElementsKind(elements_kind());
}
inline bool has_fast_double_elements() {
- return elements_kind() == FAST_DOUBLE_ELEMENTS;
+ return IsFastDoubleElementsKind(elements_kind());
}
inline bool has_non_strict_arguments_elements() {
@@ -4649,13 +4710,11 @@ class Map: public HeapObject {
}
inline bool has_external_array_elements() {
- ElementsKind kind(elements_kind());
- return kind >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
- kind <= LAST_EXTERNAL_ARRAY_ELEMENTS_KIND;
+ return IsExternalArrayElementsKind(elements_kind());
}
inline bool has_dictionary_elements() {
- return elements_kind() == DICTIONARY_ELEMENTS;
+ return IsDictionaryElementsKind(elements_kind());
}
inline bool has_slow_elements_kind() {
@@ -4666,6 +4725,9 @@ class Map: public HeapObject {
static bool IsValidElementsTransition(ElementsKind from_kind,
ElementsKind to_kind);
+ inline Map* elements_transition_map();
+ inline void set_elements_transition_map(Map* transitioned_map);
+
// Tells whether the map is attached to SharedFunctionInfo
// (for inobject slack tracking).
inline void set_attached_to_shared_function_info(bool value);
@@ -4676,9 +4738,16 @@ class Map: public HeapObject {
// behavior. If true, the map should never be modified, instead a clone
// should be created and modified.
inline void set_is_shared(bool value);
-
inline bool is_shared();
+ // Tells whether the map is used for an object that is a prototype for another
+ // object or is the prototype on a function. Such maps are made faster by
+ // tweaking the heuristics that distinguish between regular object-oriented
+ // objects and the objects that are being used as hash maps. This flag is
+ // for optimization, not correctness.
+ inline void set_used_for_prototype(bool value);
+ inline bool used_for_prototype();
+
// Tells whether the instance needs security checks when accessing its
// properties.
inline void set_is_access_check_needed(bool access_check_needed);
@@ -4706,19 +4775,30 @@ class Map: public HeapObject {
// [stub cache]: contains stubs compiled for this map.
DECL_ACCESSORS(code_cache, Object)
+ // [back pointer]: points back to the parent map from which a transition
+ // leads to this map. The field overlaps with prototype transitions and the
+ // back pointer will be moved into the prototype transitions array if
+ // required.
+ inline Object* GetBackPointer();
+ inline void SetBackPointer(Object* value,
+ WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
+
// [prototype transitions]: cache of prototype transitions.
// Prototype transition is a transition that happens
// when we change object's prototype to a new one.
// Cache format:
// 0: finger - index of the first free cell in the cache
- // 1 + 2 * i: prototype
- // 2 + 2 * i: target map
+ // 1: back pointer that overlaps with prototype transitions field.
+ // 2 + 2 * i: prototype
+ // 3 + 2 * i: target map
DECL_ACCESSORS(prototype_transitions, FixedArray)
- inline FixedArray* unchecked_prototype_transitions();
+ inline void init_prototype_transitions(Object* undefined);
+ inline HeapObject* unchecked_prototype_transitions();
- static const int kProtoTransitionHeaderSize = 1;
+ static const int kProtoTransitionHeaderSize = 2;
static const int kProtoTransitionNumberOfEntriesOffset = 0;
+ static const int kProtoTransitionBackPointerOffset = 1;
static const int kProtoTransitionElementsPerEntry = 2;
static const int kProtoTransitionPrototypeOffset = 0;
static const int kProtoTransitionMapOffset = 1;
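  // Worked example of the cache layout above (illustration, not from the
  // patch): with a two-slot header (finger and back pointer) and two slots per
  // entry, entry i stores its prototype at index 2 + 2 * i and its target map
  // at index 3 + 2 * i, which is what the offset constants above encode.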
@@ -4751,7 +4831,8 @@ class Map: public HeapObject {
// Returns a copy of the map, with all transitions dropped from the
// instance descriptors.
- MUST_USE_RESULT MaybeObject* CopyDropTransitions();
+ MUST_USE_RESULT MaybeObject* CopyDropTransitions(
+ DescriptorArray::SharedMode shared_mode);
// Returns the property index for name (only valid for FAST MODE).
int PropertyIndexFor(String* name);
@@ -4790,25 +4871,10 @@ class Map: public HeapObject {
// Removes a code object from the code cache at the given index.
void RemoveFromCodeCache(String* name, Code* code, int index);
- // For every transition in this map, makes the transition's
- // target's prototype pointer point back to this map.
- // This is undone in MarkCompactCollector::ClearNonLiveTransitions().
- void CreateBackPointers();
-
- void CreateOneBackPointer(Object* transition_target);
-
- // Set all map transitions from this map to dead maps to null.
- // Also, restore the original prototype on the targets of these
- // transitions, so that we do not process this map again while
- // following back pointers.
- void ClearNonLiveTransitions(Heap* heap, Object* real_prototype);
-
- // Restore a possible back pointer in the prototype field of object.
- // Return true in that case and false otherwise. Set *keep_entry to
- // true when a live map transition has been found.
- bool RestoreOneBackPointer(Object* object,
- Object* real_prototype,
- bool* keep_entry);
+ // Set all map transitions from this map to dead maps to null. Also clear
+ // back pointers in transition targets so that we do not process this map
+ // again while following back pointers.
+ void ClearNonLiveTransitions(Heap* heap);
// Computes a hash value for this map, to be used in HashTables and such.
int Hash();
@@ -4819,23 +4885,15 @@ class Map: public HeapObject {
// The "shared" flags of both this map and |other| are ignored.
bool EquivalentToForNormalization(Map* other, PropertyNormalizationMode mode);
- // Returns the contents of this map's descriptor array for the given string.
- // May return NULL. |safe_to_add_transition| is set to false and NULL
- // is returned if adding transitions is not allowed.
- Object* GetDescriptorContents(String* sentinel_name,
- bool* safe_to_add_transitions);
-
// Returns the map that this map transitions to if its elements_kind
// is changed to |elements_kind|, or NULL if no such map is cached yet.
// |safe_to_add_transitions| is set to false if adding transitions is not
// allowed.
- Map* LookupElementsTransitionMap(ElementsKind elements_kind,
- bool* safe_to_add_transition);
+ Map* LookupElementsTransitionMap(ElementsKind elements_kind);
- // Adds an entry to this map's descriptor array for a transition to
- // |transitioned_map| when its elements_kind is changed to |elements_kind|.
- MUST_USE_RESULT MaybeObject* AddElementsTransition(
- ElementsKind elements_kind, Map* transitioned_map);
+ // Adds a new transitions for changing the elements kind to |elements_kind|.
+ MUST_USE_RESULT MaybeObject* CreateNextElementsTransition(
+ ElementsKind elements_kind);
// Returns the transitioned map for this map with the most generic
// elements_kind that's found in |candidates|, or null handle if no match is
@@ -4843,6 +4901,14 @@ class Map: public HeapObject {
Handle<Map> FindTransitionedMap(MapHandleList* candidates);
Map* FindTransitionedMap(MapList* candidates);
+ // Zaps the contents of backing data structures in debug mode. Note that the
+ // heap verifier (i.e. VerifyMarkingVisitor) relies on zapping of objects
+ // holding weak references when incremental marking is used, because it also
+ // iterates over objects that are otherwise unreachable.
+#ifdef DEBUG
+ void ZapInstanceDescriptors();
+ void ZapPrototypeTransitions();
+#endif
// Dispatched behavior.
#ifdef OBJECT_PRINT
@@ -4863,9 +4929,18 @@ class Map: public HeapObject {
void TraverseTransitionTree(TraverseCallback callback, void* data);
+ // When you set the prototype of an object using the __proto__ accessor you
+ // need a new map for the object (the prototype is stored in the map). In
+ // order not to multiply maps unnecessarily we store these as transitions in
+ // the original map. That way we can transition to the same map if the same
+ // prototype is set, rather than creating a new map every time. The
+ // transitions are in the form of a map where the keys are prototype objects
+  // and the values are the maps that are transitioned to. The special key
+ // the_hole denotes the map we should transition to when the
+ // used_for_prototype flag is set.
static const int kMaxCachedPrototypeTransitions = 256;
- Object* GetPrototypeTransition(Object* prototype);
+ Map* GetPrototypeTransition(Object* prototype);
MUST_USE_RESULT MaybeObject* PutPrototypeTransition(Object* prototype,
Map* map);
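The comment above describes a per-map cache keyed by prototype object: GetPrototypeTransition looks up a previously created map for a given prototype, and PutPrototypeTransition records one so that repeated __proto__ writes with the same value reuse a single map. A self-contained C++ illustration of that caching idea (a std::map stands in for the FixedArray-backed cache; this is not V8's implementation):

    #include <map>

    struct Prototype;                 // stand-in for a JS prototype object
    struct ObjectMap {                // stand-in for v8::internal::Map
      // prototype -> map to use after __proto__ is set to that prototype
      std::map<const Prototype*, ObjectMap*> prototype_transitions;

      ObjectMap* TransitionToPrototype(const Prototype* proto) {
        std::map<const Prototype*, ObjectMap*>::iterator it =
            prototype_transitions.find(proto);
        if (it != prototype_transitions.end()) return it->second;  // reuse it
        ObjectMap* fresh = new ObjectMap();    // first time: make a new map
        prototype_transitions[proto] = fresh;  // and remember the transition
        return fresh;
      }
    };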
@@ -4890,16 +4965,17 @@ class Map: public HeapObject {
kConstructorOffset + kPointerSize;
static const int kCodeCacheOffset =
kInstanceDescriptorsOrBitField3Offset + kPointerSize;
- static const int kPrototypeTransitionsOffset =
+ static const int kPrototypeTransitionsOrBackPointerOffset =
kCodeCacheOffset + kPointerSize;
- static const int kPadStart = kPrototypeTransitionsOffset + kPointerSize;
+ static const int kPadStart =
+ kPrototypeTransitionsOrBackPointerOffset + kPointerSize;
static const int kSize = MAP_POINTER_ALIGN(kPadStart);
// Layout of pointer fields. Heap iteration code relies on them
// being continuously allocated.
static const int kPointerFieldsBeginOffset = Map::kPrototypeOffset;
static const int kPointerFieldsEndOffset =
- Map::kPrototypeTransitionsOffset + kPointerSize;
+ kPrototypeTransitionsOrBackPointerOffset + kPointerSize;
// Byte offsets within kInstanceSizesOffset.
static const int kInstanceSizeOffset = kInstanceSizesOffset + 0;
@@ -4932,37 +5008,38 @@ class Map: public HeapObject {
// Bit positions for bit field 2
static const int kIsExtensible = 0;
- static const int kFunctionWithPrototype = 1;
- static const int kStringWrapperSafeForDefaultValueOf = 2;
- static const int kAttachedToSharedFunctionInfo = 3;
+ static const int kStringWrapperSafeForDefaultValueOf = 1;
+ static const int kAttachedToSharedFunctionInfo = 2;
// No bits can be used after kElementsKindFirstBit, they are all reserved for
// storing ElementKind.
- static const int kElementsKindShift = 4;
- static const int kElementsKindBitCount = 4;
+ static const int kElementsKindShift = 3;
+ static const int kElementsKindBitCount = 5;
// Derived values from bit field 2
static const int kElementsKindMask = (-1 << kElementsKindShift) &
((1 << (kElementsKindShift + kElementsKindBitCount)) - 1);
static const int8_t kMaximumBitField2FastElementValue = static_cast<int8_t>(
(FAST_ELEMENTS + 1) << Map::kElementsKindShift) - 1;
- static const int8_t kMaximumBitField2FastSmiOnlyElementValue =
- static_cast<int8_t>((FAST_SMI_ONLY_ELEMENTS + 1) <<
+ static const int8_t kMaximumBitField2FastSmiElementValue =
+ static_cast<int8_t>((FAST_SMI_ELEMENTS + 1) <<
+ Map::kElementsKindShift) - 1;
+ static const int8_t kMaximumBitField2FastHoleyElementValue =
+ static_cast<int8_t>((FAST_HOLEY_ELEMENTS + 1) <<
+ Map::kElementsKindShift) - 1;
+ static const int8_t kMaximumBitField2FastHoleySmiElementValue =
+ static_cast<int8_t>((FAST_HOLEY_SMI_ELEMENTS + 1) <<
Map::kElementsKindShift) - 1;
// Bit positions for bit field 3
static const int kIsShared = 0;
-
- // Layout of the default cache. It holds alternating name and code objects.
- static const int kCodeCacheEntrySize = 2;
- static const int kCodeCacheEntryNameOffset = 0;
- static const int kCodeCacheEntryCodeOffset = 1;
+ static const int kFunctionWithPrototype = 1;
+ static const int kUsedForPrototype = 2;
typedef FixedBodyDescriptor<kPointerFieldsBeginOffset,
kPointerFieldsEndOffset,
kSize> BodyDescriptor;
private:
- String* elements_transition_sentinel_name();
DISALLOW_IMPLICIT_CONSTRUCTORS(Map);
};
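The elements kind field in bit_field2 moves from shift 4/4 bits to shift 3/5 bits, presumably because the additional FAST_HOLEY_* kinds push the number of ElementsKind values past 16. Working the mask expression through with the new values:

    kElementsKindShift = 3, kElementsKindBitCount = 5
    (-1 << 3)            = ...1111 1000
    (1 << (3 + 5)) - 1   = 0000 1111 1111  (0xFF)
    kElementsKindMask    = 0000 1111 1000  (0xF8, i.e. bits 3..7 of bit_field2)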
@@ -5335,8 +5412,10 @@ class SharedFunctionInfo: public HeapObject {
// A counter used to determine when to stress the deoptimizer with a
// deopt.
- inline int deopt_counter();
- inline void set_deopt_counter(int counter);
+ inline int stress_deopt_counter();
+ inline void set_stress_deopt_counter(int counter);
+
+ inline int profiler_ticks();
// Inline cache age is used to infer whether the function survived a context
// disposal or not. In the former case we reset the opt_count.
@@ -5462,9 +5541,26 @@ class SharedFunctionInfo: public HeapObject {
bool HasSourceCode();
Handle<Object> GetSourceCode();
+ // Number of times the function was optimized.
inline int opt_count();
inline void set_opt_count(int opt_count);
+ // Number of times the function was deoptimized.
+ inline void set_deopt_count(int value);
+ inline int deopt_count();
+ inline void increment_deopt_count();
+
+  // Number of times we tried to re-enable optimization after it
+  // was disabled due to a high number of deoptimizations.
+ inline void set_opt_reenable_tries(int value);
+ inline int opt_reenable_tries();
+
+ inline void TryReenableOptimization();
+
+ // Stores deopt_count, opt_reenable_tries and ic_age as bit-fields.
+ inline void set_counters(int value);
+ inline int counters();
+
// Source size of this function.
int SourceSize();
@@ -5521,13 +5617,14 @@ class SharedFunctionInfo: public HeapObject {
kInferredNameOffset + kPointerSize;
static const int kThisPropertyAssignmentsOffset =
kInitialMapOffset + kPointerSize;
- // ic_age is a Smi field. It could be grouped with another Smi field into a
- // PSEUDO_SMI_ACCESSORS pair (on x64), if one becomes available.
- static const int kICAgeOffset = kThisPropertyAssignmentsOffset + kPointerSize;
+ // ast_node_count is a Smi field. It could be grouped with another Smi field
+ // into a PSEUDO_SMI_ACCESSORS pair (on x64), if one becomes available.
+ static const int kAstNodeCountOffset =
+ kThisPropertyAssignmentsOffset + kPointerSize;
#if V8_HOST_ARCH_32_BIT
// Smi fields.
static const int kLengthOffset =
- kICAgeOffset + kPointerSize;
+ kAstNodeCountOffset + kPointerSize;
static const int kFormalParameterCountOffset = kLengthOffset + kPointerSize;
static const int kExpectedNofPropertiesOffset =
kFormalParameterCountOffset + kPointerSize;
@@ -5545,12 +5642,11 @@ class SharedFunctionInfo: public HeapObject {
kCompilerHintsOffset + kPointerSize;
static const int kOptCountOffset =
kThisPropertyAssignmentsCountOffset + kPointerSize;
- static const int kAstNodeCountOffset = kOptCountOffset + kPointerSize;
- static const int kDeoptCounterOffset = kAstNodeCountOffset + kPointerSize;
-
+ static const int kCountersOffset = kOptCountOffset + kPointerSize;
+ static const int kStressDeoptCounterOffset = kCountersOffset + kPointerSize;
// Total size.
- static const int kSize = kDeoptCounterOffset + kPointerSize;
+ static const int kSize = kStressDeoptCounterOffset + kPointerSize;
#else
// The only reason to use smi fields instead of int fields
// is to allow iteration without maps decoding during
@@ -5562,7 +5658,7 @@ class SharedFunctionInfo: public HeapObject {
// word is not set and thus this word cannot be treated as pointer
// to HeapObject during old space traversal.
static const int kLengthOffset =
- kICAgeOffset + kPointerSize;
+ kAstNodeCountOffset + kPointerSize;
static const int kFormalParameterCountOffset =
kLengthOffset + kIntSize;
@@ -5586,11 +5682,11 @@ class SharedFunctionInfo: public HeapObject {
static const int kOptCountOffset =
kThisPropertyAssignmentsCountOffset + kIntSize;
- static const int kAstNodeCountOffset = kOptCountOffset + kIntSize;
- static const int kDeoptCounterOffset = kAstNodeCountOffset + kIntSize;
+ static const int kCountersOffset = kOptCountOffset + kIntSize;
+ static const int kStressDeoptCounterOffset = kCountersOffset + kIntSize;
// Total size.
- static const int kSize = kDeoptCounterOffset + kIntSize;
+ static const int kSize = kStressDeoptCounterOffset + kIntSize;
#endif
@@ -5643,6 +5739,10 @@ class SharedFunctionInfo: public HeapObject {
kCompilerHintsCount // Pseudo entry
};
+ class DeoptCountBits: public BitField<int, 0, 4> {};
+ class OptReenableTriesBits: public BitField<int, 4, 18> {};
+ class ICAgeBits: public BitField<int, 22, 8> {};
+
private:
#if V8_HOST_ARCH_32_BIT
// On 32 bit platforms, compiler hints is a smi.
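DeoptCountBits, OptReenableTriesBits and ICAgeBits pack the new deopt_count, opt_reenable_tries and ic_age values into the single Smi-sized counters field (4 + 18 + 8 = 30 bits). A self-contained sketch of that kind of bit-field packing, using plain shifts and masks rather than V8's BitField template:

    #include <stdint.h>

    static const int kDeoptCountShift = 0,  kDeoptCountSize = 4;
    static const int kReenableShift   = 4,  kReenableSize   = 18;
    static const int kICAgeShift      = 22, kICAgeSize      = 8;

    // Assumes each value is within its field's range.
    static int32_t EncodeCounters(int deopt_count, int reenable_tries, int ic_age) {
      return (deopt_count    << kDeoptCountShift) |
             (reenable_tries << kReenableShift)   |
             (ic_age         << kICAgeShift);
    }

    static int DecodeDeoptCount(int32_t counters) {
      return (counters >> kDeoptCountShift) & ((1 << kDeoptCountSize) - 1);
    }
    static int DecodeICAge(int32_t counters) {
      return (counters >> kICAgeShift) & ((1 << kICAgeSize) - 1);
    }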
@@ -5696,6 +5796,35 @@ class SharedFunctionInfo: public HeapObject {
};
+// Representation for module instance objects.
+class JSModule: public JSObject {
+ public:
+ // [context]: the context holding the module's locals, or undefined if none.
+ DECL_ACCESSORS(context, Object)
+
+ // Casting.
+ static inline JSModule* cast(Object* obj);
+
+ // Dispatched behavior.
+#ifdef OBJECT_PRINT
+ inline void JSModulePrint() {
+ JSModulePrint(stdout);
+ }
+ void JSModulePrint(FILE* out);
+#endif
+#ifdef DEBUG
+ void JSModuleVerify();
+#endif
+
+ // Layout description.
+ static const int kContextOffset = JSObject::kHeaderSize;
+ static const int kSize = kContextOffset + kPointerSize;
+
+ private:
+ DISALLOW_IMPLICIT_CONSTRUCTORS(JSModule);
+};
+
+
// JSFunction describes JavaScript functions.
class JSFunction: public JSObject {
public:
@@ -6095,7 +6224,7 @@ class JSDate: public JSObject {
// Returns the date field with the specified index.
// See FieldIndex for the list of date fields.
- static MaybeObject* GetField(Object* date, Smi* index);
+ static Object* GetField(Object* date, Smi* index);
void SetValue(Object* value, bool is_value_nan);
@@ -7199,6 +7328,10 @@ class SeqAsciiString: public SeqString {
unsigned* offset,
unsigned chars);
+#ifdef DEBUG
+ void SeqAsciiStringVerify();
+#endif
+
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(SeqAsciiString);
};
@@ -7612,6 +7745,10 @@ class Oddball: public HeapObject {
kToNumberOffset + kPointerSize,
kSize> BodyDescriptor;
+ STATIC_CHECK(kKindOffset == Internals::kOddballKindOffset);
+ STATIC_CHECK(kNull == Internals::kNullOddballKind);
+ STATIC_CHECK(kUndefined == Internals::kUndefinedOddballKind);
+
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(Oddball);
};
@@ -7671,23 +7808,28 @@ class JSProxy: public JSReceiver {
uint32_t index);
MUST_USE_RESULT MaybeObject* SetPropertyWithHandler(
+ JSReceiver* receiver,
String* name,
Object* value,
PropertyAttributes attributes,
StrictModeFlag strict_mode);
MUST_USE_RESULT MaybeObject* SetElementWithHandler(
+ JSReceiver* receiver,
uint32_t index,
Object* value,
StrictModeFlag strict_mode);
- // If the handler defines an accessor property, invoke its setter
- // (or throw if only a getter exists) and set *found to true. Otherwise false.
- MUST_USE_RESULT MaybeObject* SetPropertyWithHandlerIfDefiningSetter(
+ // If the handler defines an accessor property with a setter, invoke it.
+ // If it defines an accessor property without a setter, or a data property
+ // that is read-only, throw. In all these cases set '*done' to true,
+ // otherwise set it to false.
+ MUST_USE_RESULT MaybeObject* SetPropertyViaPrototypesWithHandler(
+ JSReceiver* receiver,
String* name,
Object* value,
PropertyAttributes attributes,
StrictModeFlag strict_mode,
- bool* found);
+ bool* done);
MUST_USE_RESULT MaybeObject* DeletePropertyWithHandler(
String* name,
@@ -8014,6 +8156,7 @@ class AccessorInfo: public Struct {
DECL_ACCESSORS(data, Object)
DECL_ACCESSORS(name, Object)
DECL_ACCESSORS(flag, Smi)
+ DECL_ACCESSORS(expected_receiver_type, Object)
inline bool all_can_read();
inline void set_all_can_read(bool value);
@@ -8027,6 +8170,9 @@ class AccessorInfo: public Struct {
inline PropertyAttributes property_attributes();
inline void set_property_attributes(PropertyAttributes attributes);
+ // Checks whether the given receiver is compatible with this accessor.
+ inline bool IsCompatibleReceiver(Object* receiver);
+
static inline AccessorInfo* cast(Object* obj);
#ifdef OBJECT_PRINT
@@ -8044,7 +8190,8 @@ class AccessorInfo: public Struct {
static const int kDataOffset = kSetterOffset + kPointerSize;
static const int kNameOffset = kDataOffset + kPointerSize;
static const int kFlagOffset = kNameOffset + kPointerSize;
- static const int kSize = kFlagOffset + kPointerSize;
+ static const int kExpectedReceiverTypeOffset = kFlagOffset + kPointerSize;
+ static const int kSize = kExpectedReceiverTypeOffset + kPointerSize;
private:
// Bit positions in flag.
@@ -8072,6 +8219,18 @@ class AccessorPair: public Struct {
MUST_USE_RESULT MaybeObject* CopyWithoutTransitions();
+ Object* get(AccessorComponent component) {
+ return component == ACCESSOR_GETTER ? getter() : setter();
+ }
+
+ void set(AccessorComponent component, Object* value) {
+ if (component == ACCESSOR_GETTER) {
+ set_getter(value);
+ } else {
+ set_setter(value);
+ }
+ }
+
// Note: Returns undefined instead in case of a hole.
Object* GetComponent(AccessorComponent component);
diff --git a/deps/v8/src/parser.cc b/deps/v8/src/parser.cc
index da680411a9..7c51b694c8 100644
--- a/deps/v8/src/parser.cc
+++ b/deps/v8/src/parser.cc
@@ -86,8 +86,8 @@ class PositionStack {
};
-RegExpBuilder::RegExpBuilder()
- : zone_(Isolate::Current()->zone()),
+RegExpBuilder::RegExpBuilder(Zone* zone)
+ : zone_(zone),
pending_empty_(false),
characters_(NULL),
terms_(),
@@ -103,7 +103,7 @@ void RegExpBuilder::FlushCharacters() {
if (characters_ != NULL) {
RegExpTree* atom = new(zone()) RegExpAtom(characters_->ToConstVector());
characters_ = NULL;
- text_.Add(atom);
+ text_.Add(atom, zone());
LAST(ADD_ATOM);
}
}
@@ -115,12 +115,12 @@ void RegExpBuilder::FlushText() {
if (num_text == 0) {
return;
} else if (num_text == 1) {
- terms_.Add(text_.last());
+ terms_.Add(text_.last(), zone());
} else {
- RegExpText* text = new(zone()) RegExpText();
+ RegExpText* text = new(zone()) RegExpText(zone());
for (int i = 0; i < num_text; i++)
- text_.Get(i)->AppendToText(text);
- terms_.Add(text);
+ text_.Get(i)->AppendToText(text, zone());
+ terms_.Add(text, zone());
}
text_.Clear();
}
@@ -129,9 +129,9 @@ void RegExpBuilder::FlushText() {
void RegExpBuilder::AddCharacter(uc16 c) {
pending_empty_ = false;
if (characters_ == NULL) {
- characters_ = new(zone()) ZoneList<uc16>(4);
+ characters_ = new(zone()) ZoneList<uc16>(4, zone());
}
- characters_->Add(c);
+ characters_->Add(c, zone());
LAST(ADD_CHAR);
}
@@ -148,10 +148,10 @@ void RegExpBuilder::AddAtom(RegExpTree* term) {
}
if (term->IsTextElement()) {
FlushCharacters();
- text_.Add(term);
+ text_.Add(term, zone());
} else {
FlushText();
- terms_.Add(term);
+ terms_.Add(term, zone());
}
LAST(ADD_ATOM);
}
@@ -159,7 +159,7 @@ void RegExpBuilder::AddAtom(RegExpTree* term) {
void RegExpBuilder::AddAssertion(RegExpTree* assert) {
FlushText();
- terms_.Add(assert);
+ terms_.Add(assert, zone());
LAST(ADD_ASSERT);
}
@@ -178,9 +178,9 @@ void RegExpBuilder::FlushTerms() {
} else if (num_terms == 1) {
alternative = terms_.last();
} else {
- alternative = new(zone()) RegExpAlternative(terms_.GetList());
+ alternative = new(zone()) RegExpAlternative(terms_.GetList(zone()));
}
- alternatives_.Add(alternative);
+ alternatives_.Add(alternative, zone());
terms_.Clear();
LAST(ADD_NONE);
}
@@ -195,7 +195,7 @@ RegExpTree* RegExpBuilder::ToRegExp() {
if (num_alternatives == 1) {
return alternatives_.last();
}
- return new(zone()) RegExpDisjunction(alternatives_.GetList());
+ return new(zone()) RegExpDisjunction(alternatives_.GetList(zone()));
}
@@ -214,7 +214,7 @@ void RegExpBuilder::AddQuantifierToAtom(int min,
int num_chars = char_vector.length();
if (num_chars > 1) {
Vector<const uc16> prefix = char_vector.SubVector(0, num_chars - 1);
- text_.Add(new(zone()) RegExpAtom(prefix));
+ text_.Add(new(zone()) RegExpAtom(prefix), zone());
char_vector = char_vector.SubVector(num_chars - 1, num_chars);
}
characters_ = NULL;
@@ -233,7 +233,7 @@ void RegExpBuilder::AddQuantifierToAtom(int min,
if (min == 0) {
return;
}
- terms_.Add(atom);
+ terms_.Add(atom, zone());
return;
}
} else {
@@ -241,7 +241,7 @@ void RegExpBuilder::AddQuantifierToAtom(int min,
UNREACHABLE();
return;
}
- terms_.Add(new(zone()) RegExpQuantifier(min, max, type, atom));
+ terms_.Add(new(zone()) RegExpQuantifier(min, max, type, atom), zone());
LAST(ADD_TERM);
}
@@ -270,7 +270,7 @@ Handle<String> Parser::LookupCachedSymbol(int symbol_id) {
if (symbol_cache_.length() <= symbol_id) {
// Increase length to index + 1.
symbol_cache_.AddBlock(Handle<String>::null(),
- symbol_id + 1 - symbol_cache_.length());
+ symbol_id + 1 - symbol_cache_.length(), zone());
}
Handle<String> result = symbol_cache_.at(symbol_id);
if (result.is_null()) {
@@ -408,7 +408,7 @@ unsigned* ScriptDataImpl::ReadAddress(int position) {
Scope* Parser::NewScope(Scope* parent, ScopeType type) {
- Scope* result = new(zone()) Scope(parent, type);
+ Scope* result = new(zone()) Scope(parent, type, zone());
result->Initialize();
return result;
}
@@ -535,9 +535,10 @@ Parser::FunctionState::~FunctionState() {
Parser::Parser(Handle<Script> script,
int parser_flags,
v8::Extension* extension,
- ScriptDataImpl* pre_data)
+ ScriptDataImpl* pre_data,
+ Zone* zone)
: isolate_(script->GetIsolate()),
- symbol_cache_(pre_data ? pre_data->symbol_count() : 0),
+ symbol_cache_(pre_data ? pre_data->symbol_count() : 0, zone),
script_(script),
scanner_(isolate_->unicode_cache()),
reusable_preparser_(NULL),
@@ -551,7 +552,8 @@ Parser::Parser(Handle<Script> script,
allow_lazy_((parser_flags & kAllowLazy) != 0),
allow_modules_((parser_flags & kAllowModules) != 0),
stack_overflow_(false),
- parenthesized_function_(false) {
+ parenthesized_function_(false),
+ zone_(zone) {
isolate_->set_ast_node_id(0);
if ((parser_flags & kLanguageModeMask) == EXTENDED_MODE) {
scanner().SetHarmonyScoping(true);
@@ -568,7 +570,7 @@ FunctionLiteral* Parser::ParseProgram(CompilationInfo* info) {
HistogramTimerScope timer(isolate()->counters()->parse());
Handle<String> source(String::cast(script_->source()));
isolate()->counters()->total_parse_size()->Increment(source->length());
- fni_ = new(zone()) FuncNameInferrer(isolate());
+ fni_ = new(zone()) FuncNameInferrer(isolate(), zone());
// Initialize parser state.
source->TryFlatten();
@@ -607,7 +609,8 @@ FunctionLiteral* Parser::DoParseProgram(CompilationInfo* info,
if (info->is_eval()) {
Handle<SharedFunctionInfo> shared = info->shared_info();
if (!info->is_global() && (shared.is_null() || shared->is_function())) {
- scope = Scope::DeserializeScopeChain(*info->calling_context(), scope);
+ scope = Scope::DeserializeScopeChain(*info->calling_context(), scope,
+ zone());
}
if (!scope->is_global_scope() || info->language_mode() != CLASSIC_MODE) {
scope = NewScope(scope, EVAL_SCOPE);
@@ -617,7 +620,7 @@ FunctionLiteral* Parser::DoParseProgram(CompilationInfo* info,
scope->set_end_position(source->length());
FunctionState function_state(this, scope, isolate());
top_scope_->SetLanguageMode(info->language_mode());
- ZoneList<Statement*>* body = new(zone()) ZoneList<Statement*>(16);
+ ZoneList<Statement*>* body = new(zone()) ZoneList<Statement*>(16, zone());
bool ok = true;
int beg_loc = scanner().location().beg_pos;
ParseSourceElements(body, Token::EOS, info->is_eval(), &ok);
@@ -694,7 +697,7 @@ FunctionLiteral* Parser::ParseLazy(CompilationInfo* info,
ASSERT(target_stack_ == NULL);
Handle<String> name(String::cast(shared_info->name()));
- fni_ = new(zone()) FuncNameInferrer(isolate());
+ fni_ = new(zone()) FuncNameInferrer(isolate(), zone());
fni_->PushEnclosingName(name);
mode_ = PARSE_EAGERLY;
@@ -707,7 +710,8 @@ FunctionLiteral* Parser::ParseLazy(CompilationInfo* info,
Scope* scope = NewScope(top_scope_, GLOBAL_SCOPE);
info->SetGlobalScope(scope);
if (!info->closure().is_null()) {
- scope = Scope::DeserializeScopeChain(info->closure()->context(), scope);
+ scope = Scope::DeserializeScopeChain(info->closure()->context(), scope,
+ zone());
}
FunctionState function_state(this, scope, isolate());
ASSERT(scope->language_mode() != STRICT_MODE || !info->is_classic_mode());
@@ -942,12 +946,13 @@ class InitializationBlockFinder : public ParserFinder {
// function contains only assignments of this type.
class ThisNamedPropertyAssignmentFinder : public ParserFinder {
public:
- explicit ThisNamedPropertyAssignmentFinder(Isolate* isolate)
+ ThisNamedPropertyAssignmentFinder(Isolate* isolate, Zone* zone)
: isolate_(isolate),
only_simple_this_property_assignments_(true),
- names_(0),
- assigned_arguments_(0),
- assigned_constants_(0) {
+ names_(0, zone),
+ assigned_arguments_(0, zone),
+ assigned_constants_(0, zone),
+ zone_(zone) {
}
void Update(Scope* scope, Statement* stat) {
@@ -1056,9 +1061,9 @@ class ThisNamedPropertyAssignmentFinder : public ParserFinder {
return;
}
}
- names_.Add(name);
- assigned_arguments_.Add(index);
- assigned_constants_.Add(isolate_->factory()->undefined_value());
+ names_.Add(name, zone());
+ assigned_arguments_.Add(index, zone());
+ assigned_constants_.Add(isolate_->factory()->undefined_value(), zone());
}
void AssignmentFromConstant(Handle<String> name, Handle<Object> value) {
@@ -1070,9 +1075,9 @@ class ThisNamedPropertyAssignmentFinder : public ParserFinder {
return;
}
}
- names_.Add(name);
- assigned_arguments_.Add(-1);
- assigned_constants_.Add(value);
+ names_.Add(name, zone());
+ assigned_arguments_.Add(-1, zone());
+ assigned_constants_.Add(value, zone());
}
void AssignmentFromSomethingElse() {
@@ -1084,17 +1089,20 @@ class ThisNamedPropertyAssignmentFinder : public ParserFinder {
if (names_.capacity() == 0) {
ASSERT(assigned_arguments_.capacity() == 0);
ASSERT(assigned_constants_.capacity() == 0);
- names_.Initialize(4);
- assigned_arguments_.Initialize(4);
- assigned_constants_.Initialize(4);
+ names_.Initialize(4, zone());
+ assigned_arguments_.Initialize(4, zone());
+ assigned_constants_.Initialize(4, zone());
}
}
+ Zone* zone() const { return zone_; }
+
Isolate* isolate_;
bool only_simple_this_property_assignments_;
ZoneStringList names_;
ZoneList<int> assigned_arguments_;
ZoneObjectList assigned_constants_;
+ Zone* zone_;
};
@@ -1113,7 +1121,8 @@ void* Parser::ParseSourceElements(ZoneList<Statement*>* processor,
ASSERT(processor != NULL);
InitializationBlockFinder block_finder(top_scope_, target_stack_);
- ThisNamedPropertyAssignmentFinder this_property_assignment_finder(isolate());
+ ThisNamedPropertyAssignmentFinder this_property_assignment_finder(isolate(),
+ zone());
bool directive_prologue = true; // Parsing directive prologue.
while (peek() != end_token) {
@@ -1171,7 +1180,7 @@ void* Parser::ParseSourceElements(ZoneList<Statement*>* processor,
if (top_scope_->is_function_scope()) {
this_property_assignment_finder.Update(top_scope_, stat);
}
- processor->Add(stat);
+ processor->Add(stat, zone());
}
// Propagate the collected information on this property assignments.
@@ -1242,7 +1251,7 @@ Block* Parser::ParseModuleDeclaration(ZoneStringList* names, bool* ok) {
// 'module' Identifier Module
// Create new block with one expected declaration.
- Block* block = factory()->NewBlock(NULL, 1, true);
+ Block* block = factory()->NewBlock(NULL, 1, true, zone());
Handle<String> name = ParseIdentifier(CHECK_OK);
#ifdef DEBUG
@@ -1268,7 +1277,7 @@ Block* Parser::ParseModuleDeclaration(ZoneStringList* names, bool* ok) {
// TODO(rossberg): Add initialization statement to block.
- if (names) names->Add(name);
+ if (names) names->Add(name, zone());
return block;
}
@@ -1305,7 +1314,7 @@ Module* Parser::ParseModuleLiteral(bool* ok) {
// '{' ModuleElement '}'
// Construct block expecting 16 statements.
- Block* body = factory()->NewBlock(NULL, 16, false);
+ Block* body = factory()->NewBlock(NULL, 16, false, zone());
#ifdef DEBUG
if (FLAG_print_interface_details) PrintF("# Literal ");
#endif
@@ -1317,7 +1326,7 @@ Module* Parser::ParseModuleLiteral(bool* ok) {
{
BlockState block_state(this, scope);
- TargetCollector collector;
+ TargetCollector collector(zone());
Target target(&this->target_stack_, &collector);
Target target_body(&this->target_stack_, body);
InitializationBlockFinder block_finder(top_scope_, target_stack_);
@@ -1325,7 +1334,7 @@ Module* Parser::ParseModuleLiteral(bool* ok) {
while (peek() != Token::RBRACE) {
Statement* stat = ParseModuleElement(NULL, CHECK_OK);
if (stat && !stat->IsEmpty()) {
- body->AddStatement(stat);
+ body->AddStatement(stat, zone());
block_finder.Update(stat);
}
}
@@ -1333,11 +1342,19 @@ Module* Parser::ParseModuleLiteral(bool* ok) {
Expect(Token::RBRACE, CHECK_OK);
scope->set_end_position(scanner().location().end_pos);
- body->set_block_scope(scope);
+ body->set_scope(scope);
- scope->interface()->Freeze(ok);
+  // Instance objects have to be created ahead of time (before code generation
+  // links them) because of potentially cyclic references between them.
+ // We create them here, to avoid another pass over the AST.
+ Interface* interface = scope->interface();
+ interface->MakeModule(ok);
+ ASSERT(ok);
+ interface->MakeSingleton(Isolate::Current()->factory()->NewJSModule(), ok);
ASSERT(ok);
- return factory()->NewModuleLiteral(body, scope->interface());
+ interface->Freeze(ok);
+ ASSERT(ok);
+ return factory()->NewModuleLiteral(body, interface);
}
@@ -1354,7 +1371,7 @@ Module* Parser::ParseModulePath(bool* ok) {
PrintF("# Path .%s ", name->ToAsciiArray());
#endif
Module* member = factory()->NewModulePath(result, name);
- result->interface()->Add(name, member->interface(), ok);
+ result->interface()->Add(name, member->interface(), zone(), ok);
if (!*ok) {
#ifdef DEBUG
if (FLAG_print_interfaces) {
@@ -1385,7 +1402,8 @@ Module* Parser::ParseModuleVariable(bool* ok) {
PrintF("# Module variable %s ", name->ToAsciiArray());
#endif
VariableProxy* proxy = top_scope_->NewUnresolved(
- factory(), name, scanner().location().beg_pos, Interface::NewModule());
+ factory(), name, scanner().location().beg_pos,
+ Interface::NewModule(zone()));
return factory()->NewModuleVariable(proxy);
}
@@ -1403,7 +1421,14 @@ Module* Parser::ParseModuleUrl(bool* ok) {
#ifdef DEBUG
if (FLAG_print_interface_details) PrintF("# Url ");
#endif
- return factory()->NewModuleUrl(symbol);
+
+ Module* result = factory()->NewModuleUrl(symbol);
+ Interface* interface = result->interface();
+ interface->MakeSingleton(Isolate::Current()->factory()->NewJSModule(), ok);
+ ASSERT(ok);
+ interface->Freeze(ok);
+ ASSERT(ok);
+ return result;
}
@@ -1427,14 +1452,14 @@ Block* Parser::ParseImportDeclaration(bool* ok) {
// TODO(ES6): implement destructuring ImportSpecifiers
Expect(Token::IMPORT, CHECK_OK);
- ZoneStringList names(1);
+ ZoneStringList names(1, zone());
Handle<String> name = ParseIdentifierName(CHECK_OK);
- names.Add(name);
+ names.Add(name, zone());
while (peek() == Token::COMMA) {
Consume(Token::COMMA);
name = ParseIdentifierName(CHECK_OK);
- names.Add(name);
+ names.Add(name, zone());
}
ExpectContextualKeyword("from", CHECK_OK);
@@ -1443,14 +1468,14 @@ Block* Parser::ParseImportDeclaration(bool* ok) {
// Generate a separate declaration for each identifier.
// TODO(ES6): once we implement destructuring, make that one declaration.
- Block* block = factory()->NewBlock(NULL, 1, true);
+ Block* block = factory()->NewBlock(NULL, 1, true, zone());
for (int i = 0; i < names.length(); ++i) {
#ifdef DEBUG
if (FLAG_print_interface_details)
PrintF("# Import %s ", names[i]->ToAsciiArray());
#endif
- Interface* interface = Interface::NewUnknown();
- module->interface()->Add(names[i], interface, ok);
+ Interface* interface = Interface::NewUnknown(zone());
+ module->interface()->Add(names[i], interface, zone(), ok);
if (!*ok) {
#ifdef DEBUG
if (FLAG_print_interfaces) {
@@ -1485,17 +1510,17 @@ Statement* Parser::ParseExportDeclaration(bool* ok) {
Expect(Token::EXPORT, CHECK_OK);
Statement* result = NULL;
- ZoneStringList names(1);
+ ZoneStringList names(1, zone());
switch (peek()) {
case Token::IDENTIFIER: {
Handle<String> name = ParseIdentifier(CHECK_OK);
// Handle 'module' as a context-sensitive keyword.
if (!name->IsEqualTo(CStrVector("module"))) {
- names.Add(name);
+ names.Add(name, zone());
while (peek() == Token::COMMA) {
Consume(Token::COMMA);
name = ParseIdentifier(CHECK_OK);
- names.Add(name);
+ names.Add(name, zone());
}
ExpectSemicolon(CHECK_OK);
result = factory()->NewEmptyStatement();
@@ -1528,8 +1553,10 @@ Statement* Parser::ParseExportDeclaration(bool* ok) {
if (FLAG_print_interface_details)
PrintF("# Export %s ", names[i]->ToAsciiArray());
#endif
- Interface* inner = Interface::NewUnknown();
- interface->Add(names[i], inner, CHECK_OK);
+ Interface* inner = Interface::NewUnknown(zone());
+ interface->Add(names[i], inner, zone(), CHECK_OK);
+ if (!*ok)
+ return NULL;
VariableProxy* proxy = NewUnresolved(names[i], LET, inner);
USE(proxy);
// TODO(rossberg): Rethink whether we actually need to store export
@@ -1656,13 +1683,13 @@ Statement* Parser::ParseStatement(ZoneStringList* labels, bool* ok) {
// one must take great care not to treat it as a
// fall-through. It is much easier just to wrap the entire
// try-statement in a statement block and put the labels there
- Block* result = factory()->NewBlock(labels, 1, false);
+ Block* result = factory()->NewBlock(labels, 1, false, zone());
Target target(&this->target_stack_, result);
TryStatement* statement = ParseTryStatement(CHECK_OK);
if (statement) {
statement->set_statement_pos(statement_pos);
}
- if (result) result->AddStatement(statement);
+ if (result) result->AddStatement(statement, zone());
return result;
}
@@ -1855,7 +1882,7 @@ void Parser::Declare(Declaration* declaration, bool resolve, bool* ok) {
if (FLAG_print_interface_details)
PrintF("# Declare %s\n", var->name()->ToAsciiArray());
#endif
- proxy->interface()->Unify(var->interface(), &ok);
+ proxy->interface()->Unify(var->interface(), zone(), &ok);
if (!ok) {
#ifdef DEBUG
if (FLAG_print_interfaces) {
@@ -1954,7 +1981,7 @@ Statement* Parser::ParseFunctionDeclaration(ZoneStringList* names, bool* ok) {
Declaration* declaration =
factory()->NewFunctionDeclaration(proxy, mode, fun, top_scope_);
Declare(declaration, true, CHECK_OK);
- if (names) names->Add(name);
+ if (names) names->Add(name, zone());
return factory()->NewEmptyStatement();
}
@@ -1969,14 +1996,14 @@ Block* Parser::ParseBlock(ZoneStringList* labels, bool* ok) {
// (ECMA-262, 3rd, 12.2)
//
// Construct block expecting 16 statements.
- Block* result = factory()->NewBlock(labels, 16, false);
+ Block* result = factory()->NewBlock(labels, 16, false, zone());
Target target(&this->target_stack_, result);
Expect(Token::LBRACE, CHECK_OK);
InitializationBlockFinder block_finder(top_scope_, target_stack_);
while (peek() != Token::RBRACE) {
Statement* stat = ParseStatement(NULL, CHECK_OK);
if (stat && !stat->IsEmpty()) {
- result->AddStatement(stat);
+ result->AddStatement(stat, zone());
block_finder.Update(stat);
}
}
@@ -1992,14 +2019,14 @@ Block* Parser::ParseScopedBlock(ZoneStringList* labels, bool* ok) {
// '{' BlockElement* '}'
// Construct block expecting 16 statements.
- Block* body = factory()->NewBlock(labels, 16, false);
+ Block* body = factory()->NewBlock(labels, 16, false, zone());
Scope* block_scope = NewScope(top_scope_, BLOCK_SCOPE);
// Parse the statements and collect escaping labels.
Expect(Token::LBRACE, CHECK_OK);
block_scope->set_start_position(scanner().location().beg_pos);
{ BlockState block_state(this, block_scope);
- TargetCollector collector;
+ TargetCollector collector(zone());
Target target(&this->target_stack_, &collector);
Target target_body(&this->target_stack_, body);
InitializationBlockFinder block_finder(top_scope_, target_stack_);
@@ -2007,7 +2034,7 @@ Block* Parser::ParseScopedBlock(ZoneStringList* labels, bool* ok) {
while (peek() != Token::RBRACE) {
Statement* stat = ParseBlockElement(NULL, CHECK_OK);
if (stat && !stat->IsEmpty()) {
- body->AddStatement(stat);
+ body->AddStatement(stat, zone());
block_finder.Update(stat);
}
}
@@ -2015,7 +2042,7 @@ Block* Parser::ParseScopedBlock(ZoneStringList* labels, bool* ok) {
Expect(Token::RBRACE, CHECK_OK);
block_scope->set_end_position(scanner().location().end_pos);
block_scope = block_scope->FinalizeBlockScope();
- body->set_block_scope(block_scope);
+ body->set_scope(block_scope);
return body;
}
@@ -2149,7 +2176,7 @@ Block* Parser::ParseVariableDeclarations(
// is inside an initializer block, it is ignored.
//
// Create new block with one expected declaration.
- Block* block = factory()->NewBlock(NULL, 1, true);
+ Block* block = factory()->NewBlock(NULL, 1, true, zone());
int nvars = 0; // the number of variables declared
Handle<String> name;
do {
@@ -2193,7 +2220,7 @@ Block* Parser::ParseVariableDeclarations(
*ok = false;
return NULL;
}
- if (names) names->Add(name);
+ if (names) names->Add(name, zone());
// Parse initialization expression if present and/or needed. A
// declaration of the form:
@@ -2254,7 +2281,7 @@ Block* Parser::ParseVariableDeclarations(
// Global variable declarations must be compiled in a specific
// way. When the script containing the global variable declaration
// is entered, the global variable must be declared, so that if it
- // doesn't exist (not even in a prototype of the global object) it
+ // doesn't exist (on the global object itself, see ES5 errata) it
// gets created with an initial undefined value. This is handled
// by the declarations part of the function representing the
// top-level global code; see Runtime::DeclareGlobalVariable. If
@@ -2272,13 +2299,14 @@ Block* Parser::ParseVariableDeclarations(
// properties defined in prototype objects.
if (initialization_scope->is_global_scope()) {
// Compute the arguments for the runtime call.
- ZoneList<Expression*>* arguments = new(zone()) ZoneList<Expression*>(3);
+ ZoneList<Expression*>* arguments =
+ new(zone()) ZoneList<Expression*>(3, zone());
// We have at least 1 parameter.
- arguments->Add(factory()->NewLiteral(name));
+ arguments->Add(factory()->NewLiteral(name), zone());
CallRuntime* initialize;
if (is_const) {
- arguments->Add(value);
+ arguments->Add(value, zone());
value = NULL; // zap the value to avoid the unnecessary assignment
// Construct the call to Runtime_InitializeConstGlobal
@@ -2293,14 +2321,14 @@ Block* Parser::ParseVariableDeclarations(
// Add strict mode.
// We may want to pass singleton to avoid Literal allocations.
LanguageMode language_mode = initialization_scope->language_mode();
- arguments->Add(factory()->NewNumberLiteral(language_mode));
+ arguments->Add(factory()->NewNumberLiteral(language_mode), zone());
// Be careful not to assign a value to the global variable if
// we're in a with. The initialization value should not
// necessarily be stored in the global object in that case,
// which is why we need to generate a separate assignment node.
if (value != NULL && !inside_with()) {
- arguments->Add(value);
+ arguments->Add(value, zone());
value = NULL; // zap the value to avoid the unnecessary assignment
}
@@ -2314,7 +2342,8 @@ Block* Parser::ParseVariableDeclarations(
arguments);
}
- block->AddStatement(factory()->NewExpressionStatement(initialize));
+ block->AddStatement(factory()->NewExpressionStatement(initialize),
+ zone());
} else if (needs_init) {
// Constant initializations always assign to the declared constant which
// is always at the function scope level. This is only relevant for
@@ -2328,7 +2357,8 @@ Block* Parser::ParseVariableDeclarations(
ASSERT(value != NULL);
Assignment* assignment =
factory()->NewAssignment(init_op, proxy, value, position);
- block->AddStatement(factory()->NewExpressionStatement(assignment));
+ block->AddStatement(factory()->NewExpressionStatement(assignment),
+ zone());
value = NULL;
}
@@ -2343,7 +2373,8 @@ Block* Parser::ParseVariableDeclarations(
initialization_scope->NewUnresolved(factory(), name);
Assignment* assignment =
factory()->NewAssignment(init_op, proxy, value, position);
- block->AddStatement(factory()->NewExpressionStatement(assignment));
+ block->AddStatement(factory()->NewExpressionStatement(assignment),
+ zone());
}
if (fni_ != NULL) fni_->Leave();
@@ -2397,8 +2428,10 @@ Statement* Parser::ParseExpressionOrLabelledStatement(ZoneStringList* labels,
*ok = false;
return NULL;
}
- if (labels == NULL) labels = new(zone()) ZoneStringList(4);
- labels->Add(label);
+ if (labels == NULL) {
+ labels = new(zone()) ZoneStringList(4, zone());
+ }
+ labels->Add(label, zone());
// Remove the "ghost" variable that turned out to be a label
// from the top scope. This way, we don't try to resolve it
// during the scope processing.
@@ -2610,12 +2643,13 @@ CaseClause* Parser::ParseCaseClause(bool* default_seen_ptr, bool* ok) {
}
Expect(Token::COLON, CHECK_OK);
int pos = scanner().location().beg_pos;
- ZoneList<Statement*>* statements = new(zone()) ZoneList<Statement*>(5);
+ ZoneList<Statement*>* statements =
+ new(zone()) ZoneList<Statement*>(5, zone());
while (peek() != Token::CASE &&
peek() != Token::DEFAULT &&
peek() != Token::RBRACE) {
Statement* stat = ParseStatement(NULL, CHECK_OK);
- statements->Add(stat);
+ statements->Add(stat, zone());
}
return new(zone()) CaseClause(isolate(), label, statements, pos);
@@ -2636,11 +2670,11 @@ SwitchStatement* Parser::ParseSwitchStatement(ZoneStringList* labels,
Expect(Token::RPAREN, CHECK_OK);
bool default_seen = false;
- ZoneList<CaseClause*>* cases = new(zone()) ZoneList<CaseClause*>(4);
+ ZoneList<CaseClause*>* cases = new(zone()) ZoneList<CaseClause*>(4, zone());
Expect(Token::LBRACE, CHECK_OK);
while (peek() != Token::RBRACE) {
CaseClause* clause = ParseCaseClause(&default_seen, CHECK_OK);
- cases->Add(clause);
+ cases->Add(clause, zone());
}
Expect(Token::RBRACE, CHECK_OK);
@@ -2681,7 +2715,7 @@ TryStatement* Parser::ParseTryStatement(bool* ok) {
Expect(Token::TRY, CHECK_OK);
- TargetCollector try_collector;
+ TargetCollector try_collector(zone());
Block* try_block;
{ Target target(&this->target_stack_, &try_collector);
@@ -2699,7 +2733,7 @@ TryStatement* Parser::ParseTryStatement(bool* ok) {
// then we will need to collect escaping targets from the catch
// block. Since we don't know yet if there will be a finally block, we
// always collect the targets.
- TargetCollector catch_collector;
+ TargetCollector catch_collector(zone());
Scope* catch_scope = NULL;
Variable* catch_variable = NULL;
Block* catch_block = NULL;
@@ -2753,8 +2787,8 @@ TryStatement* Parser::ParseTryStatement(bool* ok) {
TryCatchStatement* statement = factory()->NewTryCatchStatement(
index, try_block, catch_scope, catch_variable, catch_block);
statement->set_escaping_targets(try_collector.targets());
- try_block = factory()->NewBlock(NULL, 1, false);
- try_block->AddStatement(statement);
+ try_block = factory()->NewBlock(NULL, 1, false, zone());
+ try_block->AddStatement(statement, zone());
catch_block = NULL; // Clear to indicate it's been handled.
}
@@ -2770,7 +2804,7 @@ TryStatement* Parser::ParseTryStatement(bool* ok) {
int index = current_function_state_->NextHandlerIndex();
result = factory()->NewTryFinallyStatement(index, try_block, finally_block);
// Combine the jump targets of the try block and the possible catch block.
- try_collector.targets()->AddAll(*catch_collector.targets());
+ try_collector.targets()->AddAll(*catch_collector.targets(), zone());
}
result->set_escaping_targets(try_collector.targets());
@@ -2859,9 +2893,9 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
Statement* body = ParseStatement(NULL, CHECK_OK);
loop->Initialize(each, enumerable, body);
- Block* result = factory()->NewBlock(NULL, 2, false);
- result->AddStatement(variable_statement);
- result->AddStatement(loop);
+ Block* result = factory()->NewBlock(NULL, 2, false, zone());
+ result->AddStatement(variable_statement, zone());
+ result->AddStatement(loop, zone());
top_scope_ = saved_scope;
for_scope->set_end_position(scanner().location().end_pos);
for_scope = for_scope->FinalizeBlockScope();
@@ -2905,19 +2939,19 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
Expect(Token::RPAREN, CHECK_OK);
Statement* body = ParseStatement(NULL, CHECK_OK);
- Block* body_block = factory()->NewBlock(NULL, 3, false);
+ Block* body_block = factory()->NewBlock(NULL, 3, false, zone());
Assignment* assignment = factory()->NewAssignment(
Token::ASSIGN, each, temp_proxy, RelocInfo::kNoPosition);
Statement* assignment_statement =
factory()->NewExpressionStatement(assignment);
- body_block->AddStatement(variable_statement);
- body_block->AddStatement(assignment_statement);
- body_block->AddStatement(body);
+ body_block->AddStatement(variable_statement, zone());
+ body_block->AddStatement(assignment_statement, zone());
+ body_block->AddStatement(body, zone());
loop->Initialize(temp_proxy, enumerable, body_block);
top_scope_ = saved_scope;
for_scope->set_end_position(scanner().location().end_pos);
for_scope = for_scope->FinalizeBlockScope();
- body_block->set_block_scope(for_scope);
+ body_block->set_scope(for_scope);
// Parsed for-in loop w/ let declaration.
return loop;
@@ -2994,10 +3028,10 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
// for (; c; n) b
// }
ASSERT(init != NULL);
- Block* result = factory()->NewBlock(NULL, 2, false);
- result->AddStatement(init);
- result->AddStatement(loop);
- result->set_block_scope(for_scope);
+ Block* result = factory()->NewBlock(NULL, 2, false, zone());
+ result->AddStatement(init, zone());
+ result->AddStatement(loop, zone());
+ result->set_scope(for_scope);
if (loop) loop->Initialize(NULL, cond, next, body);
return result;
} else {
@@ -3439,7 +3473,7 @@ Expression* Parser::ParseNewPrefix(PositionStack* stack, bool* ok) {
if (!stack->is_empty()) {
int last = stack->pop();
result = factory()->NewCallNew(
- result, new(zone()) ZoneList<Expression*>(0), last);
+ result, new(zone()) ZoneList<Expression*>(0, zone()), last);
}
return result;
}
@@ -3629,7 +3663,7 @@ Expression* Parser::ParsePrimaryExpression(bool* ok) {
if (FLAG_print_interface_details)
PrintF("# Variable %s ", name->ToAsciiArray());
#endif
- Interface* interface = Interface::NewUnknown();
+ Interface* interface = Interface::NewUnknown(zone());
result = top_scope_->NewUnresolved(
factory(), name, scanner().location().beg_pos, interface);
break;
@@ -3729,7 +3763,7 @@ Expression* Parser::ParseArrayLiteral(bool* ok) {
// ArrayLiteral ::
// '[' Expression? (',' Expression?)* ']'
- ZoneList<Expression*>* values = new(zone()) ZoneList<Expression*>(4);
+ ZoneList<Expression*>* values = new(zone()) ZoneList<Expression*>(4, zone());
Expect(Token::LBRACK, CHECK_OK);
while (peek() != Token::RBRACK) {
Expression* elem;
@@ -3738,7 +3772,7 @@ Expression* Parser::ParseArrayLiteral(bool* ok) {
} else {
elem = ParseAssignmentExpression(true, CHECK_OK);
}
- values->Add(elem);
+ values->Add(elem, zone());
if (peek() != Token::RBRACK) {
Expect(Token::COMMA, CHECK_OK);
}
@@ -3752,10 +3786,12 @@ Expression* Parser::ParseArrayLiteral(bool* ok) {
Handle<FixedArray> object_literals =
isolate()->factory()->NewFixedArray(values->length(), TENURED);
Handle<FixedDoubleArray> double_literals;
- ElementsKind elements_kind = FAST_SMI_ONLY_ELEMENTS;
+ ElementsKind elements_kind = FAST_SMI_ELEMENTS;
bool has_only_undefined_values = true;
+ bool has_hole_values = false;
// Fill in the literals.
+ Heap* heap = isolate()->heap();
bool is_simple = true;
int depth = 1;
for (int i = 0, n = values->length(); i < n; i++) {
@@ -3764,12 +3800,18 @@ Expression* Parser::ParseArrayLiteral(bool* ok) {
depth = m_literal->depth() + 1;
}
Handle<Object> boilerplate_value = GetBoilerplateValue(values->at(i));
- if (boilerplate_value->IsUndefined()) {
+ if (boilerplate_value->IsTheHole()) {
+ has_hole_values = true;
object_literals->set_the_hole(i);
if (elements_kind == FAST_DOUBLE_ELEMENTS) {
double_literals->set_the_hole(i);
}
+ } else if (boilerplate_value->IsUndefined()) {
is_simple = false;
+ object_literals->set(i, Smi::FromInt(0));
+ if (elements_kind == FAST_DOUBLE_ELEMENTS) {
+ double_literals->set(i, 0);
+ }
} else {
// Examine each literal element, and adjust the ElementsKind if the
// literal element is not of a type that can be stored in the current
@@ -3779,7 +3821,7 @@ Expression* Parser::ParseArrayLiteral(bool* ok) {
// ultimately end up in FAST_ELEMENTS.
has_only_undefined_values = false;
object_literals->set(i, *boilerplate_value);
- if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
+ if (elements_kind == FAST_SMI_ELEMENTS) {
// Smi only elements. Notice if a transition to FAST_DOUBLE_ELEMENTS or
// FAST_ELEMENTS is required.
if (!boilerplate_value->IsSmi()) {
@@ -3827,7 +3869,7 @@ Expression* Parser::ParseArrayLiteral(bool* ok) {
// elements array to a copy-on-write array.
if (is_simple && depth == 1 && values->length() > 0 &&
elements_kind != FAST_DOUBLE_ELEMENTS) {
- object_literals->set_map(isolate()->heap()->fixed_cow_array_map());
+ object_literals->set_map(heap->fixed_cow_array_map());
}
Handle<FixedArrayBase> element_values = elements_kind == FAST_DOUBLE_ELEMENTS
@@ -3839,6 +3881,10 @@ Expression* Parser::ParseArrayLiteral(bool* ok) {
Handle<FixedArray> literals =
isolate()->factory()->NewFixedArray(2, TENURED);
+ if (has_hole_values || !FLAG_packed_arrays) {
+ elements_kind = GetHoleyElementsKind(elements_kind);
+ }
+
literals->set(0, Smi::FromInt(elements_kind));
literals->set(1, *element_values);
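The array-literal changes above distinguish elided elements (holes) from explicit undefined, and at the end widen the chosen kind to its holey variant whenever a hole was seen or --packed-arrays is disabled. A self-contained sketch of that final widening step, with a local enum standing in for v8::internal::ElementsKind:

    enum Kind { SMI, HOLEY_SMI, DOUBLE, HOLEY_DOUBLE, OBJECT, HOLEY_OBJECT };

    static Kind ToHoley(Kind kind) {
      switch (kind) {
        case SMI:    return HOLEY_SMI;     // e.g. [1, , 3]
        case DOUBLE: return HOLEY_DOUBLE;  // e.g. [1.5, , 3.5]
        case OBJECT: return HOLEY_OBJECT;  // e.g. ['a', , 'b']
        default:     return kind;          // already holey
      }
    }

    static Kind FinalKind(Kind packed, bool saw_hole, bool packed_arrays_enabled) {
      return (saw_hole || !packed_arrays_enabled) ? ToHoley(packed) : packed;
    }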
@@ -4095,7 +4141,7 @@ Expression* Parser::ParseObjectLiteral(bool* ok) {
// )*[','] '}'
ZoneList<ObjectLiteral::Property*>* properties =
- new(zone()) ZoneList<ObjectLiteral::Property*>(4);
+ new(zone()) ZoneList<ObjectLiteral::Property*>(4, zone());
int number_of_boilerplate_properties = 0;
bool has_function = false;
@@ -4132,7 +4178,7 @@ Expression* Parser::ParseObjectLiteral(bool* ok) {
}
// Validate the property.
checker.CheckProperty(property, loc, CHECK_OK);
- properties->Add(property);
+ properties->Add(property, zone());
if (peek() != Token::RBRACE) Expect(Token::COMMA, CHECK_OK);
if (fni_ != NULL) {
@@ -4200,7 +4246,7 @@ Expression* Parser::ParseObjectLiteral(bool* ok) {
if (IsBoilerplateProperty(property)) number_of_boilerplate_properties++;
// Validate the property
checker.CheckProperty(property, loc, CHECK_OK);
- properties->Add(property);
+ properties->Add(property, zone());
// TODO(1240767): Consider allowing trailing comma.
if (peek() != Token::RBRACE) Expect(Token::COMMA, CHECK_OK);
@@ -4259,12 +4305,12 @@ ZoneList<Expression*>* Parser::ParseArguments(bool* ok) {
// Arguments ::
// '(' (AssignmentExpression)*[','] ')'
- ZoneList<Expression*>* result = new(zone()) ZoneList<Expression*>(4);
+ ZoneList<Expression*>* result = new(zone()) ZoneList<Expression*>(4, zone());
Expect(Token::LPAREN, CHECK_OK);
bool done = (peek() == Token::RPAREN);
while (!done) {
Expression* argument = ParseAssignmentExpression(true, CHECK_OK);
- result->Add(argument);
+ result->Add(argument, zone());
if (result->length() > kMaxNumFunctionParameters) {
ReportMessageAt(scanner().location(), "too_many_arguments",
Vector<const char*>::empty());
@@ -4460,15 +4506,15 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name,
Variable* fvar = NULL;
Token::Value fvar_init_op = Token::INIT_CONST;
if (type == FunctionLiteral::NAMED_EXPRESSION) {
- VariableMode fvar_mode;
- if (is_extended_mode()) {
- fvar_mode = CONST_HARMONY;
- fvar_init_op = Token::INIT_CONST_HARMONY;
- } else {
- fvar_mode = CONST;
- }
- fvar =
- top_scope_->DeclareFunctionVar(function_name, fvar_mode, factory());
+ if (is_extended_mode()) fvar_init_op = Token::INIT_CONST_HARMONY;
+ VariableMode fvar_mode = is_extended_mode() ? CONST_HARMONY : CONST;
+ fvar = new(zone()) Variable(top_scope_,
+ function_name, fvar_mode, true /* is valid LHS */,
+ Variable::NORMAL, kCreatedInitialized);
+ VariableProxy* proxy = factory()->NewVariableProxy(fvar);
+ VariableDeclaration* fvar_declaration =
+ factory()->NewVariableDeclaration(proxy, fvar_mode, top_scope_);
+ top_scope_->DeclareFunctionVar(fvar_declaration);
}
// Determine whether the function will be lazily compiled.
@@ -4553,7 +4599,7 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name,
}
if (!is_lazily_compiled) {
- body = new(zone()) ZoneList<Statement*>(8);
+ body = new(zone()) ZoneList<Statement*>(8, zone());
if (fvar != NULL) {
VariableProxy* fproxy =
top_scope_->NewUnresolved(factory(), function_name);
@@ -4562,7 +4608,8 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name,
factory()->NewAssignment(fvar_init_op,
fproxy,
factory()->NewThisFunction(),
- RelocInfo::kNoPosition)));
+ RelocInfo::kNoPosition)),
+ zone());
}
ParseSourceElements(body, Token::RBRACE, false, CHECK_OK);
@@ -4961,7 +5008,7 @@ void Parser::RegisterTargetUse(Label* target, Target* stop) {
// the break target to any TargetCollectors passed on the stack.
for (Target* t = target_stack_; t != stop; t = t->previous()) {
TargetCollector* collector = t->node()->AsTargetCollector();
- if (collector != NULL) collector->AddTarget(target);
+ if (collector != NULL) collector->AddTarget(target, zone());
}
}
@@ -5008,9 +5055,9 @@ Expression* Parser::NewThrowError(Handle<String> constructor,
Handle<JSArray> array = isolate()->factory()->NewJSArrayWithElements(
elements, FAST_ELEMENTS, TENURED);
- ZoneList<Expression*>* args = new(zone()) ZoneList<Expression*>(2);
- args->Add(factory()->NewLiteral(type));
- args->Add(factory()->NewLiteral(array));
+ ZoneList<Expression*>* args = new(zone()) ZoneList<Expression*>(2, zone());
+ args->Add(factory()->NewLiteral(type), zone());
+ args->Add(factory()->NewLiteral(array), zone());
CallRuntime* call_constructor =
factory()->NewCallRuntime(constructor, NULL, args);
return factory()->NewThrow(call_constructor, scanner().location().beg_pos);
@@ -5119,7 +5166,7 @@ RegExpTree* RegExpParser::ParsePattern() {
// Atom Quantifier
RegExpTree* RegExpParser::ParseDisjunction() {
// Used to store current state while parsing subexpressions.
- RegExpParserState initial_state(NULL, INITIAL, 0);
+ RegExpParserState initial_state(NULL, INITIAL, 0, zone());
RegExpParserState* stored_state = &initial_state;
// Cache the builder in a local variable for quick access.
RegExpBuilder* builder = initial_state.builder();
@@ -5204,8 +5251,8 @@ RegExpTree* RegExpParser::ParseDisjunction() {
Advance();
// everything except \x0a, \x0d, \u2028 and \u2029
ZoneList<CharacterRange>* ranges =
- new(zone()) ZoneList<CharacterRange>(2);
- CharacterRange::AddClassEscape('.', ranges);
+ new(zone()) ZoneList<CharacterRange>(2, zone());
+ CharacterRange::AddClassEscape('.', ranges, zone());
RegExpTree* atom = new(zone()) RegExpCharacterClass(ranges, false);
builder->AddAtom(atom);
break;
@@ -5231,17 +5278,16 @@ RegExpTree* RegExpParser::ParseDisjunction() {
Advance(2);
} else {
if (captures_ == NULL) {
- captures_ = new(zone()) ZoneList<RegExpCapture*>(2);
+ captures_ = new(zone()) ZoneList<RegExpCapture*>(2, zone());
}
if (captures_started() >= kMaxCaptures) {
ReportError(CStrVector("Too many captures") CHECK_FAILED);
}
- captures_->Add(NULL);
+ captures_->Add(NULL, zone());
}
// Store current state and begin new disjunction parsing.
- stored_state = new(zone()) RegExpParserState(stored_state,
- type,
- captures_started());
+ stored_state = new(zone()) RegExpParserState(stored_state, type,
+ captures_started(), zone());
builder = stored_state->builder();
continue;
}
@@ -5275,8 +5321,8 @@ RegExpTree* RegExpParser::ParseDisjunction() {
uc32 c = Next();
Advance(2);
ZoneList<CharacterRange>* ranges =
- new(zone()) ZoneList<CharacterRange>(2);
- CharacterRange::AddClassEscape(c, ranges);
+ new(zone()) ZoneList<CharacterRange>(2, zone());
+ CharacterRange::AddClassEscape(c, ranges, zone());
RegExpTree* atom = new(zone()) RegExpCharacterClass(ranges, false);
builder->AddAtom(atom);
break;
@@ -5751,11 +5797,12 @@ static const uc16 kNoCharClass = 0;
// escape (i.e., 's' means whitespace, from '\s').
static inline void AddRangeOrEscape(ZoneList<CharacterRange>* ranges,
uc16 char_class,
- CharacterRange range) {
+ CharacterRange range,
+ Zone* zone) {
if (char_class != kNoCharClass) {
- CharacterRange::AddClassEscape(char_class, ranges);
+ CharacterRange::AddClassEscape(char_class, ranges, zone);
} else {
- ranges->Add(range);
+ ranges->Add(range, zone);
}
}
@@ -5771,7 +5818,8 @@ RegExpTree* RegExpParser::ParseCharacterClass() {
is_negated = true;
Advance();
}
- ZoneList<CharacterRange>* ranges = new(zone()) ZoneList<CharacterRange>(2);
+ ZoneList<CharacterRange>* ranges =
+ new(zone()) ZoneList<CharacterRange>(2, zone());
while (has_more() && current() != ']') {
uc16 char_class = kNoCharClass;
CharacterRange first = ParseClassAtom(&char_class CHECK_FAILED);
@@ -5782,25 +5830,25 @@ RegExpTree* RegExpParser::ParseCharacterClass() {
// following code report an error.
break;
} else if (current() == ']') {
- AddRangeOrEscape(ranges, char_class, first);
- ranges->Add(CharacterRange::Singleton('-'));
+ AddRangeOrEscape(ranges, char_class, first, zone());
+ ranges->Add(CharacterRange::Singleton('-'), zone());
break;
}
uc16 char_class_2 = kNoCharClass;
CharacterRange next = ParseClassAtom(&char_class_2 CHECK_FAILED);
if (char_class != kNoCharClass || char_class_2 != kNoCharClass) {
// Either end is an escaped character class. Treat the '-' verbatim.
- AddRangeOrEscape(ranges, char_class, first);
- ranges->Add(CharacterRange::Singleton('-'));
- AddRangeOrEscape(ranges, char_class_2, next);
+ AddRangeOrEscape(ranges, char_class, first, zone());
+ ranges->Add(CharacterRange::Singleton('-'), zone());
+ AddRangeOrEscape(ranges, char_class_2, next, zone());
continue;
}
if (first.from() > next.to()) {
return ReportError(CStrVector(kRangeOutOfOrder) CHECK_FAILED);
}
- ranges->Add(CharacterRange::Range(first.from(), next.to()));
+ ranges->Add(CharacterRange::Range(first.from(), next.to()), zone());
} else {
- AddRangeOrEscape(ranges, char_class, first);
+ AddRangeOrEscape(ranges, char_class, first, zone());
}
}
if (!has_more()) {
@@ -5808,7 +5856,7 @@ RegExpTree* RegExpParser::ParseCharacterClass() {
}
Advance();
if (ranges->length() == 0) {
- ranges->Add(CharacterRange::Everything());
+ ranges->Add(CharacterRange::Everything(), zone());
is_negated = !is_negated;
}
return new(zone()) RegExpCharacterClass(ranges, is_negated);
@@ -5993,7 +6041,7 @@ bool ParserApi::Parse(CompilationInfo* info, int parsing_flags) {
}
if (info->is_lazy()) {
ASSERT(!info->is_eval());
- Parser parser(script, parsing_flags, NULL, NULL);
+ Parser parser(script, parsing_flags, NULL, NULL, info->isolate()->zone());
if (info->shared_info()->is_function()) {
result = parser.ParseLazy(info);
} else {
@@ -6001,7 +6049,8 @@ bool ParserApi::Parse(CompilationInfo* info, int parsing_flags) {
}
} else {
ScriptDataImpl* pre_data = info->pre_parse_data();
- Parser parser(script, parsing_flags, info->extension(), pre_data);
+ Parser parser(script, parsing_flags, info->extension(), pre_data,
+ info->isolate()->zone());
if (pre_data != NULL && pre_data->has_error()) {
Scanner::Location loc = pre_data->MessageLocation();
const char* message = pre_data->BuildMessage();
diff --git a/deps/v8/src/parser.h b/deps/v8/src/parser.h
index b4d88255f7..773d59a5e2 100644
--- a/deps/v8/src/parser.h
+++ b/deps/v8/src/parser.h
@@ -200,12 +200,12 @@ class BufferedZoneList {
// Adds element at end of list. This element is buffered and can
// be read using last() or removed using RemoveLast until a new Add or until
// RemoveLast or GetList has been called.
- void Add(T* value) {
+ void Add(T* value, Zone* zone) {
if (last_ != NULL) {
if (list_ == NULL) {
- list_ = new ZoneList<T*>(initial_size);
+ list_ = new(zone) ZoneList<T*>(initial_size, zone);
}
- list_->Add(last_);
+ list_->Add(last_, zone);
}
last_ = value;
}
@@ -250,12 +250,12 @@ class BufferedZoneList {
return length + ((last_ == NULL) ? 0 : 1);
}
- ZoneList<T*>* GetList() {
+ ZoneList<T*>* GetList(Zone* zone) {
if (list_ == NULL) {
- list_ = new ZoneList<T*>(initial_size);
+ list_ = new(zone) ZoneList<T*>(initial_size, zone);
}
if (last_ != NULL) {
- list_->Add(last_);
+ list_->Add(last_, zone);
last_ = NULL;
}
return list_;
@@ -270,7 +270,7 @@ class BufferedZoneList {
// Accumulates RegExp atoms and assertions into lists of terms and alternatives.
class RegExpBuilder: public ZoneObject {
public:
- RegExpBuilder();
+ explicit RegExpBuilder(Zone* zone);
void AddCharacter(uc16 character);
// "Adds" an empty expression. Does nothing except consume a
// following quantifier
@@ -285,7 +285,7 @@ class RegExpBuilder: public ZoneObject {
void FlushCharacters();
void FlushText();
void FlushTerms();
- Zone* zone() { return zone_; }
+ Zone* zone() const { return zone_; }
Zone* zone_;
bool pending_empty_;
@@ -368,9 +368,10 @@ class RegExpParser {
public:
RegExpParserState(RegExpParserState* previous_state,
SubexpressionType group_type,
- int disjunction_capture_index)
+ int disjunction_capture_index,
+ Zone* zone)
: previous_state_(previous_state),
- builder_(new RegExpBuilder()),
+ builder_(new(zone) RegExpBuilder(zone)),
group_type_(group_type),
disjunction_capture_index_(disjunction_capture_index) {}
// Parser state of containing expression, if any.
@@ -397,7 +398,7 @@ class RegExpParser {
};
Isolate* isolate() { return isolate_; }
- Zone* zone() { return isolate_->zone(); }
+ Zone* zone() const { return isolate_->zone(); }
uc32 current() { return current_; }
bool has_more() { return has_more_; }
@@ -433,7 +434,8 @@ class Parser {
Parser(Handle<Script> script,
int parsing_flags, // Combination of ParsingFlags
v8::Extension* extension,
- ScriptDataImpl* pre_data);
+ ScriptDataImpl* pre_data,
+ Zone* zone);
virtual ~Parser() {
delete reusable_preparser_;
reusable_preparser_ = NULL;
@@ -546,7 +548,7 @@ class Parser {
ZoneScope* zone_scope);
Isolate* isolate() { return isolate_; }
- Zone* zone() { return isolate_->zone(); }
+ Zone* zone() const { return zone_; }
// Called by ParseProgram after setting up the scanner.
FunctionLiteral* DoParseProgram(CompilationInfo* info,
@@ -834,6 +836,7 @@ class Parser {
// so never lazily compile it.
bool parenthesized_function_;
+ Zone* zone_;
friend class BlockState;
friend class FunctionState;
};
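
Editor's note: the parser.h hunks above thread a Zone* explicitly through every allocation call (Add(value, zone), new(zone) ZoneList(...)) instead of letting containers fetch the zone from the isolate. A minimal sketch of the same pattern in standard C++, with a toy Zone standing in for V8's arena (class names and the malloc-backed allocator are illustrative assumptions, not V8's implementation):

    #include <cstdlib>
    #include <vector>

    // Toy arena: every allocation lives until the Zone itself is destroyed.
    class Zone {
     public:
      ~Zone() { for (size_t i = 0; i < blocks_.size(); ++i) free(blocks_[i]); }
      void* Allocate(size_t size) {
        void* p = malloc(size);  // a real zone bump-allocates from segments
        blocks_.push_back(p);
        return p;
      }
     private:
      std::vector<void*> blocks_;
    };

    // A list that never owns a Zone; every growing operation receives one
    // explicitly, mirroring ZoneList<T>::Add(value, zone) in the patch.
    template <typename T>
    class ZoneList {
     public:
      ZoneList(int capacity, Zone* zone)
          : data_(NULL), length_(0), capacity_(0) { Grow(capacity, zone); }
      void Add(const T& value, Zone* zone) {
        if (length_ == capacity_) Grow(capacity_ == 0 ? 4 : capacity_ * 2, zone);
        data_[length_++] = value;
      }
      int length() const { return length_; }
      const T& at(int i) const { return data_[i]; }
     private:
      void Grow(int new_capacity, Zone* zone) {
        T* new_data = static_cast<T*>(zone->Allocate(new_capacity * sizeof(T)));
        for (int i = 0; i < length_; ++i) new_data[i] = data_[i];
        data_ = new_data;        // the old block is reclaimed when the zone dies
        capacity_ = new_capacity;
      }
      T* data_;
      int length_;
      int capacity_;
    };

    int main() {
      Zone zone;
      ZoneList<int> list(4, &zone);
      for (int i = 0; i < 10; ++i) list.Add(i, &zone);
      return list.at(9) == 9 ? 0 : 1;  // all memory is released by ~Zone()
    }
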
diff --git a/deps/v8/src/platform-cygwin.cc b/deps/v8/src/platform-cygwin.cc
index dd7253ba06..089ea38d9a 100644
--- a/deps/v8/src/platform-cygwin.cc
+++ b/deps/v8/src/platform-cygwin.cc
@@ -62,22 +62,8 @@ double ceiling(double x) {
static Mutex* limit_mutex = NULL;
-void OS::SetUp() {
- // Seed the random number generator.
- // Convert the current time to a 64-bit integer first, before converting it
- // to an unsigned. Going directly can cause an overflow and the seed to be
- // set to all ones. The seed will be identical for different instances that
- // call this setup code within the same millisecond.
- uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis());
- srandom(static_cast<unsigned int>(seed));
- limit_mutex = CreateMutex();
-}
-
-
void OS::PostSetUp() {
- // Math functions depend on CPU features therefore they are initialized after
- // CPU.
- MathSetup();
+ POSIXPostSetUp();
}
uint64_t OS::CpuFeaturesImpliedByPlatform() {
@@ -634,8 +620,11 @@ class SamplerThread : public Thread {
: Thread(Thread::Options("SamplerThread", kSamplerThreadStackSize)),
interval_(interval) {}
+ static void SetUp() { if (!mutex_) mutex_ = OS::CreateMutex(); }
+ static void TearDown() { delete mutex_; }
+
static void AddActiveSampler(Sampler* sampler) {
- ScopedLock lock(mutex_.Pointer());
+ ScopedLock lock(mutex_);
SamplerRegistry::AddActiveSampler(sampler);
if (instance_ == NULL) {
instance_ = new SamplerThread(sampler->interval());
@@ -646,7 +635,7 @@ class SamplerThread : public Thread {
}
static void RemoveActiveSampler(Sampler* sampler) {
- ScopedLock lock(mutex_.Pointer());
+ ScopedLock lock(mutex_);
SamplerRegistry::RemoveActiveSampler(sampler);
if (SamplerRegistry::GetState() == SamplerRegistry::HAS_NO_SAMPLERS) {
RuntimeProfiler::StopRuntimeProfilerThreadBeforeShutdown(instance_);
@@ -732,7 +721,7 @@ class SamplerThread : public Thread {
RuntimeProfilerRateLimiter rate_limiter_;
// Protects the process wide state below.
- static LazyMutex mutex_;
+ static Mutex* mutex_;
static SamplerThread* instance_;
private:
@@ -740,10 +729,29 @@ class SamplerThread : public Thread {
};
-LazyMutex SamplerThread::mutex_ = LAZY_MUTEX_INITIALIZER;
+Mutex* SamplerThread::mutex_ = NULL;
SamplerThread* SamplerThread::instance_ = NULL;
+void OS::SetUp() {
+ // Seed the random number generator.
+ // Convert the current time to a 64-bit integer first, before converting it
+ // to an unsigned. Going directly can cause an overflow and the seed to be
+ // set to all ones. The seed will be identical for different instances that
+ // call this setup code within the same millisecond.
+ uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis());
+ srandom(static_cast<unsigned int>(seed));
+ limit_mutex = CreateMutex();
+ SamplerThread::SetUp();
+}
+
+
+void OS::TearDown() {
+ SamplerThread::TearDown();
+ delete limit_mutex;
+}
+
+
Sampler::Sampler(Isolate* isolate, int interval)
: isolate_(isolate),
interval_(interval),
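
Editor's note: every platform file in this commit replaces the statically initialized LazyMutex with a plain Mutex* that is created in the new SamplerThread/SignalSender::SetUp() (called from OS::SetUp()) and destroyed in OS::TearDown(), so the mutex no longer leaks and no longer depends on static initialization order. A minimal sketch of the same lifecycle pattern using standard C++ (std::mutex stands in for V8's Mutex/ScopedLock; all names here are illustrative):

    #include <mutex>

    class SamplerThread {              // illustrative stand-in, not V8's class
     public:
      static void SetUp()    { if (!mutex_) mutex_ = new std::mutex; }
      static void TearDown() { delete mutex_; mutex_ = 0; }

      static void AddActiveSampler() {
        std::lock_guard<std::mutex> lock(*mutex_);  // was ScopedLock(mutex_)
        ++active_count_;
      }
      static void RemoveActiveSampler() {
        std::lock_guard<std::mutex> lock(*mutex_);
        --active_count_;
      }

     private:
      static std::mutex* mutex_;       // was LazyMutex / LAZY_MUTEX_INITIALIZER
      static int active_count_;
    };

    std::mutex* SamplerThread::mutex_ = 0;
    int SamplerThread::active_count_ = 0;

    namespace OS {
    void SetUp()    { SamplerThread::SetUp(); }     // once at VM startup
    void TearDown() { SamplerThread::TearDown(); }  // new hook, once at shutdown
    }

    int main() {
      OS::SetUp();
      SamplerThread::AddActiveSampler();
      SamplerThread::RemoveActiveSampler();
      OS::TearDown();   // the mutex is reclaimed instead of leaking
      return 0;
    }
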
diff --git a/deps/v8/src/platform-freebsd.cc b/deps/v8/src/platform-freebsd.cc
index 6a004ea7fa..511759c485 100644
--- a/deps/v8/src/platform-freebsd.cc
+++ b/deps/v8/src/platform-freebsd.cc
@@ -80,22 +80,8 @@ double ceiling(double x) {
static Mutex* limit_mutex = NULL;
-void OS::SetUp() {
- // Seed the random number generator.
- // Convert the current time to a 64-bit integer first, before converting it
- // to an unsigned. Going directly can cause an overflow and the seed to be
- // set to all ones. The seed will be identical for different instances that
- // call this setup code within the same millisecond.
- uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis());
- srandom(static_cast<unsigned int>(seed));
- limit_mutex = CreateMutex();
-}
-
-
void OS::PostSetUp() {
- // Math functions depend on CPU features therefore they are initialized after
- // CPU.
- MathSetup();
+ POSIXPostSetUp();
}
@@ -568,6 +554,7 @@ class FreeBSDMutex : public Mutex {
ASSERT(result == 0);
result = pthread_mutex_init(&mutex_, &attrs);
ASSERT(result == 0);
+ USE(result);
}
virtual ~FreeBSDMutex() { pthread_mutex_destroy(&mutex_); }
@@ -730,8 +717,11 @@ class SignalSender : public Thread {
: Thread(Thread::Options("SignalSender", kSignalSenderStackSize)),
interval_(interval) {}
+ static void SetUp() { if (!mutex_) mutex_ = OS::CreateMutex(); }
+ static void TearDown() { delete mutex_; }
+
static void AddActiveSampler(Sampler* sampler) {
- ScopedLock lock(mutex_.Pointer());
+ ScopedLock lock(mutex_);
SamplerRegistry::AddActiveSampler(sampler);
if (instance_ == NULL) {
// Install a signal handler.
@@ -751,7 +741,7 @@ class SignalSender : public Thread {
}
static void RemoveActiveSampler(Sampler* sampler) {
- ScopedLock lock(mutex_.Pointer());
+ ScopedLock lock(mutex_);
SamplerRegistry::RemoveActiveSampler(sampler);
if (SamplerRegistry::GetState() == SamplerRegistry::HAS_NO_SAMPLERS) {
RuntimeProfiler::StopRuntimeProfilerThreadBeforeShutdown(instance_);
@@ -844,7 +834,7 @@ class SignalSender : public Thread {
RuntimeProfilerRateLimiter rate_limiter_;
// Protects the process wide state below.
- static LazyMutex mutex_;
+ static Mutex* mutex_;
static SignalSender* instance_;
static bool signal_handler_installed_;
static struct sigaction old_signal_handler_;
@@ -853,12 +843,31 @@ class SignalSender : public Thread {
DISALLOW_COPY_AND_ASSIGN(SignalSender);
};
-LazyMutex SignalSender::mutex_ = LAZY_MUTEX_INITIALIZER;
+Mutex* SignalSender::mutex_ = NULL;
SignalSender* SignalSender::instance_ = NULL;
struct sigaction SignalSender::old_signal_handler_;
bool SignalSender::signal_handler_installed_ = false;
+void OS::SetUp() {
+ // Seed the random number generator.
+ // Convert the current time to a 64-bit integer first, before converting it
+ // to an unsigned. Going directly can cause an overflow and the seed to be
+ // set to all ones. The seed will be identical for different instances that
+ // call this setup code within the same millisecond.
+ uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis());
+ srandom(static_cast<unsigned int>(seed));
+ limit_mutex = CreateMutex();
+ SignalSender::SetUp();
+}
+
+
+void OS::TearDown() {
+ SignalSender::TearDown();
+ delete limit_mutex;
+}
+
+
Sampler::Sampler(Isolate* isolate, int interval)
: isolate_(isolate),
interval_(interval),
diff --git a/deps/v8/src/platform-linux.cc b/deps/v8/src/platform-linux.cc
index 9781407e17..f6db423e42 100644
--- a/deps/v8/src/platform-linux.cc
+++ b/deps/v8/src/platform-linux.cc
@@ -46,9 +46,9 @@
#include <sys/stat.h> // open
#include <fcntl.h> // open
#include <unistd.h> // sysconf
-#ifdef __GLIBC__
+#if defined(__GLIBC__) && !defined(__UCLIBC__)
#include <execinfo.h> // backtrace, backtrace_symbols
-#endif // def __GLIBC__
+#endif // defined(__GLIBC__) && !defined(__UCLIBC__)
#include <strings.h> // index
#include <errno.h>
#include <stdarg.h>
@@ -79,37 +79,8 @@ double ceiling(double x) {
static Mutex* limit_mutex = NULL;
-void OS::SetUp() {
- // Seed the random number generator. We preserve microsecond resolution.
- uint64_t seed = Ticks() ^ (getpid() << 16);
- srandom(static_cast<unsigned int>(seed));
- limit_mutex = CreateMutex();
-
-#ifdef __arm__
- // When running on ARM hardware check that the EABI used by V8 and
- // by the C code is the same.
- bool hard_float = OS::ArmUsingHardFloat();
- if (hard_float) {
-#if !USE_EABI_HARDFLOAT
- PrintF("ERROR: Binary compiled with -mfloat-abi=hard but without "
- "-DUSE_EABI_HARDFLOAT\n");
- exit(1);
-#endif
- } else {
-#if USE_EABI_HARDFLOAT
- PrintF("ERROR: Binary not compiled with -mfloat-abi=hard but with "
- "-DUSE_EABI_HARDFLOAT\n");
- exit(1);
-#endif
- }
-#endif
-}
-
-
void OS::PostSetUp() {
- // Math functions depend on CPU features therefore they are initialized after
- // CPU.
- MathSetup();
+ POSIXPostSetUp();
}
@@ -564,7 +535,7 @@ void OS::SignalCodeMovingGC() {
int OS::StackWalk(Vector<OS::StackFrame> frames) {
// backtrace is a glibc extension.
-#ifdef __GLIBC__
+#if defined(__GLIBC__) && !defined(__UCLIBC__)
int frames_size = frames.length();
ScopedVector<void*> addresses(frames_size);
@@ -589,9 +560,9 @@ int OS::StackWalk(Vector<OS::StackFrame> frames) {
free(symbols);
return frames_count;
-#else // ndef __GLIBC__
+#else // defined(__GLIBC__) && !defined(__UCLIBC__)
return 0;
-#endif // ndef __GLIBC__
+#endif // defined(__GLIBC__) && !defined(__UCLIBC__)
}
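
Editor's note: backtrace() and backtrace_symbols() come from glibc's <execinfo.h>; uClibc defines __GLIBC__ for compatibility but has historically not shipped that header, hence the stricter guard above. A stand-alone sketch of a stack walker under the same guard (the function name PrintStack is invented):

    #include <cstdio>
    #include <cstdlib>

    #if defined(__GLIBC__) && !defined(__UCLIBC__)
    #include <execinfo.h>   // backtrace, backtrace_symbols: glibc extensions
    #endif

    // Print up to `max_frames` raw stack frames, or report that the C library
    // cannot do it. Returns the number of frames printed.
    static int PrintStack(int max_frames) {
    #if defined(__GLIBC__) && !defined(__UCLIBC__)
      void** addresses = static_cast<void**>(calloc(max_frames, sizeof(void*)));
      int count = backtrace(addresses, max_frames);
      char** symbols = backtrace_symbols(addresses, count);
      if (symbols != NULL) {
        for (int i = 0; i < count; ++i) printf("%2d: %s\n", i, symbols[i]);
        free(symbols);            // free the array; the strings live inside it
      }
      free(addresses);
      return count;
    #else
      printf("stack walking not supported by this C library\n");
      return 0;
    #endif
    }

    int main() { return PrintStack(16) >= 0 ? 0 : 1; }
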
@@ -1103,6 +1074,9 @@ class SignalSender : public Thread {
vm_tgid_(getpid()),
interval_(interval) {}
+ static void SetUp() { if (!mutex_) mutex_ = OS::CreateMutex(); }
+ static void TearDown() { delete mutex_; }
+
static void InstallSignalHandler() {
struct sigaction sa;
sa.sa_sigaction = ProfilerSignalHandler;
@@ -1120,7 +1094,7 @@ class SignalSender : public Thread {
}
static void AddActiveSampler(Sampler* sampler) {
- ScopedLock lock(mutex_.Pointer());
+ ScopedLock lock(mutex_);
SamplerRegistry::AddActiveSampler(sampler);
if (instance_ == NULL) {
// Start a thread that will send SIGPROF signal to VM threads,
@@ -1133,7 +1107,7 @@ class SignalSender : public Thread {
}
static void RemoveActiveSampler(Sampler* sampler) {
- ScopedLock lock(mutex_.Pointer());
+ ScopedLock lock(mutex_);
SamplerRegistry::RemoveActiveSampler(sampler);
if (SamplerRegistry::GetState() == SamplerRegistry::HAS_NO_SAMPLERS) {
RuntimeProfiler::StopRuntimeProfilerThreadBeforeShutdown(instance_);
@@ -1236,7 +1210,7 @@ class SignalSender : public Thread {
RuntimeProfilerRateLimiter rate_limiter_;
// Protects the process wide state below.
- static LazyMutex mutex_;
+ static Mutex* mutex_;
static SignalSender* instance_;
static bool signal_handler_installed_;
static struct sigaction old_signal_handler_;
@@ -1246,12 +1220,46 @@ class SignalSender : public Thread {
};
-LazyMutex SignalSender::mutex_ = LAZY_MUTEX_INITIALIZER;
+Mutex* SignalSender::mutex_ = NULL;
SignalSender* SignalSender::instance_ = NULL;
struct sigaction SignalSender::old_signal_handler_;
bool SignalSender::signal_handler_installed_ = false;
+void OS::SetUp() {
+ // Seed the random number generator. We preserve microsecond resolution.
+ uint64_t seed = Ticks() ^ (getpid() << 16);
+ srandom(static_cast<unsigned int>(seed));
+ limit_mutex = CreateMutex();
+
+#ifdef __arm__
+ // When running on ARM hardware check that the EABI used by V8 and
+ // by the C code is the same.
+ bool hard_float = OS::ArmUsingHardFloat();
+ if (hard_float) {
+#if !USE_EABI_HARDFLOAT
+ PrintF("ERROR: Binary compiled with -mfloat-abi=hard but without "
+ "-DUSE_EABI_HARDFLOAT\n");
+ exit(1);
+#endif
+ } else {
+#if USE_EABI_HARDFLOAT
+ PrintF("ERROR: Binary not compiled with -mfloat-abi=hard but with "
+ "-DUSE_EABI_HARDFLOAT\n");
+ exit(1);
+#endif
+ }
+#endif
+ SignalSender::SetUp();
+}
+
+
+void OS::TearDown() {
+ SignalSender::TearDown();
+ delete limit_mutex;
+}
+
+
Sampler::Sampler(Isolate* isolate, int interval)
: isolate_(isolate),
interval_(interval),
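
Editor's note: the Linux OS::SetUp() keeps the consistency check between the float ABI the binary was compiled for and the USE_EABI_HARDFLOAT define V8 was built with; if the two disagree, VFP arguments would be passed in the wrong registers at the C++/generated-code boundary. A hedged sketch of the same idea using GCC's __ARM_PCS_VFP predefine as the detection mechanism (V8's OS::ArmUsingHardFloat() is not reproduced here, and the default-macro handling is an assumption):

    #include <cstdio>
    #include <cstdlib>

    // True when the compiler targets the hard-float variant of the ARM EABI
    // (GCC/Clang define __ARM_PCS_VFP for -mfloat-abi=hard).
    static bool CompiledWithHardFloat() {
    #if defined(__ARM_PCS_VFP)
      return true;
    #else
      return false;
    #endif
    }

    // Mirrors the build flag the embedder passed when compiling V8.
    #ifndef USE_EABI_HARDFLOAT
    #define USE_EABI_HARDFLOAT 0
    #endif

    static void CheckFloatAbiConsistency() {
      bool hard_float = CompiledWithHardFloat();
      if (hard_float && !USE_EABI_HARDFLOAT) {
        fprintf(stderr, "Built with -mfloat-abi=hard but USE_EABI_HARDFLOAT=0\n");
        exit(1);
      }
      if (!hard_float && USE_EABI_HARDFLOAT) {
        fprintf(stderr, "Built without -mfloat-abi=hard but USE_EABI_HARDFLOAT=1\n");
        exit(1);
      }
    }

    int main() {
      CheckFloatAbiConsistency();  // on non-ARM targets both sides are "soft"
      return 0;
    }
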
diff --git a/deps/v8/src/platform-macos.cc b/deps/v8/src/platform-macos.cc
index dbcd80e9f2..a937ed3a5c 100644
--- a/deps/v8/src/platform-macos.cc
+++ b/deps/v8/src/platform-macos.cc
@@ -94,18 +94,8 @@ double ceiling(double x) {
static Mutex* limit_mutex = NULL;
-void OS::SetUp() {
- // Seed the random number generator. We preserve microsecond resolution.
- uint64_t seed = Ticks() ^ (getpid() << 16);
- srandom(static_cast<unsigned int>(seed));
- limit_mutex = CreateMutex();
-}
-
-
void OS::PostSetUp() {
- // Math functions depend on CPU features therefore they are initialized after
- // CPU.
- MathSetup();
+ POSIXPostSetUp();
}
@@ -753,8 +743,11 @@ class SamplerThread : public Thread {
: Thread(Thread::Options("SamplerThread", kSamplerThreadStackSize)),
interval_(interval) {}
+ static void SetUp() { if (!mutex_) mutex_ = OS::CreateMutex(); }
+ static void TearDown() { delete mutex_; }
+
static void AddActiveSampler(Sampler* sampler) {
- ScopedLock lock(mutex_.Pointer());
+ ScopedLock lock(mutex_);
SamplerRegistry::AddActiveSampler(sampler);
if (instance_ == NULL) {
instance_ = new SamplerThread(sampler->interval());
@@ -765,7 +758,7 @@ class SamplerThread : public Thread {
}
static void RemoveActiveSampler(Sampler* sampler) {
- ScopedLock lock(mutex_.Pointer());
+ ScopedLock lock(mutex_);
SamplerRegistry::RemoveActiveSampler(sampler);
if (SamplerRegistry::GetState() == SamplerRegistry::HAS_NO_SAMPLERS) {
RuntimeProfiler::StopRuntimeProfilerThreadBeforeShutdown(instance_);
@@ -862,7 +855,7 @@ class SamplerThread : public Thread {
RuntimeProfilerRateLimiter rate_limiter_;
// Protects the process wide state below.
- static LazyMutex mutex_;
+ static Mutex* mutex_;
static SamplerThread* instance_;
private:
@@ -872,10 +865,25 @@ class SamplerThread : public Thread {
#undef REGISTER_FIELD
-LazyMutex SamplerThread::mutex_ = LAZY_MUTEX_INITIALIZER;
+Mutex* SamplerThread::mutex_ = NULL;
SamplerThread* SamplerThread::instance_ = NULL;
+void OS::SetUp() {
+ // Seed the random number generator. We preserve microsecond resolution.
+ uint64_t seed = Ticks() ^ (getpid() << 16);
+ srandom(static_cast<unsigned int>(seed));
+ limit_mutex = CreateMutex();
+ SamplerThread::SetUp();
+}
+
+
+void OS::TearDown() {
+ SamplerThread::TearDown();
+ delete limit_mutex;
+}
+
+
Sampler::Sampler(Isolate* isolate, int interval)
: isolate_(isolate),
interval_(interval),
diff --git a/deps/v8/src/platform-nullos.cc b/deps/v8/src/platform-nullos.cc
index 42799dbe7a..679ef8e89e 100644
--- a/deps/v8/src/platform-nullos.cc
+++ b/deps/v8/src/platform-nullos.cc
@@ -91,6 +91,11 @@ void OS::PostSetUp() {
}
+void OS::TearDown() {
+ UNIMPLEMENTED();
+}
+
+
// Returns the accumulated user time for thread.
int OS::GetUserTime(uint32_t* secs, uint32_t* usecs) {
UNIMPLEMENTED();
diff --git a/deps/v8/src/platform-openbsd.cc b/deps/v8/src/platform-openbsd.cc
index 6a06e3e536..ba33a8444e 100644
--- a/deps/v8/src/platform-openbsd.cc
+++ b/deps/v8/src/platform-openbsd.cc
@@ -100,18 +100,8 @@ static void* GetRandomMmapAddr() {
}
-void OS::SetUp() {
- // Seed the random number generator. We preserve microsecond resolution.
- uint64_t seed = Ticks() ^ (getpid() << 16);
- srandom(static_cast<unsigned int>(seed));
- limit_mutex = CreateMutex();
-}
-
-
void OS::PostSetUp() {
- // Math functions depend on CPU features therefore they are initialized after
- // CPU.
- MathSetup();
+ POSIXPostSetUp();
}
@@ -803,6 +793,9 @@ class SignalSender : public Thread {
vm_tgid_(getpid()),
interval_(interval) {}
+ static void SetUp() { if (!mutex_) mutex_ = OS::CreateMutex(); }
+ static void TearDown() { delete mutex_; }
+
static void InstallSignalHandler() {
struct sigaction sa;
sa.sa_sigaction = ProfilerSignalHandler;
@@ -820,7 +813,7 @@ class SignalSender : public Thread {
}
static void AddActiveSampler(Sampler* sampler) {
- ScopedLock lock(mutex_.Pointer());
+ ScopedLock lock(mutex_);
SamplerRegistry::AddActiveSampler(sampler);
if (instance_ == NULL) {
// Start a thread that will send SIGPROF signal to VM threads,
@@ -833,7 +826,7 @@ class SignalSender : public Thread {
}
static void RemoveActiveSampler(Sampler* sampler) {
- ScopedLock lock(mutex_.Pointer());
+ ScopedLock lock(mutex_);
SamplerRegistry::RemoveActiveSampler(sampler);
if (SamplerRegistry::GetState() == SamplerRegistry::HAS_NO_SAMPLERS) {
RuntimeProfiler::StopRuntimeProfilerThreadBeforeShutdown(instance_);
@@ -927,7 +920,7 @@ class SignalSender : public Thread {
RuntimeProfilerRateLimiter rate_limiter_;
// Protects the process wide state below.
- static LazyMutex mutex_;
+ static Mutex* mutex_;
static SignalSender* instance_;
static bool signal_handler_installed_;
static struct sigaction old_signal_handler_;
@@ -937,12 +930,27 @@ class SignalSender : public Thread {
};
-LazyMutex SignalSender::mutex_ = LAZY_MUTEX_INITIALIZER;
+Mutex* SignalSender::mutex_ = NULL;
SignalSender* SignalSender::instance_ = NULL;
struct sigaction SignalSender::old_signal_handler_;
bool SignalSender::signal_handler_installed_ = false;
+void OS::SetUp() {
+ // Seed the random number generator. We preserve microsecond resolution.
+ uint64_t seed = Ticks() ^ (getpid() << 16);
+ srandom(static_cast<unsigned int>(seed));
+ limit_mutex = CreateMutex();
+ SignalSender::SetUp();
+}
+
+
+void OS::TearDown() {
+ SignalSender::TearDown();
+ delete limit_mutex;
+}
+
+
Sampler::Sampler(Isolate* isolate, int interval)
: isolate_(isolate),
interval_(interval),
diff --git a/deps/v8/src/platform-posix.cc b/deps/v8/src/platform-posix.cc
index 59066ea39e..d942d78a55 100644
--- a/deps/v8/src/platform-posix.cc
+++ b/deps/v8/src/platform-posix.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -147,15 +147,6 @@ UNARY_MATH_FUNCTION(sqrt, CreateSqrtFunction())
#undef MATH_FUNCTION
-void MathSetup() {
- init_fast_sin_function();
- init_fast_cos_function();
- init_fast_tan_function();
- init_fast_log_function();
- init_fast_sqrt_function();
-}
-
-
double OS::nan_value() {
// NAN from math.h is defined in C99 and not in POSIX.
return NAN;
@@ -313,20 +304,11 @@ int OS::VSNPrintF(Vector<char> str,
#if defined(V8_TARGET_ARCH_IA32)
static OS::MemCopyFunction memcopy_function = NULL;
-static LazyMutex memcopy_function_mutex = LAZY_MUTEX_INITIALIZER;
// Defined in codegen-ia32.cc.
OS::MemCopyFunction CreateMemCopyFunction();
// Copy memory area to disjoint memory area.
void OS::MemCopy(void* dest, const void* src, size_t size) {
- if (memcopy_function == NULL) {
- ScopedLock lock(memcopy_function_mutex.Pointer());
- if (memcopy_function == NULL) {
- OS::MemCopyFunction temp = CreateMemCopyFunction();
- MemoryBarrier();
- memcopy_function = temp;
- }
- }
// Note: here we rely on dependent reads being ordered. This is true
// on all architectures we currently support.
(*memcopy_function)(dest, src, size);
@@ -336,6 +318,18 @@ void OS::MemCopy(void* dest, const void* src, size_t size) {
}
#endif // V8_TARGET_ARCH_IA32
+
+void POSIXPostSetUp() {
+#if defined(V8_TARGET_ARCH_IA32)
+ memcopy_function = CreateMemCopyFunction();
+#endif
+ init_fast_sin_function();
+ init_fast_cos_function();
+ init_fast_tan_function();
+ init_fast_log_function();
+ init_fast_sqrt_function();
+}
+
// ----------------------------------------------------------------------------
// POSIX string support.
//
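
Editor's note: the IA-32 memcpy stub and the fast math functions used to be created lazily behind a double-checked lock (or in MathSetup()); POSIXPostSetUp() now creates them all once, on a single thread during startup, so the hot paths lose the mutex and the memory barrier. A minimal sketch of that trade-off with an illustrative function pointer (CreateFastSqrt and the surrounding names are invented):

    #include <cassert>
    #include <cmath>
    #include <cstddef>

    // The "generated" implementation; in V8 this would be JIT-emitted code
    // specialized for the CPU features detected at startup.
    static double GenericSqrt(double x) { return std::sqrt(x); }

    typedef double (*UnaryMathFunction)(double);

    // Before: each call site did `if (ptr == NULL) { lock; create; barrier; }`.
    // After: the pointer is filled in exactly once, before any caller runs.
    static UnaryMathFunction fast_sqrt_function = NULL;

    static UnaryMathFunction CreateFastSqrt() {   // invented stand-in
      return &GenericSqrt;
    }

    // Analogue of POSIXPostSetUp(): called once, single-threaded, after CPU
    // feature detection and before the VM executes any user code.
    static void PostSetUp() {
      fast_sqrt_function = CreateFastSqrt();
    }

    static double fast_sqrt(double x) {
      // No lazy-init check, no lock, no MemoryBarrier() on the fast path.
      return fast_sqrt_function(x);
    }

    int main() {
      PostSetUp();
      assert(fast_sqrt(9.0) == 3.0);
      return 0;
    }
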
@@ -427,7 +421,11 @@ Socket* POSIXSocket::Accept() const {
return NULL;
}
- int socket = accept(socket_, NULL, NULL);
+ int socket;
+ do {
+ socket = accept(socket_, NULL, NULL);
+ } while (socket == -1 && errno == EINTR);
+
if (socket == -1) {
return NULL;
} else {
@@ -454,7 +452,9 @@ bool POSIXSocket::Connect(const char* host, const char* port) {
}
// Connect.
- status = connect(socket_, result->ai_addr, result->ai_addrlen);
+ do {
+ status = connect(socket_, result->ai_addr, result->ai_addrlen);
+ } while (status == -1 && errno == EINTR);
freeaddrinfo(result);
return status == 0;
}
@@ -473,14 +473,29 @@ bool POSIXSocket::Shutdown() {
int POSIXSocket::Send(const char* data, int len) const {
- int status = send(socket_, data, len, 0);
- return status;
+ if (len <= 0) return 0;
+ int written = 0;
+ while (written < len) {
+ int status = send(socket_, data + written, len - written, 0);
+ if (status == 0) {
+ break;
+ } else if (status > 0) {
+ written += status;
+ } else if (errno != EINTR) {
+ return 0;
+ }
+ }
+ return written;
}
int POSIXSocket::Receive(char* data, int len) const {
- int status = recv(socket_, data, len, 0);
- return status;
+ if (len <= 0) return 0;
+ int status;
+ do {
+ status = recv(socket_, data, len, 0);
+ } while (status == -1 && errno == EINTR);
+ return (status < 0) ? 0 : status;
}
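
Editor's note: the socket changes address two classic POSIX pitfalls: accept/connect/recv can fail spuriously with EINTR when a signal (here, the SIGPROF profiler signal) interrupts them, and send may write fewer bytes than requested. A self-contained sketch of the two helper patterns (function names are illustrative, not V8's):

    #include <cerrno>
    #include <cstddef>
    #include <sys/socket.h>
    #include <sys/types.h>

    // Retry a recv() interrupted by a signal; report failure as 0, matching
    // the "Return 0 on failure" contract added to platform.h below.
    static int ReceiveFully(int fd, char* data, int len) {
      if (len <= 0) return 0;
      ssize_t status;
      do {
        status = recv(fd, data, static_cast<size_t>(len), 0);
      } while (status == -1 && errno == EINTR);
      return status < 0 ? 0 : static_cast<int>(status);
    }

    // Keep calling send() until every byte is out or a real error occurs;
    // a short write is not an error, it just means "call again".
    static int SendAll(int fd, const char* data, int len) {
      if (len <= 0) return 0;
      int written = 0;
      while (written < len) {
        ssize_t status =
            send(fd, data + written, static_cast<size_t>(len - written), 0);
        if (status == 0) break;                 // peer closed the connection
        if (status > 0) written += static_cast<int>(status);
        else if (errno != EINTR) return 0;      // real error; EINTR => retry
      }
      return written;
    }

    int main() {
      // Compile-only usage sketch; a real program would pass connected sockets.
      (void)&ReceiveFully;
      (void)&SendAll;
      return 0;
    }
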
diff --git a/deps/v8/src/platform-posix.h b/deps/v8/src/platform-posix.h
index 4ae0e526fb..7a982ed2ef 100644
--- a/deps/v8/src/platform-posix.h
+++ b/deps/v8/src/platform-posix.h
@@ -31,9 +31,8 @@
namespace v8 {
namespace internal {
-// Used by platform implementation files during OS::PostSetUp() to initialize
-// the math functions.
-void MathSetup();
+// Used by platform implementation files during OS::PostSetUp().
+void POSIXPostSetUp();
} } // namespace v8::internal
diff --git a/deps/v8/src/platform-solaris.cc b/deps/v8/src/platform-solaris.cc
index e044dbccad..4248ea214f 100644
--- a/deps/v8/src/platform-solaris.cc
+++ b/deps/v8/src/platform-solaris.cc
@@ -91,22 +91,10 @@ double ceiling(double x) {
static Mutex* limit_mutex = NULL;
-void OS::SetUp() {
- // Seed the random number generator.
- // Convert the current time to a 64-bit integer first, before converting it
- // to an unsigned. Going directly will cause an overflow and the seed to be
- // set to all ones. The seed will be identical for different instances that
- // call this setup code within the same millisecond.
- uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis());
- srandom(static_cast<unsigned int>(seed));
- limit_mutex = CreateMutex();
-}
void OS::PostSetUp() {
- // Math functions depend on CPU features therefore they are initialized after
- // CPU.
- MathSetup();
+ POSIXPostSetUp();
}
@@ -499,12 +487,10 @@ void Thread::set_name(const char* name) {
void Thread::Start() {
- pthread_attr_t* attr_ptr = NULL;
pthread_attr_t attr;
if (stack_size_ > 0) {
pthread_attr_init(&attr);
pthread_attr_setstacksize(&attr, static_cast<size_t>(stack_size_));
- attr_ptr = &attr;
}
pthread_create(&data_->thread_, NULL, ThreadEntry, this);
ASSERT(data_->thread_ != kNoThread);
@@ -724,6 +710,9 @@ class SignalSender : public Thread {
: Thread(Thread::Options("SignalSender", kSignalSenderStackSize)),
interval_(interval) {}
+ static void SetUp() { if (!mutex_) mutex_ = OS::CreateMutex(); }
+ static void TearDown() { delete mutex_; }
+
static void InstallSignalHandler() {
struct sigaction sa;
sa.sa_sigaction = ProfilerSignalHandler;
@@ -741,7 +730,7 @@ class SignalSender : public Thread {
}
static void AddActiveSampler(Sampler* sampler) {
- ScopedLock lock(mutex_.Pointer());
+ ScopedLock lock(mutex_);
SamplerRegistry::AddActiveSampler(sampler);
if (instance_ == NULL) {
// Start a thread that will send SIGPROF signal to VM threads,
@@ -754,7 +743,7 @@ class SignalSender : public Thread {
}
static void RemoveActiveSampler(Sampler* sampler) {
- ScopedLock lock(mutex_.Pointer());
+ ScopedLock lock(mutex_);
SamplerRegistry::RemoveActiveSampler(sampler);
if (SamplerRegistry::GetState() == SamplerRegistry::HAS_NO_SAMPLERS) {
RuntimeProfiler::StopRuntimeProfilerThreadBeforeShutdown(instance_);
@@ -848,7 +837,7 @@ class SignalSender : public Thread {
RuntimeProfilerRateLimiter rate_limiter_;
// Protects the process wide state below.
- static LazyMutex mutex_;
+ static Mutex* mutex_;
static SignalSender* instance_;
static bool signal_handler_installed_;
static struct sigaction old_signal_handler_;
@@ -857,12 +846,31 @@ class SignalSender : public Thread {
DISALLOW_COPY_AND_ASSIGN(SignalSender);
};
-LazyMutex SignalSender::mutex_ = LAZY_MUTEX_INITIALIZER;
+Mutex* SignalSender::mutex_ = NULL;
SignalSender* SignalSender::instance_ = NULL;
struct sigaction SignalSender::old_signal_handler_;
bool SignalSender::signal_handler_installed_ = false;
+void OS::SetUp() {
+ // Seed the random number generator.
+ // Convert the current time to a 64-bit integer first, before converting it
+ // to an unsigned. Going directly will cause an overflow and the seed to be
+ // set to all ones. The seed will be identical for different instances that
+ // call this setup code within the same millisecond.
+ uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis());
+ srandom(static_cast<unsigned int>(seed));
+ limit_mutex = CreateMutex();
+ SignalSender::SetUp();
+}
+
+
+void OS::TearDown() {
+ SignalSender::TearDown();
+ delete limit_mutex;
+}
+
+
Sampler::Sampler(Isolate* isolate, int interval)
: isolate_(isolate),
interval_(interval),
diff --git a/deps/v8/src/platform-win32.cc b/deps/v8/src/platform-win32.cc
index c79f44217a..2473949dec 100644
--- a/deps/v8/src/platform-win32.cc
+++ b/deps/v8/src/platform-win32.cc
@@ -51,6 +51,22 @@ int strncasecmp(const char* s1, const char* s2, int n) {
// the Microsoft Visual Studio C++ CRT.
#ifdef __MINGW32__
+
+#ifndef __MINGW64_VERSION_MAJOR
+
+#define _TRUNCATE 0
+#define STRUNCATE 80
+
+inline void MemoryBarrier() {
+ int barrier = 0;
+ __asm__ __volatile__("xchgl %%eax,%0 ":"=r" (barrier));
+}
+
+#endif // __MINGW64_VERSION_MAJOR
+
+
+#ifndef MINGW_HAS_SECURE_API
+
int localtime_s(tm* out_tm, const time_t* time) {
tm* posix_local_time_struct = localtime(time);
if (posix_local_time_struct == NULL) return 1;
@@ -64,21 +80,6 @@ int fopen_s(FILE** pFile, const char* filename, const char* mode) {
return *pFile != NULL ? 0 : 1;
}
-
-#ifndef __MINGW64_VERSION_MAJOR
-
-// Not sure this the correct interpretation of _mkgmtime
-time_t _mkgmtime(tm* timeptr) {
- return mktime(timeptr);
-}
-
-
-#define _TRUNCATE 0
-#define STRUNCATE 80
-
-#endif // __MINGW64_VERSION_MAJOR
-
-
int _vsnprintf_s(char* buffer, size_t sizeOfBuffer, size_t count,
const char* format, va_list argptr) {
ASSERT(count == _TRUNCATE);
@@ -112,16 +113,7 @@ int strncpy_s(char* dest, size_t dest_size, const char* source, size_t count) {
return 0;
}
-
-#ifndef __MINGW64_VERSION_MAJOR
-
-inline void MemoryBarrier() {
- int barrier = 0;
- __asm__ __volatile__("xchgl %%eax,%0 ":"=r" (barrier));
-}
-
-#endif // __MINGW64_VERSION_MAJOR
-
+#endif // MINGW_HAS_SECURE_API
#endif // __MINGW32__
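
Editor's note: the MinGW shims are regrouped so that MemoryBarrier() and the _TRUNCATE/STRUNCATE constants are only defined for 32-bit mingw.org toolchains (no __MINGW64_VERSION_MAJOR), while the *_s CRT replacements are only defined when MINGW_HAS_SECURE_API is absent. The barrier idiom relies on xchg against memory carrying an implicit LOCK prefix on x86, which serializes all earlier loads and stores against later ones. A hedged, stand-alone illustration (GCC-style inline assembly, x86/x86-64 only; other targets fall through to a no-op):

    #if defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__))

    static inline void MemoryBarrierShim() {
      int guard = 0;
      // xchg with a memory operand is implicitly locked on x86, so it acts
      // as a full memory fence; "memory" also stops compiler reordering.
      __asm__ __volatile__("xchgl %%eax, %0"
                           : "+m"(guard)
                           :
                           : "eax", "memory");
    }

    int main() {
      MemoryBarrierShim();
      return 0;
    }

    #else
    int main() { return 0; }  // other targets rely on the platform's barrier
    #endif
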
@@ -149,20 +141,11 @@ static Mutex* limit_mutex = NULL;
#if defined(V8_TARGET_ARCH_IA32)
static OS::MemCopyFunction memcopy_function = NULL;
-static LazyMutex memcopy_function_mutex = LAZY_MUTEX_INITIALIZER;
// Defined in codegen-ia32.cc.
OS::MemCopyFunction CreateMemCopyFunction();
// Copy memory area to disjoint memory area.
void OS::MemCopy(void* dest, const void* src, size_t size) {
- if (memcopy_function == NULL) {
- ScopedLock lock(memcopy_function_mutex.Pointer());
- if (memcopy_function == NULL) {
- OS::MemCopyFunction temp = CreateMemCopyFunction();
- MemoryBarrier();
- memcopy_function = temp;
- }
- }
// Note: here we rely on dependent reads being ordered. This is true
// on all architectures we currently support.
(*memcopy_function)(dest, src, size);
@@ -477,6 +460,9 @@ void Time::SetToCurrentTime() {
// Check if we need to resync due to elapsed time.
needs_resync |= (time_now.t_ - init_time.t_) > kMaxClockElapsedTime;
+ // Check if we need to resync due to backwards time change.
+ needs_resync |= time_now.t_ < init_time.t_;
+
// Resync the clock if necessary.
if (needs_resync) {
GetSystemTimeAsFileTime(&init_time.ft_);
@@ -518,11 +504,14 @@ int64_t Time::LocalOffset() {
// Convert to local time, as struct with fields for day, hour, year, etc.
tm posix_local_time_struct;
if (localtime_s(&posix_local_time_struct, &posix_time)) return 0;
- // Convert local time in struct to POSIX time as if it were a UTC time.
- time_t local_posix_time = _mkgmtime(&posix_local_time_struct);
- Time localtime(1000.0 * local_posix_time);
- return localtime.Diff(&rounded_to_second);
+ if (posix_local_time_struct.tm_isdst > 0) {
+ return (tzinfo_.Bias + tzinfo_.DaylightBias) * -kMsPerMinute;
+ } else if (posix_local_time_struct.tm_isdst == 0) {
+ return (tzinfo_.Bias + tzinfo_.StandardBias) * -kMsPerMinute;
+ } else {
+ return tzinfo_.Bias * -kMsPerMinute;
+ }
}
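
Editor's note: instead of round-tripping through _mkgmtime (which some MinGW variants lack), the new LocalOffset() derives the offset directly from the cached TIME_ZONE_INFORMATION. Windows stores Bias values in minutes west of UTC, so the local offset in milliseconds is -(Bias + DaylightBias or StandardBias) * 60000; for example, US Pacific time has Bias = 480 and DaylightBias = -60, giving -(480 - 60) * 60000 = -25,200,000 ms, i.e. UTC-7 during DST. A hedged, Windows-only sketch of the same arithmetic (kMsPerMinute is spelled out here):

    #include <windows.h>
    #include <cstdio>

    static const long long kMsPerMinute = 60 * 1000;

    // Local-time offset from UTC in milliseconds, derived from the Bias
    // fields (minutes west of UTC) rather than from _mkgmtime.
    static long long LocalOffsetMs(const TIME_ZONE_INFORMATION& tz, int is_dst) {
      if (is_dst > 0)  return (tz.Bias + tz.DaylightBias) * -kMsPerMinute;
      if (is_dst == 0) return (tz.Bias + tz.StandardBias) * -kMsPerMinute;
      return tz.Bias * -kMsPerMinute;   // DST state unknown
    }

    int main() {
      TIME_ZONE_INFORMATION tz;
      DWORD state = GetTimeZoneInformation(&tz);
      int is_dst = (state == TIME_ZONE_ID_DAYLIGHT) ? 1 : 0;
      printf("local offset: %lld ms\n", LocalOffsetMs(tz, is_dst));
      return 0;
    }
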
@@ -565,22 +554,13 @@ char* Time::LocalTimezone() {
}
-void OS::SetUp() {
- // Seed the random number generator.
- // Convert the current time to a 64-bit integer first, before converting it
- // to an unsigned. Going directly can cause an overflow and the seed to be
- // set to all ones. The seed will be identical for different instances that
- // call this setup code within the same millisecond.
- uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis());
- srand(static_cast<unsigned int>(seed));
- limit_mutex = CreateMutex();
-}
-
-
void OS::PostSetUp() {
// Math functions depend on CPU features therefore they are initialized after
// CPU.
MathSetup();
+#if defined(V8_TARGET_ARCH_IA32)
+ memcopy_function = CreateMemCopyFunction();
+#endif
}
@@ -1868,14 +1848,26 @@ bool Win32Socket::Shutdown() {
int Win32Socket::Send(const char* data, int len) const {
- int status = send(socket_, data, len, 0);
- return status;
+ if (len <= 0) return 0;
+ int written = 0;
+ while (written < len) {
+ int status = send(socket_, data + written, len - written, 0);
+ if (status == 0) {
+ break;
+ } else if (status > 0) {
+ written += status;
+ } else {
+ return 0;
+ }
+ }
+ return written;
}
int Win32Socket::Receive(char* data, int len) const {
+ if (len <= 0) return 0;
int status = recv(socket_, data, len, 0);
- return status;
+ return (status == SOCKET_ERROR) ? 0 : status;
}
@@ -1969,8 +1961,11 @@ class SamplerThread : public Thread {
: Thread(Thread::Options("SamplerThread", kSamplerThreadStackSize)),
interval_(interval) {}
+ static void SetUp() { if (!mutex_) mutex_ = OS::CreateMutex(); }
+ static void TearDown() { delete mutex_; }
+
static void AddActiveSampler(Sampler* sampler) {
- ScopedLock lock(mutex_.Pointer());
+ ScopedLock lock(mutex_);
SamplerRegistry::AddActiveSampler(sampler);
if (instance_ == NULL) {
instance_ = new SamplerThread(sampler->interval());
@@ -1981,7 +1976,7 @@ class SamplerThread : public Thread {
}
static void RemoveActiveSampler(Sampler* sampler) {
- ScopedLock lock(mutex_.Pointer());
+ ScopedLock lock(mutex_);
SamplerRegistry::RemoveActiveSampler(sampler);
if (SamplerRegistry::GetState() == SamplerRegistry::HAS_NO_SAMPLERS) {
RuntimeProfiler::StopRuntimeProfilerThreadBeforeShutdown(instance_);
@@ -2067,7 +2062,7 @@ class SamplerThread : public Thread {
RuntimeProfilerRateLimiter rate_limiter_;
// Protects the process wide state below.
- static LazyMutex mutex_;
+ static Mutex* mutex_;
static SamplerThread* instance_;
private:
@@ -2075,10 +2070,29 @@ class SamplerThread : public Thread {
};
-LazyMutex SamplerThread::mutex_ = LAZY_MUTEX_INITIALIZER;
+Mutex* SamplerThread::mutex_ = NULL;
SamplerThread* SamplerThread::instance_ = NULL;
+void OS::SetUp() {
+ // Seed the random number generator.
+ // Convert the current time to a 64-bit integer first, before converting it
+ // to an unsigned. Going directly can cause an overflow and the seed to be
+ // set to all ones. The seed will be identical for different instances that
+ // call this setup code within the same millisecond.
+ uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis());
+ srand(static_cast<unsigned int>(seed));
+ limit_mutex = CreateMutex();
+ SamplerThread::SetUp();
+}
+
+
+void OS::TearDown() {
+ SamplerThread::TearDown();
+ delete limit_mutex;
+}
+
+
Sampler::Sampler(Isolate* isolate, int interval)
: isolate_(isolate),
interval_(interval),
diff --git a/deps/v8/src/platform.h b/deps/v8/src/platform.h
index 3b2aa3c8ba..a2ddf7a625 100644
--- a/deps/v8/src/platform.h
+++ b/deps/v8/src/platform.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -123,6 +123,9 @@ class OS {
// called after CPU initialization.
static void PostSetUp();
+ // Clean up platform-OS-related things. Called once at VM shutdown.
+ static void TearDown();
+
// Returns the accumulated user time for thread. This routine
// can be used for profiling. The implementation should
// strive for high-precision timer resolution, preferable
@@ -650,6 +653,7 @@ class Socket {
virtual bool Shutdown() = 0;
// Data Transimission
+ // Return 0 on failure.
virtual int Send(const char* data, int len) const = 0;
virtual int Receive(char* data, int len) const = 0;
diff --git a/deps/v8/src/preparser.cc b/deps/v8/src/preparser.cc
index 20d3b9c59c..0c17eecd6a 100644
--- a/deps/v8/src/preparser.cc
+++ b/deps/v8/src/preparser.cc
@@ -581,9 +581,8 @@ PreParser::Statement PreParser::ParseWithStatement(bool* ok) {
ParseExpression(true, CHECK_OK);
Expect(i::Token::RPAREN, CHECK_OK);
- scope_->EnterWith();
+ Scope::InsideWith iw(scope_);
ParseStatement(CHECK_OK);
- scope_->LeaveWith();
return Statement::Default();
}
@@ -749,10 +748,9 @@ PreParser::Statement PreParser::ParseTryStatement(bool* ok) {
return Statement::Default();
}
Expect(i::Token::RPAREN, CHECK_OK);
- scope_->EnterWith();
- ParseBlock(ok);
- scope_->LeaveWith();
- if (!*ok) Statement::Default();
+ { Scope::InsideWith iw(scope_);
+ ParseBlock(CHECK_OK);
+ }
catch_or_finally_seen = true;
}
if (peek() == i::Token::FINALLY) {
diff --git a/deps/v8/src/preparser.h b/deps/v8/src/preparser.h
index f3a43475df..13261f7a5b 100644
--- a/deps/v8/src/preparser.h
+++ b/deps/v8/src/preparser.h
@@ -470,8 +470,19 @@ class PreParser {
void set_language_mode(i::LanguageMode language_mode) {
language_mode_ = language_mode;
}
- void EnterWith() { with_nesting_count_++; }
- void LeaveWith() { with_nesting_count_--; }
+
+ class InsideWith {
+ public:
+ explicit InsideWith(Scope* scope) : scope_(scope) {
+ scope->with_nesting_count_++;
+ }
+
+ ~InsideWith() { scope_->with_nesting_count_--; }
+
+ private:
+ Scope* scope_;
+ DISALLOW_COPY_AND_ASSIGN(InsideWith);
+ };
private:
Scope** const variable_;
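
Editor's note: replacing the EnterWith()/LeaveWith() pair with a Scope::InsideWith object makes the with-nesting counter safe across exceptions and early returns; the destructor always undoes the increment, which also fixes the path in ParseTryStatement above where the old code could bail out without calling LeaveWith(). A minimal stand-alone sketch of the guard (the Scope here is a toy, not the preparser's):

    #include <cassert>

    class Scope {                       // toy stand-in for the preparser scope
     public:
      Scope() : with_nesting_count_(0) {}
      int with_nesting_count() const { return with_nesting_count_; }

      class InsideWith {
       public:
        explicit InsideWith(Scope* scope) : scope_(scope) {
          ++scope_->with_nesting_count_;
        }
        ~InsideWith() { --scope_->with_nesting_count_; }   // always runs

       private:
        Scope* scope_;
        InsideWith(const InsideWith&);           // not copyable
        void operator=(const InsideWith&);
      };

     private:
      int with_nesting_count_;
    };

    static void ParseSomethingInsideWith(Scope* scope, bool fail_early) {
      Scope::InsideWith iw(scope);      // was: scope->EnterWith()
      if (fail_early) return;           // old code would have skipped LeaveWith()
      // ... parse the body ...
    }                                   // ~InsideWith() restores the counter

    int main() {
      Scope scope;
      ParseSomethingInsideWith(&scope, true);
      ParseSomethingInsideWith(&scope, false);
      assert(scope.with_nesting_count() == 0);   // balanced on every path
      return 0;
    }
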
diff --git a/deps/v8/src/profile-generator-inl.h b/deps/v8/src/profile-generator-inl.h
index 65369befdf..6c64350e8d 100644
--- a/deps/v8/src/profile-generator-inl.h
+++ b/deps/v8/src/profile-generator-inl.h
@@ -95,6 +95,35 @@ CodeEntry* ProfileGenerator::EntryForVMState(StateTag tag) {
}
+HeapEntry* HeapGraphEdge::from() const {
+ return &snapshot()->entries()[from_index_];
+}
+
+
+HeapSnapshot* HeapGraphEdge::snapshot() const {
+ return to_entry_->snapshot();
+}
+
+
+int HeapEntry::index() const {
+ return static_cast<int>(this - &snapshot_->entries().first());
+}
+
+
+int HeapEntry::set_children_index(int index) {
+ children_index_ = index;
+ int next_index = index + children_count_;
+ children_count_ = 0;
+ return next_index;
+}
+
+
+HeapGraphEdge** HeapEntry::children_arr() {
+ ASSERT(children_index_ >= 0);
+ return &snapshot_->children()[children_index_];
+}
+
+
SnapshotObjectId HeapObjectsMap::GetNthGcSubrootId(int delta) {
return kGcRootsFirstSubrootId + delta * kObjectIdStep;
}
diff --git a/deps/v8/src/profile-generator.cc b/deps/v8/src/profile-generator.cc
index 2d0984ecbf..ca19f4aaaf 100644
--- a/deps/v8/src/profile-generator.cc
+++ b/deps/v8/src/profile-generator.cc
@@ -34,6 +34,7 @@
#include "scopeinfo.h"
#include "unicode.h"
#include "zone-inl.h"
+#include "debug.h"
namespace v8 {
namespace internal {
@@ -168,6 +169,15 @@ const char* StringsStorage::GetName(int index) {
}
+size_t StringsStorage::GetUsedMemorySize() const {
+ size_t size = sizeof(*this);
+ size += sizeof(HashMap::Entry) * names_.capacity();
+ for (HashMap::Entry* p = names_.Start(); p != NULL; p = names_.Next(p)) {
+ size += strlen(reinterpret_cast<const char*>(p->value)) + 1;
+ }
+ return size;
+}
+
const char* const CodeEntry::kEmptyNamePrefix = "";
@@ -930,81 +940,63 @@ void ProfileGenerator::RecordTickSample(const TickSample& sample) {
}
-void HeapGraphEdge::Init(
- int child_index, Type type, const char* name, HeapEntry* to) {
+HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
+ : type_(type),
+ from_index_(from),
+ to_index_(to),
+ name_(name) {
ASSERT(type == kContextVariable
- || type == kProperty
- || type == kInternal
- || type == kShortcut);
- child_index_ = child_index;
- type_ = type;
- name_ = name;
- to_ = to;
+ || type == kProperty
+ || type == kInternal
+ || type == kShortcut);
}
-void HeapGraphEdge::Init(int child_index, Type type, int index, HeapEntry* to) {
+HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
+ : type_(type),
+ from_index_(from),
+ to_index_(to),
+ index_(index) {
ASSERT(type == kElement || type == kHidden || type == kWeak);
- child_index_ = child_index;
- type_ = type;
- index_ = index;
- to_ = to;
}
-void HeapGraphEdge::Init(int child_index, int index, HeapEntry* to) {
- Init(child_index, kElement, index, to);
+void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
+ to_entry_ = &snapshot->entries()[to_index_];
}
-HeapEntry* HeapGraphEdge::From() {
- return reinterpret_cast<HeapEntry*>(this - child_index_) - 1;
-}
-
+const int HeapEntry::kNoEntry = -1;
-void HeapEntry::Init(HeapSnapshot* snapshot,
+HeapEntry::HeapEntry(HeapSnapshot* snapshot,
Type type,
const char* name,
SnapshotObjectId id,
- int self_size,
- int children_count,
- int retainers_count) {
- snapshot_ = snapshot;
- type_ = type;
- painted_ = false;
- name_ = name;
- self_size_ = self_size;
- retained_size_ = 0;
- children_count_ = children_count;
- retainers_count_ = retainers_count;
- dominator_ = NULL;
- id_ = id;
-}
+ int self_size)
+ : type_(type),
+ children_count_(0),
+ children_index_(-1),
+ self_size_(self_size),
+ id_(id),
+ snapshot_(snapshot),
+ name_(name) { }
void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
- int child_index,
const char* name,
- HeapEntry* entry,
- int retainer_index) {
- children()[child_index].Init(child_index, type, name, entry);
- entry->retainers()[retainer_index] = children_arr() + child_index;
+ HeapEntry* entry) {
+ HeapGraphEdge edge(type, name, this->index(), entry->index());
+ snapshot_->edges().Add(edge);
+ ++children_count_;
}
void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
- int child_index,
int index,
- HeapEntry* entry,
- int retainer_index) {
- children()[child_index].Init(child_index, type, index, entry);
- entry->retainers()[retainer_index] = children_arr() + child_index;
-}
-
-
-void HeapEntry::SetUnidirElementReference(
- int child_index, int index, HeapEntry* entry) {
- children()[child_index].Init(child_index, index, entry);
+ HeapEntry* entry) {
+ HeapGraphEdge edge(type, index, this->index(), entry->index());
+ snapshot_->edges().Add(edge);
+ ++children_count_;
}
@@ -1015,9 +1007,9 @@ Handle<HeapObject> HeapEntry::GetHeapObject() {
void HeapEntry::Print(
const char* prefix, const char* edge_name, int max_depth, int indent) {
- OS::Print("%6d %7d @%6llu %*c %s%s: ",
- self_size(), retained_size(), id(),
- indent, ' ', prefix, edge_name);
+ STATIC_CHECK(sizeof(unsigned) == sizeof(id()));
+ OS::Print("%6d @%6u %*c %s%s: ",
+ self_size(), id(), indent, ' ', prefix, edge_name);
if (type() != kString) {
OS::Print("%s %.40s\n", TypeAsString(), name_);
} else {
@@ -1033,9 +1025,9 @@ void HeapEntry::Print(
OS::Print("\"\n");
}
if (--max_depth == 0) return;
- Vector<HeapGraphEdge> ch = children();
+ Vector<HeapGraphEdge*> ch = children();
for (int i = 0; i < ch.length(); ++i) {
- HeapGraphEdge& edge = ch[i];
+ HeapGraphEdge& edge = *ch[i];
const char* edge_prefix = "";
EmbeddedVector<char, 64> index;
const char* edge_name = index.start();
@@ -1091,15 +1083,6 @@ const char* HeapEntry::TypeAsString() {
}
-size_t HeapEntry::EntriesSize(int entries_count,
- int children_count,
- int retainers_count) {
- return sizeof(HeapEntry) * entries_count // NOLINT
- + sizeof(HeapGraphEdge) * children_count // NOLINT
- + sizeof(HeapGraphEdge*) * retainers_count; // NOLINT
-}
-
-
// It is very important to keep objects that form a heap snapshot
// as small as possible.
namespace { // Avoid littering the global namespace.
@@ -1108,13 +1091,17 @@ template <size_t ptr_size> struct SnapshotSizeConstants;
template <> struct SnapshotSizeConstants<4> {
static const int kExpectedHeapGraphEdgeSize = 12;
- static const int kExpectedHeapEntrySize = 32;
+ static const int kExpectedHeapEntrySize = 24;
+ static const int kExpectedHeapSnapshotsCollectionSize = 96;
+ static const int kExpectedHeapSnapshotSize = 136;
static const size_t kMaxSerializableSnapshotRawSize = 256 * MB;
};
template <> struct SnapshotSizeConstants<8> {
static const int kExpectedHeapGraphEdgeSize = 24;
- static const int kExpectedHeapEntrySize = 48;
+ static const int kExpectedHeapEntrySize = 32;
+ static const int kExpectedHeapSnapshotsCollectionSize = 144;
+ static const int kExpectedHeapSnapshotSize = 168;
static const uint64_t kMaxSerializableSnapshotRawSize =
static_cast<uint64_t>(6000) * MB;
};
@@ -1129,11 +1116,10 @@ HeapSnapshot::HeapSnapshot(HeapSnapshotsCollection* collection,
type_(type),
title_(title),
uid_(uid),
- root_entry_(NULL),
- gc_roots_entry_(NULL),
- natives_root_entry_(NULL),
- raw_entries_(NULL),
- entries_sorted_(false) {
+ root_index_(HeapEntry::kNoEntry),
+ gc_roots_index_(HeapEntry::kNoEntry),
+ natives_root_index_(HeapEntry::kNoEntry),
+ max_snapshot_js_object_id_(0) {
STATIC_CHECK(
sizeof(HeapGraphEdge) ==
SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
@@ -1141,132 +1127,105 @@ HeapSnapshot::HeapSnapshot(HeapSnapshotsCollection* collection,
sizeof(HeapEntry) ==
SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
- gc_subroot_entries_[i] = NULL;
+ gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
}
}
-HeapSnapshot::~HeapSnapshot() {
- DeleteArray(raw_entries_);
-}
-
-
void HeapSnapshot::Delete() {
collection_->RemoveSnapshot(this);
delete this;
}
-void HeapSnapshot::AllocateEntries(int entries_count,
- int children_count,
- int retainers_count) {
- ASSERT(raw_entries_ == NULL);
- raw_entries_size_ =
- HeapEntry::EntriesSize(entries_count, children_count, retainers_count);
- raw_entries_ = NewArray<char>(raw_entries_size_);
+void HeapSnapshot::RememberLastJSObjectId() {
+ max_snapshot_js_object_id_ = collection_->last_assigned_id();
}
-static void HeapEntryClearPaint(HeapEntry** entry_ptr) {
- (*entry_ptr)->clear_paint();
-}
-
-
-void HeapSnapshot::ClearPaint() {
- entries_.Iterate(HeapEntryClearPaint);
-}
-
-
-HeapEntry* HeapSnapshot::AddRootEntry(int children_count) {
- ASSERT(root_entry_ == NULL);
- return (root_entry_ = AddEntry(HeapEntry::kObject,
- "",
- HeapObjectsMap::kInternalRootObjectId,
- 0,
- children_count,
- 0));
+HeapEntry* HeapSnapshot::AddRootEntry() {
+ ASSERT(root_index_ == HeapEntry::kNoEntry);
+ ASSERT(entries_.is_empty()); // Root entry must be the first one.
+ HeapEntry* entry = AddEntry(HeapEntry::kObject,
+ "",
+ HeapObjectsMap::kInternalRootObjectId,
+ 0);
+ root_index_ = entry->index();
+ ASSERT(root_index_ == 0);
+ return entry;
}
-HeapEntry* HeapSnapshot::AddGcRootsEntry(int children_count,
- int retainers_count) {
- ASSERT(gc_roots_entry_ == NULL);
- return (gc_roots_entry_ = AddEntry(HeapEntry::kObject,
- "(GC roots)",
- HeapObjectsMap::kGcRootsObjectId,
- 0,
- children_count,
- retainers_count));
+HeapEntry* HeapSnapshot::AddGcRootsEntry() {
+ ASSERT(gc_roots_index_ == HeapEntry::kNoEntry);
+ HeapEntry* entry = AddEntry(HeapEntry::kObject,
+ "(GC roots)",
+ HeapObjectsMap::kGcRootsObjectId,
+ 0);
+ gc_roots_index_ = entry->index();
+ return entry;
}
-HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag,
- int children_count,
- int retainers_count) {
- ASSERT(gc_subroot_entries_[tag] == NULL);
+HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag) {
+ ASSERT(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
ASSERT(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
- return (gc_subroot_entries_[tag] = AddEntry(
+ HeapEntry* entry = AddEntry(
HeapEntry::kObject,
VisitorSynchronization::kTagNames[tag],
HeapObjectsMap::GetNthGcSubrootId(tag),
- 0,
- children_count,
- retainers_count));
+ 0);
+ gc_subroot_indexes_[tag] = entry->index();
+ return entry;
}
HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
const char* name,
SnapshotObjectId id,
- int size,
- int children_count,
- int retainers_count) {
- HeapEntry* entry = GetNextEntryToInit();
- entry->Init(this, type, name, id, size, children_count, retainers_count);
- return entry;
+ int size) {
+ HeapEntry entry(this, type, name, id, size);
+ entries_.Add(entry);
+ return &entries_.last();
}
-void HeapSnapshot::SetDominatorsToSelf() {
- for (int i = 0; i < entries_.length(); ++i) {
- HeapEntry* entry = entries_[i];
- if (entry->dominator() == NULL) entry->set_dominator(entry);
+void HeapSnapshot::FillChildren() {
+ ASSERT(children().is_empty());
+ children().Allocate(edges().length());
+ int children_index = 0;
+ for (int i = 0; i < entries().length(); ++i) {
+ HeapEntry* entry = &entries()[i];
+ children_index = entry->set_children_index(children_index);
+ }
+ ASSERT(edges().length() == children_index);
+ for (int i = 0; i < edges().length(); ++i) {
+ HeapGraphEdge* edge = &edges()[i];
+ edge->ReplaceToIndexWithEntry(this);
+ edge->from()->add_child(edge);
}
}
-HeapEntry* HeapSnapshot::GetNextEntryToInit() {
- if (entries_.length() > 0) {
- HeapEntry* last_entry = entries_.last();
- entries_.Add(reinterpret_cast<HeapEntry*>(
- reinterpret_cast<char*>(last_entry) + last_entry->EntrySize()));
- } else {
- entries_.Add(reinterpret_cast<HeapEntry*>(raw_entries_));
+class FindEntryById {
+ public:
+ explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
+ int operator()(HeapEntry* const* entry) {
+ if ((*entry)->id() == id_) return 0;
+ return (*entry)->id() < id_ ? -1 : 1;
}
- ASSERT(reinterpret_cast<char*>(entries_.last()) <
- (raw_entries_ + raw_entries_size_));
- return entries_.last();
-}
+ private:
+ SnapshotObjectId id_;
+};
HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
-
// Perform a binary search by id.
- int low = 0;
- int high = entries_by_id->length() - 1;
- while (low <= high) {
- int mid =
- (static_cast<unsigned int>(low) + static_cast<unsigned int>(high)) >> 1;
- SnapshotObjectId mid_id = entries_by_id->at(mid)->id();
- if (mid_id > id)
- high = mid - 1;
- else if (mid_id < id)
- low = mid + 1;
- else
- return entries_by_id->at(mid);
- }
- return NULL;
+ int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
+ if (index == -1)
+ return NULL;
+ return entries_by_id->at(index);
}
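
Editor's note: the snapshot no longer carves entry, edge, and retainer arrays out of one raw byte buffer with precounted sizes. Entries and edges live in two flat lists, edges record their endpoints as indexes, and FillChildren() lays out each entry's outgoing edges contiguously in a shared children array in two passes: first turn per-entry child counts into start offsets, then append every edge to its source entry's slice. A compact sketch of that algorithm with std::vector (structure names are illustrative):

    #include <cassert>
    #include <vector>

    struct Edge { int from_index; int to_index; };

    struct Entry {
      int children_count;   // filled while edges are being added
      int children_index;   // start of this entry's slice in `children`
    };

    // Two-pass layout, mirroring HeapSnapshot::FillChildren().
    static void FillChildren(std::vector<Entry>* entries,
                             std::vector<Edge>* edges,
                             std::vector<const Edge*>* children) {
      children->assign(edges->size(), nullptr);
      // Pass 1: prefix-sum the counts into start offsets.
      int next = 0;
      for (size_t i = 0; i < entries->size(); ++i) {
        Entry& e = (*entries)[i];
        e.children_index = next;
        next += e.children_count;
        e.children_count = 0;         // reused as a fill cursor in pass 2
      }
      assert(next == static_cast<int>(edges->size()));
      // Pass 2: drop every edge into its source entry's slice.
      for (size_t i = 0; i < edges->size(); ++i) {
        const Edge& edge = (*edges)[i];
        Entry& from = (*entries)[edge.from_index];
        (*children)[from.children_index + from.children_count++] = &edge;
      }
    }

    int main() {
      // Entry 0 -> 1, 0 -> 2, 1 -> 2.
      std::vector<Entry> entries(3);
      entries[0].children_count = 2;
      entries[1].children_count = 1;
      Edge e01 = {0, 1}, e12 = {1, 2}, e02 = {0, 2};
      std::vector<Edge> edges;
      edges.push_back(e01); edges.push_back(e12); edges.push_back(e02);
      std::vector<const Edge*> children;
      FillChildren(&entries, &edges, &children);
      assert(children[entries[0].children_index]->to_index == 1);
      assert(children[entries[1].children_index]->to_index == 2);
      return 0;
    }
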
@@ -1279,11 +1238,14 @@ static int SortByIds(const T* entry1_ptr,
List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
- if (!entries_sorted_) {
- entries_.Sort(SortByIds);
- entries_sorted_ = true;
+ if (sorted_entries_.is_empty()) {
+ sorted_entries_.Allocate(entries_.length());
+ for (int i = 0; i < entries_.length(); ++i) {
+ sorted_entries_[i] = &entries_[i];
+ }
+ sorted_entries_.Sort(SortByIds);
}
- return &entries_;
+ return &sorted_entries_;
}
@@ -1292,6 +1254,24 @@ void HeapSnapshot::Print(int max_depth) {
}
+template<typename T, class P>
+static size_t GetMemoryUsedByList(const List<T, P>& list) {
+ return list.length() * sizeof(T) + sizeof(list);
+}
+
+
+size_t HeapSnapshot::RawSnapshotSize() const {
+ STATIC_CHECK(SnapshotSizeConstants<kPointerSize>::kExpectedHeapSnapshotSize ==
+ sizeof(HeapSnapshot)); // NOLINT
+ return
+ sizeof(*this) +
+ GetMemoryUsedByList(entries_) +
+ GetMemoryUsedByList(edges_) +
+ GetMemoryUsedByList(children_) +
+ GetMemoryUsedByList(sorted_entries_);
+}
+
+
// We split IDs on evens for embedder objects (see
// HeapObjectsMap::GenerateId) and odds for native objects.
const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
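
Editor's note: RawSnapshotSize() and the new GetUsedMemorySize() methods estimate the profiler's own footprint by summing sizeof(*this), hash-map capacity, and a per-list helper that charges length() * sizeof(element) plus the list header. A tiny sketch of that helper applied to std::vector (charging capacity() instead of size() would also count slack from geometric growth; the struct layouts are illustrative):

    #include <cstdio>
    #include <vector>

    // Rough self-measurement helper, mirroring GetMemoryUsedByList() above:
    // element storage plus the container object itself.
    template <typename T>
    static size_t GetMemoryUsedByVector(const std::vector<T>& v) {
      return v.size() * sizeof(T) + sizeof(v);
    }

    struct Edge  { int type, from, to; const char* name; };
    struct Entry { int type, size; unsigned id; const char* name; };

    int main() {
      std::vector<Entry> entries(1000);
      std::vector<Edge> edges(5000);
      size_t total = GetMemoryUsedByVector(entries) + GetMemoryUsedByVector(edges);
      printf("approximate snapshot footprint: %zu bytes\n", total);
      return 0;
    }
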
@@ -1304,96 +1284,167 @@ const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
HeapObjectsMap::HeapObjectsMap()
- : initial_fill_mode_(true),
- next_id_(kFirstAvailableObjectId),
- entries_map_(AddressesMatch),
- entries_(new List<EntryInfo>()) { }
-
-
-HeapObjectsMap::~HeapObjectsMap() {
- delete entries_;
+ : next_id_(kFirstAvailableObjectId),
+ entries_map_(AddressesMatch) {
+ // This dummy element solves a problem with entries_map_.
+ // When we do lookup in HashMap we see no difference between two cases:
+ // it has an entry with NULL as the value or it has created
+ // a new entry on the fly with NULL as the default value.
+ // With such a dummy element we have a guarantee that all entries_map_
+ // entries will have a value field greater than 0.
+ // This fact is used in the MoveObject method.

+ entries_.Add(EntryInfo(0, NULL, 0));
}
void HeapObjectsMap::SnapshotGenerationFinished() {
- initial_fill_mode_ = false;
RemoveDeadEntries();
}
-SnapshotObjectId HeapObjectsMap::FindObject(Address addr) {
- if (!initial_fill_mode_) {
- SnapshotObjectId existing = FindEntry(addr);
- if (existing != 0) return existing;
- }
- SnapshotObjectId id = next_id_;
- next_id_ += kObjectIdStep;
- AddEntry(addr, id);
- return id;
-}
-
-
void HeapObjectsMap::MoveObject(Address from, Address to) {
+ ASSERT(to != NULL);
+ ASSERT(from != NULL);
if (from == to) return;
- HashMap::Entry* entry = entries_map_.Lookup(from, AddressHash(from), false);
- if (entry != NULL) {
- void* value = entry->value;
- entries_map_.Remove(from, AddressHash(from));
- if (to != NULL) {
- entry = entries_map_.Lookup(to, AddressHash(to), true);
- // We can have an entry at the new location, it is OK, as GC can overwrite
- // dead objects with alive objects being moved.
- entry->value = value;
- }
+ void* from_value = entries_map_.Remove(from, AddressHash(from));
+ if (from_value == NULL) return;
+ int from_entry_info_index =
+ static_cast<int>(reinterpret_cast<intptr_t>(from_value));
+ entries_.at(from_entry_info_index).addr = to;
+ HashMap::Entry* to_entry = entries_map_.Lookup(to, AddressHash(to), true);
+ if (to_entry->value != NULL) {
+ int to_entry_info_index =
+ static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
+ // Without this operation we would end up with two EntryInfo records
+ // whose addr fields hold the same address. That is a problem, because
+ // RemoveDeadEntries would later drop one of them together with the
+ // corresponding entries_map_ entry.
+ entries_.at(to_entry_info_index).addr = NULL;
}
+ to_entry->value = reinterpret_cast<void*>(from_entry_info_index);
}
-void HeapObjectsMap::AddEntry(Address addr, SnapshotObjectId id) {
- HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), true);
- ASSERT(entry->value == NULL);
- entry->value = reinterpret_cast<void*>(entries_->length());
- entries_->Add(EntryInfo(id));
+SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
+ HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), false);
+ if (entry == NULL) return 0;
+ int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
+ EntryInfo& entry_info = entries_.at(entry_index);
+ ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
+ return entry_info.id;
}
-SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
- HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), false);
- if (entry != NULL) {
+SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
+ unsigned int size) {
+ ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
+ HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), true);
+ if (entry->value != NULL) {
int entry_index =
static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
- EntryInfo& entry_info = entries_->at(entry_index);
+ EntryInfo& entry_info = entries_.at(entry_index);
entry_info.accessed = true;
+ entry_info.size = size;
return entry_info.id;
- } else {
- return 0;
}
+ entry->value = reinterpret_cast<void*>(entries_.length());
+ SnapshotObjectId id = next_id_;
+ next_id_ += kObjectIdStep;
+ entries_.Add(EntryInfo(id, addr, size));
+ ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
+ return id;
+}
+
+
+void HeapObjectsMap::StopHeapObjectsTracking() {
+ time_intervals_.Clear();
+}
+
+void HeapObjectsMap::UpdateHeapObjectsMap() {
+ HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+ "HeapSnapshotsCollection::UpdateHeapObjectsMap");
+ HeapIterator iterator;
+ for (HeapObject* obj = iterator.next();
+ obj != NULL;
+ obj = iterator.next()) {
+ FindOrAddEntry(obj->address(), obj->Size());
+ }
+ RemoveDeadEntries();
+}
+
+
+SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
+ UpdateHeapObjectsMap();
+ time_intervals_.Add(TimeInterval(next_id_));
+ int prefered_chunk_size = stream->GetChunkSize();
+ List<v8::HeapStatsUpdate> stats_buffer;
+ ASSERT(!entries_.is_empty());
+ EntryInfo* entry_info = &entries_.first();
+ EntryInfo* end_entry_info = &entries_.last() + 1;
+ for (int time_interval_index = 0;
+ time_interval_index < time_intervals_.length();
+ ++time_interval_index) {
+ TimeInterval& time_interval = time_intervals_[time_interval_index];
+ SnapshotObjectId time_interval_id = time_interval.id;
+ uint32_t entries_size = 0;
+ EntryInfo* start_entry_info = entry_info;
+ while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
+ entries_size += entry_info->size;
+ ++entry_info;
+ }
+ uint32_t entries_count =
+ static_cast<uint32_t>(entry_info - start_entry_info);
+ if (time_interval.count != entries_count ||
+ time_interval.size != entries_size) {
+ stats_buffer.Add(v8::HeapStatsUpdate(
+ time_interval_index,
+ time_interval.count = entries_count,
+ time_interval.size = entries_size));
+ if (stats_buffer.length() >= prefered_chunk_size) {
+ OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
+ &stats_buffer.first(), stats_buffer.length());
+ if (result == OutputStream::kAbort) return last_assigned_id();
+ stats_buffer.Clear();
+ }
+ }
+ }
+ ASSERT(entry_info == end_entry_info);
+ if (!stats_buffer.is_empty()) {
+ OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
+ &stats_buffer.first(), stats_buffer.length());
+ if (result == OutputStream::kAbort) return last_assigned_id();
+ }
+ stream->EndOfStream();
+ return last_assigned_id();
}
void HeapObjectsMap::RemoveDeadEntries() {
- List<EntryInfo>* new_entries = new List<EntryInfo>();
- List<void*> dead_entries;
- for (HashMap::Entry* entry = entries_map_.Start();
- entry != NULL;
- entry = entries_map_.Next(entry)) {
- int entry_index =
- static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
- EntryInfo& entry_info = entries_->at(entry_index);
+ ASSERT(entries_.length() > 0 &&
+ entries_.at(0).id == 0 &&
+ entries_.at(0).addr == NULL);
+ int first_free_entry = 1;
+ for (int i = 1; i < entries_.length(); ++i) {
+ EntryInfo& entry_info = entries_.at(i);
if (entry_info.accessed) {
- entry->value = reinterpret_cast<void*>(new_entries->length());
- new_entries->Add(EntryInfo(entry_info.id, false));
+ if (first_free_entry != i) {
+ entries_.at(first_free_entry) = entry_info;
+ }
+ entries_.at(first_free_entry).accessed = false;
+ HashMap::Entry* entry = entries_map_.Lookup(
+ entry_info.addr, AddressHash(entry_info.addr), false);
+ ASSERT(entry);
+ entry->value = reinterpret_cast<void*>(first_free_entry);
+ ++first_free_entry;
} else {
- dead_entries.Add(entry->key);
+ if (entry_info.addr) {
+ entries_map_.Remove(entry_info.addr, AddressHash(entry_info.addr));
+ }
}
}
- for (int i = 0; i < dead_entries.length(); ++i) {
- void* raw_entry = dead_entries[i];
- entries_map_.Remove(
- raw_entry, AddressHash(reinterpret_cast<Address>(raw_entry)));
- }
- delete entries_;
- entries_ = new_entries;
+ entries_.Rewind(first_free_entry);
+ ASSERT(static_cast<uint32_t>(entries_.length()) - 1 ==
+ entries_map_.occupancy());
}
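
Editor's note: HeapObjectsMap now keeps one growing entries_ list plus a hash map whose values are indexes into that list; index 0 is a reserved dummy so that a map value of 0/NULL can only mean "not present", which MoveObject and FindOrAddEntry rely on. A stand-alone sketch of the same scheme with std::unordered_map (EntryInfo is simplified, the id step of 2 mirrors V8's even ids for heap objects, and RemoveDeadEntries is omitted):

    #include <cassert>
    #include <cstdint>
    #include <unordered_map>
    #include <vector>

    struct EntryInfo {
      uint32_t id;
      const void* addr;
      unsigned size;
    };

    class ObjectIdMap {
     public:
      ObjectIdMap() : next_id_(2) {
        entries_.push_back(EntryInfo{0, nullptr, 0});  // dummy: index 0 == absent
      }

      uint32_t FindOrAddEntry(const void* addr, unsigned size) {
        size_t& index = map_[addr];        // inserts 0 if addr is unknown
        if (index != 0) {                  // 0 can only be the dummy slot
          entries_[index].size = size;
          return entries_[index].id;
        }
        index = entries_.size();
        entries_.push_back(EntryInfo{next_id_, addr, size});
        next_id_ += 2;
        return entries_[index].id;
      }

      void MoveObject(const void* from, const void* to) {
        if (from == to) return;
        std::unordered_map<const void*, size_t>::iterator it = map_.find(from);
        if (it == map_.end()) return;
        size_t index = it->second;
        map_.erase(it);
        entries_[index].addr = to;         // the entry keeps its id
        map_[to] = index;                  // may overwrite a dead object's slot
      }

      uint32_t FindEntry(const void* addr) const {
        std::unordered_map<const void*, size_t>::const_iterator it = map_.find(addr);
        return it == map_.end() ? 0 : entries_[it->second].id;
      }

     private:
      uint32_t next_id_;
      std::vector<EntryInfo> entries_;     // index 0 is the reserved dummy
      std::unordered_map<const void*, size_t> map_;
    };

    int main() {
      ObjectIdMap map;
      int a = 0, b = 0;
      uint32_t id = map.FindOrAddEntry(&a, 16);
      map.MoveObject(&a, &b);              // GC moved the object; id survives
      assert(map.FindEntry(&b) == id);
      assert(map.FindEntry(&a) == 0);      // old address no longer mapped
      return 0;
    }
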
@@ -1411,6 +1462,15 @@ SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
}
+size_t HeapObjectsMap::GetUsedMemorySize() const {
+ return
+ sizeof(*this) +
+ sizeof(HashMap::Entry) * entries_map_.capacity() +
+ GetMemoryUsedByList(entries_) +
+ GetMemoryUsedByList(time_intervals_);
+}
+
+
HeapSnapshotsCollection::HeapSnapshotsCollection()
: is_tracking_objects_(false),
snapshots_uids_(HeapSnapshotsMatch),
@@ -1480,7 +1540,7 @@ Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById(
for (HeapObject* obj = iterator.next();
obj != NULL;
obj = iterator.next()) {
- if (ids_.FindObject(obj->address()) == id) {
+ if (ids_.FindEntry(obj->address()) == id) {
ASSERT(object == NULL);
object = obj;
// Can't break -- kFilterUnreachable requires full heap traversal.
@@ -1490,80 +1550,38 @@ Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById(
}
-HeapEntry* const HeapEntriesMap::kHeapEntryPlaceholder =
- reinterpret_cast<HeapEntry*>(1);
-
-HeapEntriesMap::HeapEntriesMap()
- : entries_(HeapThingsMatch),
- entries_count_(0),
- total_children_count_(0),
- total_retainers_count_(0) {
-}
-
-
-HeapEntriesMap::~HeapEntriesMap() {
- for (HashMap::Entry* p = entries_.Start(); p != NULL; p = entries_.Next(p)) {
- delete reinterpret_cast<EntryInfo*>(p->value);
+size_t HeapSnapshotsCollection::GetUsedMemorySize() const {
+ STATIC_CHECK(SnapshotSizeConstants<kPointerSize>::
+ kExpectedHeapSnapshotsCollectionSize ==
+ sizeof(HeapSnapshotsCollection)); // NOLINT
+ size_t size = sizeof(*this);
+ size += names_.GetUsedMemorySize();
+ size += ids_.GetUsedMemorySize();
+ size += sizeof(HashMap::Entry) * snapshots_uids_.capacity();
+ size += GetMemoryUsedByList(snapshots_);
+ for (int i = 0; i < snapshots_.length(); ++i) {
+ size += snapshots_[i]->RawSnapshotSize();
}
+ return size;
}
-void HeapEntriesMap::AllocateEntries() {
- for (HashMap::Entry* p = entries_.Start();
- p != NULL;
- p = entries_.Next(p)) {
- EntryInfo* entry_info = reinterpret_cast<EntryInfo*>(p->value);
- entry_info->entry = entry_info->allocator->AllocateEntry(
- p->key,
- entry_info->children_count,
- entry_info->retainers_count);
- ASSERT(entry_info->entry != NULL);
- ASSERT(entry_info->entry != kHeapEntryPlaceholder);
- entry_info->children_count = 0;
- entry_info->retainers_count = 0;
- }
+HeapEntriesMap::HeapEntriesMap()
+ : entries_(HeapThingsMatch) {
}
-HeapEntry* HeapEntriesMap::Map(HeapThing thing) {
+int HeapEntriesMap::Map(HeapThing thing) {
HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false);
- if (cache_entry != NULL) {
- EntryInfo* entry_info = reinterpret_cast<EntryInfo*>(cache_entry->value);
- return entry_info->entry;
- } else {
- return NULL;
- }
+ if (cache_entry == NULL) return HeapEntry::kNoEntry;
+ return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
}
-void HeapEntriesMap::Pair(
- HeapThing thing, HeapEntriesAllocator* allocator, HeapEntry* entry) {
+void HeapEntriesMap::Pair(HeapThing thing, int entry) {
HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
ASSERT(cache_entry->value == NULL);
- cache_entry->value = new EntryInfo(entry, allocator);
- ++entries_count_;
-}
-
-
-void HeapEntriesMap::CountReference(HeapThing from, HeapThing to,
- int* prev_children_count,
- int* prev_retainers_count) {
- HashMap::Entry* from_cache_entry = entries_.Lookup(from, Hash(from), false);
- HashMap::Entry* to_cache_entry = entries_.Lookup(to, Hash(to), false);
- ASSERT(from_cache_entry != NULL);
- ASSERT(to_cache_entry != NULL);
- EntryInfo* from_entry_info =
- reinterpret_cast<EntryInfo*>(from_cache_entry->value);
- EntryInfo* to_entry_info =
- reinterpret_cast<EntryInfo*>(to_cache_entry->value);
- if (prev_children_count)
- *prev_children_count = from_entry_info->children_count;
- if (prev_retainers_count)
- *prev_retainers_count = to_entry_info->retainers_count;
- ++from_entry_info->children_count;
- ++to_entry_info->retainers_count;
- ++total_children_count_;
- ++total_retainers_count_;
+ cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
}
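
HeapEntriesMap::Map and Pair above store a plain int entry index directly in the HashMap's void* value slot by round-tripping through intptr_t, so no per-entry EntryInfo allocation is needed any more. A standalone sketch of that encoding, with std::unordered_map standing in for V8's HashMap:

// Sketch of packing an int index into a pointer-keyed map's void* value,
// as HeapEntriesMap::Map/Pair do above (standalone and illustrative).
#include <cstdint>
#include <unordered_map>

static const int kNoEntry = -1;

inline void* EncodeIndex(int index) {
  return reinterpret_cast<void*>(static_cast<intptr_t>(index));
}

inline int DecodeIndex(void* value) {
  return static_cast<int>(reinterpret_cast<intptr_t>(value));
}

// Pairing stores EncodeIndex(i); lookup returns kNoEntry on a miss instead
// of handing out a placeholder pointer as the old code did.
int MapIndex(const std::unordered_map<const void*, void*>& entries,
             const void* thing) {
  auto it = entries.find(thing);
  return it == entries.end() ? kNoEntry : DecodeIndex(it->second);
}
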
@@ -1580,20 +1598,14 @@ void HeapObjectsSet::Clear() {
bool HeapObjectsSet::Contains(Object* obj) {
if (!obj->IsHeapObject()) return false;
HeapObject* object = HeapObject::cast(obj);
- HashMap::Entry* cache_entry =
- entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
- return cache_entry != NULL;
+ return entries_.Lookup(object, HeapEntriesMap::Hash(object), false) != NULL;
}
void HeapObjectsSet::Insert(Object* obj) {
if (!obj->IsHeapObject()) return;
HeapObject* object = HeapObject::cast(obj);
- HashMap::Entry* cache_entry =
- entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
- if (cache_entry->value == NULL) {
- cache_entry->value = HeapEntriesMap::kHeapEntryPlaceholder;
- }
+ entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
}
@@ -1601,12 +1613,9 @@ const char* HeapObjectsSet::GetTag(Object* obj) {
HeapObject* object = HeapObject::cast(obj);
HashMap::Entry* cache_entry =
entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
- if (cache_entry != NULL
- && cache_entry->value != HeapEntriesMap::kHeapEntryPlaceholder) {
- return reinterpret_cast<const char*>(cache_entry->value);
- } else {
- return NULL;
- }
+ return cache_entry != NULL
+ ? reinterpret_cast<const char*>(cache_entry->value)
+ : NULL;
}
@@ -1648,126 +1657,83 @@ V8HeapExplorer::~V8HeapExplorer() {
}
-HeapEntry* V8HeapExplorer::AllocateEntry(
- HeapThing ptr, int children_count, int retainers_count) {
- return AddEntry(
- reinterpret_cast<HeapObject*>(ptr), children_count, retainers_count);
+HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
+ return AddEntry(reinterpret_cast<HeapObject*>(ptr));
}
-HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
- int children_count,
- int retainers_count) {
+HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
if (object == kInternalRootObject) {
- ASSERT(retainers_count == 0);
- return snapshot_->AddRootEntry(children_count);
+ snapshot_->AddRootEntry();
+ return snapshot_->root();
} else if (object == kGcRootsObject) {
- return snapshot_->AddGcRootsEntry(children_count, retainers_count);
+ HeapEntry* entry = snapshot_->AddGcRootsEntry();
+ return entry;
} else if (object >= kFirstGcSubrootObject && object < kLastGcSubrootObject) {
- return snapshot_->AddGcSubrootEntry(
- GetGcSubrootOrder(object),
- children_count,
- retainers_count);
+ HeapEntry* entry = snapshot_->AddGcSubrootEntry(GetGcSubrootOrder(object));
+ return entry;
} else if (object->IsJSFunction()) {
JSFunction* func = JSFunction::cast(object);
SharedFunctionInfo* shared = func->shared();
const char* name = shared->bound() ? "native_bind" :
collection_->names()->GetName(String::cast(shared->name()));
- return AddEntry(object,
- HeapEntry::kClosure,
- name,
- children_count,
- retainers_count);
+ return AddEntry(object, HeapEntry::kClosure, name);
} else if (object->IsJSRegExp()) {
JSRegExp* re = JSRegExp::cast(object);
return AddEntry(object,
HeapEntry::kRegExp,
- collection_->names()->GetName(re->Pattern()),
- children_count,
- retainers_count);
+ collection_->names()->GetName(re->Pattern()));
} else if (object->IsJSObject()) {
- return AddEntry(object,
- HeapEntry::kObject,
- "",
- children_count,
- retainers_count);
+ const char* name = collection_->names()->GetName(
+ GetConstructorName(JSObject::cast(object)));
+ if (object->IsJSGlobalObject()) {
+ const char* tag = objects_tags_.GetTag(object);
+ if (tag != NULL) {
+ name = collection_->names()->GetFormatted("%s / %s", name, tag);
+ }
+ }
+ return AddEntry(object, HeapEntry::kObject, name);
} else if (object->IsString()) {
return AddEntry(object,
HeapEntry::kString,
- collection_->names()->GetName(String::cast(object)),
- children_count,
- retainers_count);
+ collection_->names()->GetName(String::cast(object)));
} else if (object->IsCode()) {
- return AddEntry(object,
- HeapEntry::kCode,
- "",
- children_count,
- retainers_count);
+ return AddEntry(object, HeapEntry::kCode, "");
} else if (object->IsSharedFunctionInfo()) {
- SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
+ String* name = String::cast(SharedFunctionInfo::cast(object)->name());
return AddEntry(object,
HeapEntry::kCode,
- collection_->names()->GetName(String::cast(shared->name())),
- children_count,
- retainers_count);
+ collection_->names()->GetName(name));
} else if (object->IsScript()) {
- Script* script = Script::cast(object);
+ Object* name = Script::cast(object)->name();
return AddEntry(object,
HeapEntry::kCode,
- script->name()->IsString() ?
- collection_->names()->GetName(
- String::cast(script->name()))
- : "",
- children_count,
- retainers_count);
+ name->IsString()
+ ? collection_->names()->GetName(String::cast(name))
+ : "");
} else if (object->IsGlobalContext()) {
- return AddEntry(object,
- HeapEntry::kHidden,
- "system / GlobalContext",
- children_count,
- retainers_count);
+ return AddEntry(object, HeapEntry::kHidden, "system / GlobalContext");
} else if (object->IsContext()) {
- return AddEntry(object,
- HeapEntry::kHidden,
- "system / Context",
- children_count,
- retainers_count);
+ return AddEntry(object, HeapEntry::kHidden, "system / Context");
} else if (object->IsFixedArray() ||
object->IsFixedDoubleArray() ||
object->IsByteArray() ||
object->IsExternalArray()) {
- const char* tag = objects_tags_.GetTag(object);
- return AddEntry(object,
- HeapEntry::kArray,
- tag != NULL ? tag : "",
- children_count,
- retainers_count);
+ return AddEntry(object, HeapEntry::kArray, "");
} else if (object->IsHeapNumber()) {
- return AddEntry(object,
- HeapEntry::kHeapNumber,
- "number",
- children_count,
- retainers_count);
+ return AddEntry(object, HeapEntry::kHeapNumber, "number");
}
- return AddEntry(object,
- HeapEntry::kHidden,
- GetSystemEntryName(object),
- children_count,
- retainers_count);
+ return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
}
HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
HeapEntry::Type type,
- const char* name,
- int children_count,
- int retainers_count) {
- return snapshot_->AddEntry(type,
- name,
- collection_->GetObjectId(object->address()),
- object->Size(),
- children_count,
- retainers_count);
+ const char* name) {
+ int object_size = object->Size();
+ SnapshotObjectId object_id =
+ collection_->GetObjectId(object->address(), object_size);
+ return snapshot_->AddEntry(type, name, object_id, object_size);
}
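
AddEntry above dispatches on the concrete heap object type to pick a HeapEntry type and a display name before recording the object's id and size. A compact sketch of that classification shape (the types and flags below are made up for illustration):

// Standalone sketch of the type dispatch in AddEntry above: map an object
// description to an entry type plus a display name (names are illustrative).
#include <string>

enum class EntryType { kClosure, kRegExp, kObject, kString, kCode, kHidden };

struct ObjectInfo {
  bool is_function = false, is_regexp = false, is_js_object = false,
       is_string = false, is_code = false;
  std::string name;  // function name, regexp pattern, constructor name, ...
};

EntryType Classify(const ObjectInfo& o, std::string* display_name) {
  if (o.is_function)  { *display_name = o.name; return EntryType::kClosure; }
  if (o.is_regexp)    { *display_name = o.name; return EntryType::kRegExp; }
  if (o.is_js_object) { *display_name = o.name; return EntryType::kObject; }
  if (o.is_string)    { *display_name = o.name; return EntryType::kString; }
  if (o.is_code)      { *display_name = "";     return EntryType::kCode; }
  *display_name = "system";
  return EntryType::kHidden;
}
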
@@ -1836,10 +1802,10 @@ class IndexedReferencesExtractor : public ObjectVisitor {
public:
IndexedReferencesExtractor(V8HeapExplorer* generator,
HeapObject* parent_obj,
- HeapEntry* parent_entry)
+ int parent)
: generator_(generator),
parent_obj_(parent_obj),
- parent_(parent_entry),
+ parent_(parent),
next_index_(1) {
}
void VisitPointers(Object** start, Object** end) {
@@ -1868,178 +1834,40 @@ class IndexedReferencesExtractor : public ObjectVisitor {
}
V8HeapExplorer* generator_;
HeapObject* parent_obj_;
- HeapEntry* parent_;
+ int parent_;
int next_index_;
};
void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
- HeapEntry* entry = GetEntry(obj);
- if (entry == NULL) return; // No interest in this object.
+ HeapEntry* heap_entry = GetEntry(obj);
+ if (heap_entry == NULL) return; // No interest in this object.
+ int entry = heap_entry->index();
bool extract_indexed_refs = true;
if (obj->IsJSGlobalProxy()) {
- // We need to reference JS global objects from snapshot's root.
- // We use JSGlobalProxy because this is what embedder (e.g. browser)
- // uses for the global object.
- JSGlobalProxy* proxy = JSGlobalProxy::cast(obj);
- SetRootShortcutReference(proxy->map()->prototype());
+ ExtractJSGlobalProxyReferences(JSGlobalProxy::cast(obj));
} else if (obj->IsJSObject()) {
- JSObject* js_obj = JSObject::cast(obj);
- ExtractClosureReferences(js_obj, entry);
- ExtractPropertyReferences(js_obj, entry);
- ExtractElementReferences(js_obj, entry);
- ExtractInternalReferences(js_obj, entry);
- SetPropertyReference(
- obj, entry, heap_->Proto_symbol(), js_obj->GetPrototype());
- if (obj->IsJSFunction()) {
- JSFunction* js_fun = JSFunction::cast(js_obj);
- Object* proto_or_map = js_fun->prototype_or_initial_map();
- if (!proto_or_map->IsTheHole()) {
- if (!proto_or_map->IsMap()) {
- SetPropertyReference(
- obj, entry,
- heap_->prototype_symbol(), proto_or_map,
- NULL,
- JSFunction::kPrototypeOrInitialMapOffset);
- } else {
- SetPropertyReference(
- obj, entry,
- heap_->prototype_symbol(), js_fun->prototype());
- }
- }
- SharedFunctionInfo* shared_info = js_fun->shared();
- // JSFunction has either bindings or literals and never both.
- bool bound = shared_info->bound();
- TagObject(js_fun->literals_or_bindings(),
- bound ? "(function bindings)" : "(function literals)");
- SetInternalReference(js_fun, entry,
- bound ? "bindings" : "literals",
- js_fun->literals_or_bindings(),
- JSFunction::kLiteralsOffset);
- SetInternalReference(js_fun, entry,
- "shared", shared_info,
- JSFunction::kSharedFunctionInfoOffset);
- TagObject(js_fun->unchecked_context(), "(context)");
- SetInternalReference(js_fun, entry,
- "context", js_fun->unchecked_context(),
- JSFunction::kContextOffset);
- for (int i = JSFunction::kNonWeakFieldsEndOffset;
- i < JSFunction::kSize;
- i += kPointerSize) {
- SetWeakReference(js_fun, entry, i, *HeapObject::RawField(js_fun, i), i);
- }
- }
- TagObject(js_obj->properties(), "(object properties)");
- SetInternalReference(obj, entry,
- "properties", js_obj->properties(),
- JSObject::kPropertiesOffset);
- TagObject(js_obj->elements(), "(object elements)");
- SetInternalReference(obj, entry,
- "elements", js_obj->elements(),
- JSObject::kElementsOffset);
+ ExtractJSObjectReferences(entry, JSObject::cast(obj));
} else if (obj->IsString()) {
- if (obj->IsConsString()) {
- ConsString* cs = ConsString::cast(obj);
- SetInternalReference(obj, entry, 1, cs->first());
- SetInternalReference(obj, entry, 2, cs->second());
- }
- if (obj->IsSlicedString()) {
- SlicedString* ss = SlicedString::cast(obj);
- SetInternalReference(obj, entry, "parent", ss->parent());
- }
+ ExtractStringReferences(entry, String::cast(obj));
extract_indexed_refs = false;
- } else if (obj->IsGlobalContext()) {
- Context* context = Context::cast(obj);
- TagObject(context->jsfunction_result_caches(),
- "(context func. result caches)");
- TagObject(context->normalized_map_cache(), "(context norm. map cache)");
- TagObject(context->runtime_context(), "(runtime context)");
- TagObject(context->data(), "(context data)");
- for (int i = Context::FIRST_WEAK_SLOT;
- i < Context::GLOBAL_CONTEXT_SLOTS;
- ++i) {
- SetWeakReference(obj, entry,
- i, context->get(i),
- FixedArray::OffsetOfElementAt(i));
- }
+ } else if (obj->IsContext()) {
+ ExtractContextReferences(entry, Context::cast(obj));
} else if (obj->IsMap()) {
- Map* map = Map::cast(obj);
- SetInternalReference(obj, entry,
- "prototype", map->prototype(), Map::kPrototypeOffset);
- SetInternalReference(obj, entry,
- "constructor", map->constructor(),
- Map::kConstructorOffset);
- if (!map->instance_descriptors()->IsEmpty()) {
- TagObject(map->instance_descriptors(), "(map descriptors)");
- SetInternalReference(obj, entry,
- "descriptors", map->instance_descriptors(),
- Map::kInstanceDescriptorsOrBitField3Offset);
- }
- if (map->prototype_transitions() != heap_->empty_fixed_array()) {
- TagObject(map->prototype_transitions(), "(prototype transitions)");
- SetInternalReference(obj,
- entry,
- "prototype_transitions",
- map->prototype_transitions(),
- Map::kPrototypeTransitionsOffset);
- }
- SetInternalReference(obj, entry,
- "code_cache", map->code_cache(),
- Map::kCodeCacheOffset);
+ ExtractMapReferences(entry, Map::cast(obj));
} else if (obj->IsSharedFunctionInfo()) {
- SharedFunctionInfo* shared = SharedFunctionInfo::cast(obj);
- SetInternalReference(obj, entry,
- "name", shared->name(),
- SharedFunctionInfo::kNameOffset);
- SetInternalReference(obj, entry,
- "code", shared->unchecked_code(),
- SharedFunctionInfo::kCodeOffset);
- TagObject(shared->scope_info(), "(function scope info)");
- SetInternalReference(obj, entry,
- "scope_info", shared->scope_info(),
- SharedFunctionInfo::kScopeInfoOffset);
- SetInternalReference(obj, entry,
- "instance_class_name", shared->instance_class_name(),
- SharedFunctionInfo::kInstanceClassNameOffset);
- SetInternalReference(obj, entry,
- "script", shared->script(),
- SharedFunctionInfo::kScriptOffset);
- SetWeakReference(obj, entry,
- 1, shared->initial_map(),
- SharedFunctionInfo::kInitialMapOffset);
+ ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
} else if (obj->IsScript()) {
- Script* script = Script::cast(obj);
- SetInternalReference(obj, entry,
- "source", script->source(),
- Script::kSourceOffset);
- SetInternalReference(obj, entry,
- "name", script->name(),
- Script::kNameOffset);
- SetInternalReference(obj, entry,
- "data", script->data(),
- Script::kDataOffset);
- SetInternalReference(obj, entry,
- "context_data", script->context_data(),
- Script::kContextOffset);
- TagObject(script->line_ends(), "(script line ends)");
- SetInternalReference(obj, entry,
- "line_ends", script->line_ends(),
- Script::kLineEndsOffset);
+ ExtractScriptReferences(entry, Script::cast(obj));
} else if (obj->IsCodeCache()) {
- CodeCache* code_cache = CodeCache::cast(obj);
- TagObject(code_cache->default_cache(), "(default code cache)");
- SetInternalReference(obj, entry,
- "default_cache", code_cache->default_cache(),
- CodeCache::kDefaultCacheOffset);
- TagObject(code_cache->normal_type_cache(), "(code type cache)");
- SetInternalReference(obj, entry,
- "type_cache", code_cache->normal_type_cache(),
- CodeCache::kNormalTypeCacheOffset);
+ ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
} else if (obj->IsCode()) {
- Code* code = Code::cast(obj);
- TagObject(code->unchecked_relocation_info(), "(code relocation info)");
- TagObject(code->unchecked_deoptimization_data(), "(code deopt data)");
+ ExtractCodeReferences(entry, Code::cast(obj));
+ } else if (obj->IsJSGlobalPropertyCell()) {
+ ExtractJSGlobalPropertyCellReferences(
+ entry, JSGlobalPropertyCell::cast(obj));
+ extract_indexed_refs = false;
}
if (extract_indexed_refs) {
SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset);
@@ -2049,14 +1877,266 @@ void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
}
-void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj,
- HeapEntry* entry) {
+void V8HeapExplorer::ExtractJSGlobalProxyReferences(JSGlobalProxy* proxy) {
+ // We need to reference JS global objects from the snapshot's root.
+ // We use JSGlobalProxy because this is what the embedder (e.g. a browser)
+ // uses for the global object.
+ Object* object = proxy->map()->prototype();
+ bool is_debug_object = false;
+#ifdef ENABLE_DEBUGGER_SUPPORT
+ is_debug_object = object->IsGlobalObject() &&
+ Isolate::Current()->debug()->IsDebugGlobal(GlobalObject::cast(object));
+#endif
+ if (!is_debug_object) {
+ SetUserGlobalReference(object);
+ }
+}
+
+
+void V8HeapExplorer::ExtractJSObjectReferences(
+ int entry, JSObject* js_obj) {
+ HeapObject* obj = js_obj;
+ ExtractClosureReferences(js_obj, entry);
+ ExtractPropertyReferences(js_obj, entry);
+ ExtractElementReferences(js_obj, entry);
+ ExtractInternalReferences(js_obj, entry);
+ SetPropertyReference(
+ obj, entry, heap_->Proto_symbol(), js_obj->GetPrototype());
+ if (obj->IsJSFunction()) {
+ JSFunction* js_fun = JSFunction::cast(js_obj);
+ Object* proto_or_map = js_fun->prototype_or_initial_map();
+ if (!proto_or_map->IsTheHole()) {
+ if (!proto_or_map->IsMap()) {
+ SetPropertyReference(
+ obj, entry,
+ heap_->prototype_symbol(), proto_or_map,
+ NULL,
+ JSFunction::kPrototypeOrInitialMapOffset);
+ } else {
+ SetPropertyReference(
+ obj, entry,
+ heap_->prototype_symbol(), js_fun->prototype());
+ }
+ }
+ SharedFunctionInfo* shared_info = js_fun->shared();
+ // JSFunction has either bindings or literals and never both.
+ bool bound = shared_info->bound();
+ TagObject(js_fun->literals_or_bindings(),
+ bound ? "(function bindings)" : "(function literals)");
+ SetInternalReference(js_fun, entry,
+ bound ? "bindings" : "literals",
+ js_fun->literals_or_bindings(),
+ JSFunction::kLiteralsOffset);
+ TagObject(shared_info, "(shared function info)");
+ SetInternalReference(js_fun, entry,
+ "shared", shared_info,
+ JSFunction::kSharedFunctionInfoOffset);
+ TagObject(js_fun->unchecked_context(), "(context)");
+ SetInternalReference(js_fun, entry,
+ "context", js_fun->unchecked_context(),
+ JSFunction::kContextOffset);
+ for (int i = JSFunction::kNonWeakFieldsEndOffset;
+ i < JSFunction::kSize;
+ i += kPointerSize) {
+ SetWeakReference(js_fun, entry, i, *HeapObject::RawField(js_fun, i), i);
+ }
+ } else if (obj->IsGlobalObject()) {
+ GlobalObject* global_obj = GlobalObject::cast(obj);
+ SetInternalReference(global_obj, entry,
+ "builtins", global_obj->builtins(),
+ GlobalObject::kBuiltinsOffset);
+ SetInternalReference(global_obj, entry,
+ "global_context", global_obj->global_context(),
+ GlobalObject::kGlobalContextOffset);
+ SetInternalReference(global_obj, entry,
+ "global_receiver", global_obj->global_receiver(),
+ GlobalObject::kGlobalReceiverOffset);
+ }
+ TagObject(js_obj->properties(), "(object properties)");
+ SetInternalReference(obj, entry,
+ "properties", js_obj->properties(),
+ JSObject::kPropertiesOffset);
+ TagObject(js_obj->elements(), "(object elements)");
+ SetInternalReference(obj, entry,
+ "elements", js_obj->elements(),
+ JSObject::kElementsOffset);
+}
+
+
+void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
+ if (string->IsConsString()) {
+ ConsString* cs = ConsString::cast(string);
+ SetInternalReference(cs, entry, "first", cs->first());
+ SetInternalReference(cs, entry, "second", cs->second());
+ } else if (string->IsSlicedString()) {
+ SlicedString* ss = SlicedString::cast(string);
+ SetInternalReference(ss, entry, "parent", ss->parent());
+ }
+}
+
+
+void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
+#define EXTRACT_CONTEXT_FIELD(index, type, name) \
+ SetInternalReference(context, entry, #name, context->get(Context::index), \
+ FixedArray::OffsetOfElementAt(Context::index));
+ EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
+ EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
+ EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, Object, extension);
+ EXTRACT_CONTEXT_FIELD(GLOBAL_INDEX, GlobalObject, global);
+ if (context->IsGlobalContext()) {
+ TagObject(context->jsfunction_result_caches(),
+ "(context func. result caches)");
+ TagObject(context->normalized_map_cache(), "(context norm. map cache)");
+ TagObject(context->runtime_context(), "(runtime context)");
+ TagObject(context->data(), "(context data)");
+ GLOBAL_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD);
+#undef EXTRACT_CONTEXT_FIELD
+ for (int i = Context::FIRST_WEAK_SLOT;
+ i < Context::GLOBAL_CONTEXT_SLOTS;
+ ++i) {
+ SetWeakReference(context, entry, i, context->get(i),
+ FixedArray::OffsetOfElementAt(i));
+ }
+ }
+}
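
ExtractContextReferences relies on an X-macro: EXTRACT_CONTEXT_FIELD is expanded over the fixed context slots and again over GLOBAL_CONTEXT_FIELDS, so every slot gets a named internal reference without repeating the boilerplate. A minimal standalone illustration of the pattern (the field list and handler below are invented):

// Minimal illustration of the X-macro pattern used by
// EXTRACT_CONTEXT_FIELD above (field list and handler are made up).
#include <cstdio>

#define CONTEXT_FIELDS(V) \
  V(0, closure)           \
  V(1, previous)          \
  V(2, extension)         \
  V(3, global)

void ReportField(int index, const char* name) {
  std::printf("slot %d -> \"%s\"\n", index, name);
}

void ReportAllFields() {
#define REPORT_FIELD(index, name) ReportField(index, #name);
  CONTEXT_FIELDS(REPORT_FIELD)
#undef REPORT_FIELD
}
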
+
+
+void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
+ SetInternalReference(map, entry,
+ "prototype", map->prototype(), Map::kPrototypeOffset);
+ SetInternalReference(map, entry,
+ "constructor", map->constructor(),
+ Map::kConstructorOffset);
+ if (!map->instance_descriptors()->IsEmpty()) {
+ TagObject(map->instance_descriptors(), "(map descriptors)");
+ SetInternalReference(map, entry,
+ "descriptors", map->instance_descriptors(),
+ Map::kInstanceDescriptorsOrBitField3Offset);
+ }
+ if (map->unchecked_prototype_transitions()->IsFixedArray()) {
+ TagObject(map->prototype_transitions(), "(prototype transitions)");
+ SetInternalReference(map, entry,
+ "prototype_transitions", map->prototype_transitions(),
+ Map::kPrototypeTransitionsOrBackPointerOffset);
+ } else {
+ SetInternalReference(map, entry,
+ "back_pointer", map->GetBackPointer(),
+ Map::kPrototypeTransitionsOrBackPointerOffset);
+ }
+ SetInternalReference(map, entry,
+ "code_cache", map->code_cache(),
+ Map::kCodeCacheOffset);
+}
+
+
+void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
+ int entry, SharedFunctionInfo* shared) {
+ HeapObject* obj = shared;
+ SetInternalReference(obj, entry,
+ "name", shared->name(),
+ SharedFunctionInfo::kNameOffset);
+ TagObject(shared->code(), "(code)");
+ SetInternalReference(obj, entry,
+ "code", shared->code(),
+ SharedFunctionInfo::kCodeOffset);
+ TagObject(shared->scope_info(), "(function scope info)");
+ SetInternalReference(obj, entry,
+ "scope_info", shared->scope_info(),
+ SharedFunctionInfo::kScopeInfoOffset);
+ SetInternalReference(obj, entry,
+ "instance_class_name", shared->instance_class_name(),
+ SharedFunctionInfo::kInstanceClassNameOffset);
+ SetInternalReference(obj, entry,
+ "script", shared->script(),
+ SharedFunctionInfo::kScriptOffset);
+ TagObject(shared->construct_stub(), "(code)");
+ SetInternalReference(obj, entry,
+ "construct_stub", shared->construct_stub(),
+ SharedFunctionInfo::kConstructStubOffset);
+ SetInternalReference(obj, entry,
+ "function_data", shared->function_data(),
+ SharedFunctionInfo::kFunctionDataOffset);
+ SetInternalReference(obj, entry,
+ "debug_info", shared->debug_info(),
+ SharedFunctionInfo::kDebugInfoOffset);
+ SetInternalReference(obj, entry,
+ "inferred_name", shared->inferred_name(),
+ SharedFunctionInfo::kInferredNameOffset);
+ SetInternalReference(obj, entry,
+ "this_property_assignments",
+ shared->this_property_assignments(),
+ SharedFunctionInfo::kThisPropertyAssignmentsOffset);
+ SetWeakReference(obj, entry,
+ 1, shared->initial_map(),
+ SharedFunctionInfo::kInitialMapOffset);
+}
+
+
+void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
+ HeapObject* obj = script;
+ SetInternalReference(obj, entry,
+ "source", script->source(),
+ Script::kSourceOffset);
+ SetInternalReference(obj, entry,
+ "name", script->name(),
+ Script::kNameOffset);
+ SetInternalReference(obj, entry,
+ "data", script->data(),
+ Script::kDataOffset);
+ SetInternalReference(obj, entry,
+ "context_data", script->context_data(),
+ Script::kContextOffset);
+ TagObject(script->line_ends(), "(script line ends)");
+ SetInternalReference(obj, entry,
+ "line_ends", script->line_ends(),
+ Script::kLineEndsOffset);
+}
+
+
+void V8HeapExplorer::ExtractCodeCacheReferences(
+ int entry, CodeCache* code_cache) {
+ TagObject(code_cache->default_cache(), "(default code cache)");
+ SetInternalReference(code_cache, entry,
+ "default_cache", code_cache->default_cache(),
+ CodeCache::kDefaultCacheOffset);
+ TagObject(code_cache->normal_type_cache(), "(code type cache)");
+ SetInternalReference(code_cache, entry,
+ "type_cache", code_cache->normal_type_cache(),
+ CodeCache::kNormalTypeCacheOffset);
+}
+
+
+void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
+ TagObject(code->relocation_info(), "(code relocation info)");
+ SetInternalReference(code, entry,
+ "relocation_info", code->relocation_info(),
+ Code::kRelocationInfoOffset);
+ SetInternalReference(code, entry,
+ "handler_table", code->handler_table(),
+ Code::kHandlerTableOffset);
+ TagObject(code->deoptimization_data(), "(code deopt data)");
+ SetInternalReference(code, entry,
+ "deoptimization_data", code->deoptimization_data(),
+ Code::kDeoptimizationDataOffset);
+ SetInternalReference(code, entry,
+ "type_feedback_info", code->type_feedback_info(),
+ Code::kTypeFeedbackInfoOffset);
+ SetInternalReference(code, entry,
+ "gc_metadata", code->gc_metadata(),
+ Code::kGCMetadataOffset);
+}
+
+
+void V8HeapExplorer::ExtractJSGlobalPropertyCellReferences(
+ int entry, JSGlobalPropertyCell* cell) {
+ SetInternalReference(cell, entry, "value", cell->value());
+}
+
+
+void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
if (!js_obj->IsJSFunction()) return;
JSFunction* func = JSFunction::cast(js_obj);
- Context* context = func->context();
- ScopeInfo* scope_info = context->closure()->shared()->scope_info();
-
if (func->shared()->bound()) {
FixedArray* bindings = func->function_bindings();
SetNativeBindReference(js_obj, entry, "bound_this",
@@ -2072,6 +2152,8 @@ void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj,
bindings->get(i));
}
} else {
+ Context* context = func->context()->declaration_context();
+ ScopeInfo* scope_info = context->closure()->shared()->scope_info();
// Add context allocated locals.
int context_locals = scope_info->ContextLocalCount();
for (int i = 0; i < context_locals; ++i) {
@@ -2083,19 +2165,17 @@ void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj,
// Add function variable.
if (scope_info->HasFunctionName()) {
String* name = scope_info->FunctionName();
- int idx = Context::MIN_CONTEXT_SLOTS + context_locals;
-#ifdef DEBUG
VariableMode mode;
- ASSERT(idx == scope_info->FunctionContextSlotIndex(name, &mode));
-#endif
- SetClosureReference(js_obj, entry, name, context->get(idx));
+ int idx = scope_info->FunctionContextSlotIndex(name, &mode);
+ if (idx >= 0) {
+ SetClosureReference(js_obj, entry, name, context->get(idx));
+ }
}
}
}
-void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj,
- HeapEntry* entry) {
+void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
if (js_obj->HasFastProperties()) {
DescriptorArray* descs = js_obj->map()->instance_descriptors();
for (int i = 0; i < descs->number_of_descriptors(); i++) {
@@ -2139,7 +2219,6 @@ void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj,
case HANDLER: // only in lookup results, not in descriptors
case INTERCEPTOR: // only in lookup results, not in descriptors
case MAP_TRANSITION: // we do not care about transitions here...
- case ELEMENTS_TRANSITION:
case CONSTANT_TRANSITION:
case NULL_DESCRIPTOR: // ... and not about "holes"
break;
@@ -2152,15 +2231,15 @@ void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj,
Object* k = dictionary->KeyAt(i);
if (dictionary->IsKey(k)) {
Object* target = dictionary->ValueAt(i);
- SetPropertyReference(
- js_obj, entry, String::cast(k), target);
// We assume that global objects can only have slow properties.
- if (target->IsJSGlobalPropertyCell()) {
- SetPropertyShortcutReference(js_obj,
- entry,
- String::cast(k),
- JSGlobalPropertyCell::cast(
- target)->value());
+ Object* value = target->IsJSGlobalPropertyCell()
+ ? JSGlobalPropertyCell::cast(target)->value()
+ : target;
+ if (String::cast(k)->length() > 0) {
+ SetPropertyReference(js_obj, entry, String::cast(k), value);
+ } else {
+ TagObject(value, "(hidden properties)");
+ SetInternalReference(js_obj, entry, "hidden_properties", value);
}
}
}
@@ -2168,9 +2247,8 @@ void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj,
}
-void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj,
- HeapEntry* entry) {
- if (js_obj->HasFastElements()) {
+void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
+ if (js_obj->HasFastObjectElements()) {
FixedArray* elements = FixedArray::cast(js_obj->elements());
int length = js_obj->IsJSArray() ?
Smi::cast(JSArray::cast(js_obj)->length())->value() :
@@ -2195,8 +2273,7 @@ void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj,
}
-void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj,
- HeapEntry* entry) {
+void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
int length = js_obj->GetInternalFieldCount();
for (int i = 0; i < length; ++i) {
Object* o = js_obj->GetInternalField(i);
@@ -2322,6 +2399,7 @@ bool V8HeapExplorer::IterateAndExtractReferences(
filler_ = NULL;
return false;
}
+
SetRootGcRootsReference();
RootsReferencesExtractor extractor;
heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
@@ -2329,148 +2407,127 @@ bool V8HeapExplorer::IterateAndExtractReferences(
heap_->IterateRoots(&extractor, VISIT_ALL);
extractor.FillReferences(this);
filler_ = NULL;
- return progress_->ProgressReport(false);
+ return progress_->ProgressReport(true);
}
-bool V8HeapExplorer::IterateAndSetObjectNames(SnapshotFillerInterface* filler) {
- HeapIterator iterator(HeapIterator::kFilterUnreachable);
- filler_ = filler;
- for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
- SetObjectName(obj);
- }
- return true;
-}
-
-
-void V8HeapExplorer::SetObjectName(HeapObject* object) {
- if (!object->IsJSObject() || object->IsJSRegExp() || object->IsJSFunction()) {
- return;
- }
- const char* name = collection_->names()->GetName(
- GetConstructorName(JSObject::cast(object)));
- if (object->IsJSGlobalObject()) {
- const char* tag = objects_tags_.GetTag(object);
- if (tag != NULL) {
- name = collection_->names()->GetFormatted("%s / %s", name, tag);
- }
- }
- GetEntry(object)->set_name(name);
+bool V8HeapExplorer::IsEssentialObject(Object* object) {
+ // We have to use raw_unchecked_* versions because checked versions
+ // would fail during iteration over object properties.
+ return object->IsHeapObject()
+ && !object->IsOddball()
+ && object != heap_->raw_unchecked_empty_byte_array()
+ && object != heap_->raw_unchecked_empty_fixed_array()
+ && object != heap_->raw_unchecked_empty_descriptor_array()
+ && object != heap_->raw_unchecked_fixed_array_map()
+ && object != heap_->raw_unchecked_global_property_cell_map()
+ && object != heap_->raw_unchecked_shared_function_info_map()
+ && object != heap_->raw_unchecked_free_space_map()
+ && object != heap_->raw_unchecked_one_pointer_filler_map()
+ && object != heap_->raw_unchecked_two_pointer_filler_map();
}
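
IsEssentialObject filters out oddballs and a fixed set of ubiquitous singleton maps and arrays, so edges to objects that every heap shares do not flood the graph. The idea, reduced to a standalone predicate (the singleton set here is illustrative):

// Sketch of the singleton-exclusion idea behind IsEssentialObject above.
#include <unordered_set>

class EssentialFilter {
 public:
  void AddUbiquitous(const void* obj) { ubiquitous_.insert(obj); }
  bool IsEssential(const void* obj) const {
    return obj != nullptr && ubiquitous_.count(obj) == 0;
  }
 private:
  std::unordered_set<const void*> ubiquitous_;
};
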
void V8HeapExplorer::SetClosureReference(HeapObject* parent_obj,
- HeapEntry* parent_entry,
+ int parent_entry,
String* reference_name,
Object* child_obj) {
HeapEntry* child_entry = GetEntry(child_obj);
if (child_entry != NULL) {
filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
- parent_obj,
parent_entry,
collection_->names()->GetName(reference_name),
- child_obj,
child_entry);
}
}
void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
- HeapEntry* parent_entry,
+ int parent_entry,
const char* reference_name,
Object* child_obj) {
HeapEntry* child_entry = GetEntry(child_obj);
if (child_entry != NULL) {
filler_->SetNamedReference(HeapGraphEdge::kShortcut,
- parent_obj,
parent_entry,
reference_name,
- child_obj,
child_entry);
}
}
void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
- HeapEntry* parent_entry,
+ int parent_entry,
int index,
Object* child_obj) {
HeapEntry* child_entry = GetEntry(child_obj);
if (child_entry != NULL) {
filler_->SetIndexedReference(HeapGraphEdge::kElement,
- parent_obj,
parent_entry,
index,
- child_obj,
child_entry);
}
}
void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
- HeapEntry* parent_entry,
+ int parent_entry,
const char* reference_name,
Object* child_obj,
int field_offset) {
HeapEntry* child_entry = GetEntry(child_obj);
- if (child_entry != NULL) {
+ if (child_entry == NULL) return;
+ if (IsEssentialObject(child_obj)) {
filler_->SetNamedReference(HeapGraphEdge::kInternal,
- parent_obj,
parent_entry,
reference_name,
- child_obj,
child_entry);
- IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}
+ IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}
void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
- HeapEntry* parent_entry,
+ int parent_entry,
int index,
Object* child_obj,
int field_offset) {
HeapEntry* child_entry = GetEntry(child_obj);
- if (child_entry != NULL) {
+ if (child_entry == NULL) return;
+ if (IsEssentialObject(child_obj)) {
filler_->SetNamedReference(HeapGraphEdge::kInternal,
- parent_obj,
parent_entry,
collection_->names()->GetName(index),
- child_obj,
child_entry);
- IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}
+ IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}
void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
- HeapEntry* parent_entry,
+ int parent_entry,
int index,
Object* child_obj) {
HeapEntry* child_entry = GetEntry(child_obj);
- if (child_entry != NULL) {
+ if (child_entry != NULL && IsEssentialObject(child_obj)) {
filler_->SetIndexedReference(HeapGraphEdge::kHidden,
- parent_obj,
parent_entry,
index,
- child_obj,
child_entry);
}
}
void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
- HeapEntry* parent_entry,
+ int parent_entry,
int index,
Object* child_obj,
int field_offset) {
HeapEntry* child_entry = GetEntry(child_obj);
if (child_entry != NULL) {
filler_->SetIndexedReference(HeapGraphEdge::kWeak,
- parent_obj,
parent_entry,
index,
- child_obj,
child_entry);
IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}
@@ -2478,7 +2535,7 @@ void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
- HeapEntry* parent_entry,
+ int parent_entry,
String* reference_name,
Object* child_obj,
const char* name_format_string,
@@ -2495,10 +2552,8 @@ void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
collection_->names()->GetName(reference_name);
filler_->SetNamedReference(type,
- parent_obj,
parent_entry,
name,
- child_obj,
child_entry);
IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}
@@ -2506,16 +2561,14 @@ void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
void V8HeapExplorer::SetPropertyShortcutReference(HeapObject* parent_obj,
- HeapEntry* parent_entry,
+ int parent_entry,
String* reference_name,
Object* child_obj) {
HeapEntry* child_entry = GetEntry(child_obj);
if (child_entry != NULL) {
filler_->SetNamedReference(HeapGraphEdge::kShortcut,
- parent_obj,
parent_entry,
collection_->names()->GetName(reference_name),
- child_obj,
child_entry);
}
}
@@ -2524,26 +2577,26 @@ void V8HeapExplorer::SetPropertyShortcutReference(HeapObject* parent_obj,
void V8HeapExplorer::SetRootGcRootsReference() {
filler_->SetIndexedAutoIndexReference(
HeapGraphEdge::kElement,
- kInternalRootObject, snapshot_->root(),
- kGcRootsObject, snapshot_->gc_roots());
+ snapshot_->root()->index(),
+ snapshot_->gc_roots());
}
-void V8HeapExplorer::SetRootShortcutReference(Object* child_obj) {
+void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
HeapEntry* child_entry = GetEntry(child_obj);
ASSERT(child_entry != NULL);
filler_->SetNamedAutoIndexReference(
HeapGraphEdge::kShortcut,
- kInternalRootObject, snapshot_->root(),
- child_obj, child_entry);
+ snapshot_->root()->index(),
+ child_entry);
}
void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
filler_->SetIndexedAutoIndexReference(
HeapGraphEdge::kElement,
- kGcRootsObject, snapshot_->gc_roots(),
- GetNthGcSubrootObject(tag), snapshot_->gc_subroot(tag));
+ snapshot_->gc_roots()->index(),
+ snapshot_->gc_subroot(tag));
}
@@ -2551,21 +2604,48 @@ void V8HeapExplorer::SetGcSubrootReference(
VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
HeapEntry* child_entry = GetEntry(child_obj);
if (child_entry != NULL) {
- filler_->SetIndexedAutoIndexReference(
- is_weak ? HeapGraphEdge::kWeak : HeapGraphEdge::kElement,
- GetNthGcSubrootObject(tag), snapshot_->gc_subroot(tag),
- child_obj, child_entry);
+ const char* name = GetStrongGcSubrootName(child_obj);
+ if (name != NULL) {
+ filler_->SetNamedReference(
+ HeapGraphEdge::kInternal,
+ snapshot_->gc_subroot(tag)->index(),
+ name,
+ child_entry);
+ } else {
+ filler_->SetIndexedAutoIndexReference(
+ is_weak ? HeapGraphEdge::kWeak : HeapGraphEdge::kElement,
+ snapshot_->gc_subroot(tag)->index(),
+ child_entry);
+ }
}
}
+const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
+ if (strong_gc_subroot_names_.is_empty()) {
+#define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
+#define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
+ STRONG_ROOT_LIST(ROOT_NAME)
+#undef ROOT_NAME
+#define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
+ STRUCT_LIST(STRUCT_MAP_NAME)
+#undef STRUCT_MAP_NAME
+#define SYMBOL_NAME(name, str) NAME_ENTRY(name)
+ SYMBOL_LIST(SYMBOL_NAME)
+#undef SYMBOL_NAME
+#undef NAME_ENTRY
+ CHECK(!strong_gc_subroot_names_.is_empty());
+ }
+ return strong_gc_subroot_names_.GetTag(object);
+}
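
GetStrongGcSubrootName builds its tag table lazily on the first query, letting the root-list macros generate one SetTag call per strong root. A standalone sketch of the same lazily built, macro-generated name table (the root list and key type below are made up):

// Sketch of the lazily built name table behind GetStrongGcSubrootName
// (the root list and the integer key are illustrative).
#include <string>
#include <unordered_map>

#define STRONG_ROOTS(V) \
  V(undefined_value)    \
  V(empty_string)       \
  V(fixed_array_map)

const char* GetRootName(int root_id) {
  static std::unordered_map<int, std::string> names;
  if (names.empty()) {  // build once, on first query
    int id = 0;
#define ADD_NAME(name) names[id++] = #name;
    STRONG_ROOTS(ADD_NAME)
#undef ADD_NAME
  }
  auto it = names.find(root_id);
  return it != names.end() ? it->second.c_str() : nullptr;
}
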
+
+
void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
- if (obj->IsHeapObject() &&
- !obj->IsOddball() &&
- obj != heap_->raw_unchecked_empty_byte_array() &&
- obj != heap_->raw_unchecked_empty_fixed_array() &&
- obj != heap_->raw_unchecked_empty_descriptor_array()) {
- objects_tags_.SetTag(obj, tag);
+ if (IsEssentialObject(obj)) {
+ HeapEntry* entry = GetEntry(obj);
+ if (entry->name()[0] == '\0') {
+ entry->set_name(tag);
+ }
}
}
@@ -2611,7 +2691,7 @@ void V8HeapExplorer::TagGlobalObjects() {
Handle<JSGlobalObject> global_obj = enumerator.at(i);
Object* obj_document;
if (global_obj->GetProperty(*document_string)->ToObject(&obj_document) &&
- obj_document->IsJSObject()) {
+ obj_document->IsJSObject()) {
JSObject* document = JSObject::cast(obj_document);
Object* obj_url;
if (document->GetProperty(*url_string)->ToObject(&obj_url) &&
@@ -2655,8 +2735,7 @@ class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
collection_(snapshot_->collection()),
entries_type_(entries_type) {
}
- virtual HeapEntry* AllocateEntry(
- HeapThing ptr, int children_count, int retainers_count);
+ virtual HeapEntry* AllocateEntry(HeapThing ptr);
private:
HeapSnapshot* snapshot_;
HeapSnapshotsCollection* collection_;
@@ -2664,23 +2743,19 @@ class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
};
-HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(
- HeapThing ptr, int children_count, int retainers_count) {
+HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
intptr_t elements = info->GetElementCount();
intptr_t size = info->GetSizeInBytes();
+ const char* name = elements != -1
+ ? collection_->names()->GetFormatted(
+ "%s / %" V8_PTR_PREFIX "d entries", info->GetLabel(), elements)
+ : collection_->names()->GetCopy(info->GetLabel());
return snapshot_->AddEntry(
entries_type_,
- elements != -1 ?
- collection_->names()->GetFormatted(
- "%s / %" V8_PTR_PREFIX "d entries",
- info->GetLabel(),
- info->GetElementCount()) :
- collection_->names()->GetCopy(info->GetLabel()),
+ name,
HeapObjectsMap::GenerateId(info),
- size != -1 ? static_cast<int>(size) : 0,
- children_count,
- retainers_count);
+ size != -1 ? static_cast<int>(size) : 0);
}
@@ -2761,9 +2836,9 @@ void NativeObjectsExplorer::FillImplicitReferences() {
for (int i = 0; i < groups->length(); ++i) {
ImplicitRefGroup* group = groups->at(i);
HeapObject* parent = *group->parent_;
- HeapEntry* parent_entry =
- filler_->FindOrAddEntry(parent, native_entries_allocator_);
- ASSERT(parent_entry != NULL);
+ int parent_entry =
+ filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
+ ASSERT(parent_entry != HeapEntry::kNoEntry);
Object*** children = group->children_;
for (size_t j = 0; j < group->length_; ++j) {
Object* child = *children[j];
@@ -2771,9 +2846,9 @@ void NativeObjectsExplorer::FillImplicitReferences() {
filler_->FindOrAddEntry(child, native_entries_allocator_);
filler_->SetNamedReference(
HeapGraphEdge::kInternal,
- parent, parent_entry,
+ parent_entry,
"native",
- child, child_entry);
+ child_entry);
}
}
}
@@ -2851,8 +2926,9 @@ NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
HEAP->HashSeed());
HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
hash, true);
- if (entry->value == NULL)
+ if (entry->value == NULL) {
entry->value = new NativeGroupRetainedObjectInfo(label);
+ }
return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
}
@@ -2868,8 +2944,8 @@ void NativeObjectsExplorer::SetNativeRootReference(
filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
filler_->SetNamedAutoIndexReference(
HeapGraphEdge::kInternal,
- group_info, group_entry,
- info, child_entry);
+ group_entry->index(),
+ child_entry);
}
@@ -2881,12 +2957,12 @@ void NativeObjectsExplorer::SetWrapperNativeReferences(
filler_->FindOrAddEntry(info, native_entries_allocator_);
ASSERT(info_entry != NULL);
filler_->SetNamedReference(HeapGraphEdge::kInternal,
- wrapper, wrapper_entry,
+ wrapper_entry->index(),
"native",
- info, info_entry);
+ info_entry);
filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
- info, info_entry,
- wrapper, wrapper_entry);
+ info_entry->index(),
+ wrapper_entry);
}
@@ -2901,8 +2977,8 @@ void NativeObjectsExplorer::SetRootNativeRootsReference() {
ASSERT(group_entry != NULL);
filler_->SetIndexedAutoIndexReference(
HeapGraphEdge::kElement,
- V8HeapExplorer::kInternalRootObject, snapshot_->root(),
- group_info, group_entry);
+ snapshot_->root()->index(),
+ group_entry);
}
}
@@ -2917,56 +2993,6 @@ void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
}
-class SnapshotCounter : public SnapshotFillerInterface {
- public:
- explicit SnapshotCounter(HeapEntriesMap* entries) : entries_(entries) { }
- HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
- entries_->Pair(ptr, allocator, HeapEntriesMap::kHeapEntryPlaceholder);
- return HeapEntriesMap::kHeapEntryPlaceholder;
- }
- HeapEntry* FindEntry(HeapThing ptr) {
- return entries_->Map(ptr);
- }
- HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
- HeapEntry* entry = FindEntry(ptr);
- return entry != NULL ? entry : AddEntry(ptr, allocator);
- }
- void SetIndexedReference(HeapGraphEdge::Type,
- HeapThing parent_ptr,
- HeapEntry*,
- int,
- HeapThing child_ptr,
- HeapEntry*) {
- entries_->CountReference(parent_ptr, child_ptr);
- }
- void SetIndexedAutoIndexReference(HeapGraphEdge::Type,
- HeapThing parent_ptr,
- HeapEntry*,
- HeapThing child_ptr,
- HeapEntry*) {
- entries_->CountReference(parent_ptr, child_ptr);
- }
- void SetNamedReference(HeapGraphEdge::Type,
- HeapThing parent_ptr,
- HeapEntry*,
- const char*,
- HeapThing child_ptr,
- HeapEntry*) {
- entries_->CountReference(parent_ptr, child_ptr);
- }
- void SetNamedAutoIndexReference(HeapGraphEdge::Type,
- HeapThing parent_ptr,
- HeapEntry*,
- HeapThing child_ptr,
- HeapEntry*) {
- entries_->CountReference(parent_ptr, child_ptr);
- }
-
- private:
- HeapEntriesMap* entries_;
-};
-
-
class SnapshotFiller : public SnapshotFillerInterface {
public:
explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
@@ -2974,64 +3000,48 @@ class SnapshotFiller : public SnapshotFillerInterface {
collection_(snapshot->collection()),
entries_(entries) { }
HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
- UNREACHABLE();
- return NULL;
+ HeapEntry* entry = allocator->AllocateEntry(ptr);
+ entries_->Pair(ptr, entry->index());
+ return entry;
}
HeapEntry* FindEntry(HeapThing ptr) {
- return entries_->Map(ptr);
+ int index = entries_->Map(ptr);
+ return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
}
HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
HeapEntry* entry = FindEntry(ptr);
return entry != NULL ? entry : AddEntry(ptr, allocator);
}
void SetIndexedReference(HeapGraphEdge::Type type,
- HeapThing parent_ptr,
- HeapEntry* parent_entry,
+ int parent,
int index,
- HeapThing child_ptr,
HeapEntry* child_entry) {
- int child_index, retainer_index;
- entries_->CountReference(
- parent_ptr, child_ptr, &child_index, &retainer_index);
- parent_entry->SetIndexedReference(
- type, child_index, index, child_entry, retainer_index);
+ HeapEntry* parent_entry = &snapshot_->entries()[parent];
+ parent_entry->SetIndexedReference(type, index, child_entry);
}
void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
- HeapThing parent_ptr,
- HeapEntry* parent_entry,
- HeapThing child_ptr,
+ int parent,
HeapEntry* child_entry) {
- int child_index, retainer_index;
- entries_->CountReference(
- parent_ptr, child_ptr, &child_index, &retainer_index);
- parent_entry->SetIndexedReference(
- type, child_index, child_index + 1, child_entry, retainer_index);
+ HeapEntry* parent_entry = &snapshot_->entries()[parent];
+ int index = parent_entry->children_count() + 1;
+ parent_entry->SetIndexedReference(type, index, child_entry);
}
void SetNamedReference(HeapGraphEdge::Type type,
- HeapThing parent_ptr,
- HeapEntry* parent_entry,
+ int parent,
const char* reference_name,
- HeapThing child_ptr,
HeapEntry* child_entry) {
- int child_index, retainer_index;
- entries_->CountReference(
- parent_ptr, child_ptr, &child_index, &retainer_index);
- parent_entry->SetNamedReference(
- type, child_index, reference_name, child_entry, retainer_index);
+ HeapEntry* parent_entry = &snapshot_->entries()[parent];
+ parent_entry->SetNamedReference(type, reference_name, child_entry);
}
void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
- HeapThing parent_ptr,
- HeapEntry* parent_entry,
- HeapThing child_ptr,
+ int parent,
HeapEntry* child_entry) {
- int child_index, retainer_index;
- entries_->CountReference(
- parent_ptr, child_ptr, &child_index, &retainer_index);
- parent_entry->SetNamedReference(type,
- child_index,
- collection_->names()->GetName(child_index + 1),
- child_entry,
- retainer_index);
+ HeapEntry* parent_entry = &snapshot_->entries()[parent];
+ int index = parent_entry->children_count() + 1;
+ parent_entry->SetNamedReference(
+ type,
+ collection_->names()->GetName(index),
+ child_entry);
}
private:
@@ -3081,32 +3091,16 @@ bool HeapSnapshotGenerator::GenerateSnapshot() {
debug_heap->Verify();
#endif
- SetProgressTotal(2); // 2 passes.
-
-#ifdef DEBUG
- debug_heap->Verify();
-#endif
-
- // Pass 1. Iterate heap contents to count entries and references.
- if (!CountEntriesAndReferences()) return false;
+ SetProgressTotal(1); // 1 pass.
#ifdef DEBUG
debug_heap->Verify();
#endif
- // Allocate memory for entries and references.
- snapshot_->AllocateEntries(entries_.entries_count(),
- entries_.total_children_count(),
- entries_.total_retainers_count());
-
- // Allocate heap objects to entries hash map.
- entries_.AllocateEntries();
-
- // Pass 2. Fill references.
if (!FillReferences()) return false;
- if (!SetEntriesDominators()) return false;
- if (!CalculateRetainedSizes()) return false;
+ snapshot_->FillChildren();
+ snapshot_->RememberLastJSObjectId();
progress_counter_ = progress_total_;
if (!ProgressReport(true)) return false;
@@ -3134,169 +3128,18 @@ bool HeapSnapshotGenerator::ProgressReport(bool force) {
void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
if (control_ == NULL) return;
HeapIterator iterator(HeapIterator::kFilterUnreachable);
- progress_total_ = (
+ progress_total_ = iterations_count * (
v8_heap_explorer_.EstimateObjectsCount(&iterator) +
- dom_explorer_.EstimateObjectsCount()) * iterations_count;
+ dom_explorer_.EstimateObjectsCount());
progress_counter_ = 0;
}
-bool HeapSnapshotGenerator::CountEntriesAndReferences() {
- SnapshotCounter counter(&entries_);
- v8_heap_explorer_.AddRootEntries(&counter);
- return v8_heap_explorer_.IterateAndExtractReferences(&counter)
- && dom_explorer_.IterateAndExtractReferences(&counter);
-}
-
-
bool HeapSnapshotGenerator::FillReferences() {
SnapshotFiller filler(snapshot_, &entries_);
- // IterateAndExtractReferences cannot set object names because
- // it makes call to JSObject::LocalLookupRealNamedProperty which
- // in turn may relocate objects in property maps thus changing the heap
- // layout and affecting retainer counts. This is not acceptable because
- // number of retainers must not change between count and fill passes.
- // To avoid this there's a separate postpass that set object names.
+ v8_heap_explorer_.AddRootEntries(&filler);
return v8_heap_explorer_.IterateAndExtractReferences(&filler)
- && dom_explorer_.IterateAndExtractReferences(&filler)
- && v8_heap_explorer_.IterateAndSetObjectNames(&filler);
-}
-
-
-void HeapSnapshotGenerator::FillReversePostorderIndexes(
- Vector<HeapEntry*>* entries) {
- snapshot_->ClearPaint();
- int current_entry = 0;
- List<HeapEntry*> nodes_to_visit;
- nodes_to_visit.Add(snapshot_->root());
- snapshot_->root()->paint();
- while (!nodes_to_visit.is_empty()) {
- HeapEntry* entry = nodes_to_visit.last();
- Vector<HeapGraphEdge> children = entry->children();
- bool has_new_edges = false;
- for (int i = 0; i < children.length(); ++i) {
- if (children[i].type() == HeapGraphEdge::kShortcut) continue;
- HeapEntry* child = children[i].to();
- if (!child->painted()) {
- nodes_to_visit.Add(child);
- child->paint();
- has_new_edges = true;
- }
- }
- if (!has_new_edges) {
- entry->set_ordered_index(current_entry);
- (*entries)[current_entry++] = entry;
- nodes_to_visit.RemoveLast();
- }
- }
- ASSERT_EQ(current_entry, entries->length());
-}
-
-
-static int Intersect(int i1, int i2, const Vector<int>& dominators) {
- int finger1 = i1, finger2 = i2;
- while (finger1 != finger2) {
- while (finger1 < finger2) finger1 = dominators[finger1];
- while (finger2 < finger1) finger2 = dominators[finger2];
- }
- return finger1;
-}
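
The dominator-tree pass removed by this hunk follows Cooper, Harvey and Kennedy's "A Simple, Fast Dominance Algorithm"; its Intersect helper walks two fingers up the dominators array, which is indexed in reverse postorder, until they meet at the common dominator. Restated over std::vector for reference:

// Standalone restatement of the finger-walking intersection step from the
// dominance algorithm cited above; indices are reverse-postorder positions.
#include <vector>

int Intersect(int i1, int i2, const std::vector<int>& dominators) {
  while (i1 != i2) {
    while (i1 < i2) i1 = dominators[i1];  // climb the lower finger
    while (i2 < i1) i2 = dominators[i2];
  }
  return i1;
}
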
-
-
-// The algorithm is based on the article:
-// K. Cooper, T. Harvey and K. Kennedy "A Simple, Fast Dominance Algorithm"
-// Softw. Pract. Exper. 4 (2001), pp. 1-10.
-bool HeapSnapshotGenerator::BuildDominatorTree(
- const Vector<HeapEntry*>& entries,
- Vector<int>* dominators) {
- if (entries.length() == 0) return true;
- const int entries_length = entries.length(), root_index = entries_length - 1;
- static const int kNoDominator = -1;
- for (int i = 0; i < root_index; ++i) (*dominators)[i] = kNoDominator;
- (*dominators)[root_index] = root_index;
-
- // The affected array is used to mark entries which dominators
- // have to be racalculated because of changes in their retainers.
- ScopedVector<bool> affected(entries_length);
- for (int i = 0; i < affected.length(); ++i) affected[i] = false;
- // Mark the root direct children as affected.
- Vector<HeapGraphEdge> children = entries[root_index]->children();
- for (int i = 0; i < children.length(); ++i) {
- affected[children[i].to()->ordered_index()] = true;
- }
-
- bool changed = true;
- while (changed) {
- changed = false;
- if (!ProgressReport(true)) return false;
- for (int i = root_index - 1; i >= 0; --i) {
- if (!affected[i]) continue;
- affected[i] = false;
- // If dominator of the entry has already been set to root,
- // then it can't propagate any further.
- if ((*dominators)[i] == root_index) continue;
- int new_idom_index = kNoDominator;
- Vector<HeapGraphEdge*> rets = entries[i]->retainers();
- for (int j = 0; j < rets.length(); ++j) {
- if (rets[j]->type() == HeapGraphEdge::kShortcut) continue;
- int ret_index = rets[j]->From()->ordered_index();
- if (dominators->at(ret_index) != kNoDominator) {
- new_idom_index = new_idom_index == kNoDominator
- ? ret_index
- : Intersect(ret_index, new_idom_index, *dominators);
- // If idom has already reached the root, it doesn't make sense
- // to check other retainers.
- if (new_idom_index == root_index) break;
- }
- }
- if (new_idom_index != kNoDominator
- && dominators->at(i) != new_idom_index) {
- (*dominators)[i] = new_idom_index;
- changed = true;
- Vector<HeapGraphEdge> children = entries[i]->children();
- for (int j = 0; j < children.length(); ++j) {
- affected[children[j].to()->ordered_index()] = true;
- }
- }
- }
- }
- return true;
-}
-
-
-bool HeapSnapshotGenerator::SetEntriesDominators() {
- // This array is used for maintaining reverse postorder of nodes.
- ScopedVector<HeapEntry*> ordered_entries(snapshot_->entries()->length());
- FillReversePostorderIndexes(&ordered_entries);
- ScopedVector<int> dominators(ordered_entries.length());
- if (!BuildDominatorTree(ordered_entries, &dominators)) return false;
- for (int i = 0; i < ordered_entries.length(); ++i) {
- ASSERT(dominators[i] >= 0);
- ordered_entries[i]->set_dominator(ordered_entries[dominators[i]]);
- }
- return true;
-}
-
-
-bool HeapSnapshotGenerator::CalculateRetainedSizes() {
- // As for the dominators tree we only know parent nodes, not
- // children, to sum up total sizes we "bubble" node's self size
- // adding it to all of its parents.
- List<HeapEntry*>& entries = *snapshot_->entries();
- for (int i = 0; i < entries.length(); ++i) {
- HeapEntry* entry = entries[i];
- entry->set_retained_size(entry->self_size());
- }
- for (int i = 0; i < entries.length(); ++i) {
- HeapEntry* entry = entries[i];
- int entry_size = entry->self_size();
- for (HeapEntry* dominator = entry->dominator();
- dominator != entry;
- entry = dominator, dominator = entry->dominator()) {
- dominator->add_retained_size(entry_size);
- }
- }
- return true;
+ && dom_explorer_.IterateAndExtractReferences(&filler);
}
@@ -3345,9 +3188,7 @@ class OutputStreamWriter {
MaybeWriteChunk();
}
}
- void AddNumber(int n) { AddNumberImpl<int>(n, "%d"); }
void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
- void AddNumber(uint64_t n) { AddNumberImpl<uint64_t>(n, "%llu"); }
void Finalize() {
if (aborted_) return;
ASSERT(chunk_pos_ < chunk_size_);
@@ -3398,20 +3239,23 @@ class OutputStreamWriter {
};
+// type, name|index, to_node.
+const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
+// type, name, id, self_size, children_index.
+const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 5;
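
The serializer now writes nodes and edges as flat arrays with fixed field counts, so a consumer finds node i at offset i * kNodeFieldsCount. A sketch of decoding one node record, assuming the field order given in the comments above (type, name, id, self_size, children_index):

// Sketch of reading the flat node array whose field count is declared
// above; the field order follows the comment in this diff and is
// otherwise an assumption.
#include <cstdio>
#include <vector>

const int kNodeFields = 5;

void PrintNode(const std::vector<unsigned>& nodes, int node_index) {
  int base = node_index * kNodeFields;
  std::printf("type=%u name=%u id=%u self_size=%u children_index=%u\n",
              nodes[base], nodes[base + 1], nodes[base + 2],
              nodes[base + 3], nodes[base + 4]);
}
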
+
void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
ASSERT(writer_ == NULL);
writer_ = new OutputStreamWriter(stream);
HeapSnapshot* original_snapshot = NULL;
- if (snapshot_->raw_entries_size() >=
+ if (snapshot_->RawSnapshotSize() >=
SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize) {
// The snapshot is too big. Serialize a fake snapshot.
original_snapshot = snapshot_;
snapshot_ = CreateFakeSnapshot();
}
- // Since nodes graph is cyclic, we need the first pass to enumerate
- // them. Strings can be serialized in one pass.
- EnumerateNodes();
+
SerializeImpl();
delete writer_;
@@ -3429,23 +3273,22 @@ HeapSnapshot* HeapSnapshotJSONSerializer::CreateFakeSnapshot() {
HeapSnapshot::kFull,
snapshot_->title(),
snapshot_->uid());
- result->AllocateEntries(2, 1, 0);
- HeapEntry* root = result->AddRootEntry(1);
+ result->AddRootEntry();
const char* text = snapshot_->collection()->names()->GetFormatted(
"The snapshot is too big. "
"Maximum snapshot size is %" V8_PTR_PREFIX "u MB. "
"Actual snapshot size is %" V8_PTR_PREFIX "u MB.",
SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize / MB,
- (snapshot_->raw_entries_size() + MB - 1) / MB);
- HeapEntry* message = result->AddEntry(
- HeapEntry::kString, text, 0, 4, 0, 0);
- root->SetUnidirElementReference(0, 1, message);
- result->SetDominatorsToSelf();
+ (snapshot_->RawSnapshotSize() + MB - 1) / MB);
+ HeapEntry* message = result->AddEntry(HeapEntry::kString, text, 0, 4);
+ result->root()->SetIndexedReference(HeapGraphEdge::kElement, 1, message);
+ result->FillChildren();
return result;
}
void HeapSnapshotJSONSerializer::SerializeImpl() {
+ ASSERT(0 == snapshot_->root()->index());
writer_->AddCharacter('{');
writer_->AddString("\"snapshot\":{");
SerializeSnapshot();
@@ -3455,6 +3298,10 @@ void HeapSnapshotJSONSerializer::SerializeImpl() {
SerializeNodes();
if (writer_->aborted()) return;
writer_->AddString("],\n");
+ writer_->AddString("\"edges\":[");
+ SerializeEdges();
+ if (writer_->aborted()) return;
+ writer_->AddString("],\n");
writer_->AddString("\"strings\":[");
SerializeStrings();
if (writer_->aborted()) return;
@@ -3464,34 +3311,6 @@ void HeapSnapshotJSONSerializer::SerializeImpl() {
}
-class HeapSnapshotJSONSerializerEnumerator {
- public:
- explicit HeapSnapshotJSONSerializerEnumerator(HeapSnapshotJSONSerializer* s)
- : s_(s) {
- }
- void Apply(HeapEntry** entry) {
- s_->GetNodeId(*entry);
- }
- private:
- HeapSnapshotJSONSerializer* s_;
-};
-
-void HeapSnapshotJSONSerializer::EnumerateNodes() {
- GetNodeId(snapshot_->root()); // Make sure root gets the first id.
- HeapSnapshotJSONSerializerEnumerator iter(this);
- snapshot_->IterateEntries(&iter);
-}
-
-
-int HeapSnapshotJSONSerializer::GetNodeId(HeapEntry* entry) {
- HashMap::Entry* cache_entry = nodes_.Lookup(entry, ObjectHash(entry), true);
- if (cache_entry->value == NULL) {
- cache_entry->value = reinterpret_cast<void*>(next_node_id_++);
- }
- return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
-}
-
-
int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
HashMap::Entry* cache_entry = strings_.Lookup(
const_cast<char*>(s), ObjectHash(s), true);
@@ -3502,134 +3321,88 @@ int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
}
-void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge) {
- // The buffer needs space for 3 ints, 3 commas and \0
+static int utoa(unsigned value, const Vector<char>& buffer, int buffer_pos) {
+ int number_of_digits = 0;
+ unsigned t = value;
+ do {
+ ++number_of_digits;
+ } while (t /= 10);
+
+ buffer_pos += number_of_digits;
+ int result = buffer_pos;
+ do {
+ int last_digit = value % 10;
+ buffer[--buffer_pos] = '0' + last_digit;
+ value /= 10;
+ } while (value);
+ return result;
+}
+
+
+void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
+ bool first_edge) {
+ // The buffer needs space for 3 unsigned ints, 3 commas and \0
static const int kBufferSize =
- MaxDecimalDigitsIn<sizeof(int)>::kSigned * 3 + 3 + 1; // NOLINT
+ MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 1; // NOLINT
EmbeddedVector<char, kBufferSize> buffer;
int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
|| edge->type() == HeapGraphEdge::kHidden
|| edge->type() == HeapGraphEdge::kWeak
? edge->index() : GetStringId(edge->name());
- STATIC_CHECK(sizeof(int) == sizeof(edge->type())); // NOLINT
- STATIC_CHECK(sizeof(int) == sizeof(edge_name_or_index)); // NOLINT
- STATIC_CHECK(sizeof(int) == sizeof(GetNodeId(edge->to()))); // NOLINT
- int result = OS::SNPrintF(buffer, ",%d,%d,%d",
- edge->type(), edge_name_or_index, GetNodeId(edge->to()));
- USE(result);
- ASSERT(result != -1);
+ int buffer_pos = 0;
+ if (!first_edge) {
+ buffer[buffer_pos++] = ',';
+ }
+ buffer_pos = utoa(edge->type(), buffer, buffer_pos);
+ buffer[buffer_pos++] = ',';
+ buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
+ buffer[buffer_pos++] = ',';
+ buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
+ buffer[buffer_pos++] = '\0';
writer_->AddString(buffer.start());
}
+void HeapSnapshotJSONSerializer::SerializeEdges() {
+ List<HeapGraphEdge*>& edges = snapshot_->children();
+ for (int i = 0; i < edges.length(); ++i) {
+ ASSERT(i == 0 ||
+ edges[i - 1]->from()->index() <= edges[i]->from()->index());
+ SerializeEdge(edges[i], i == 0);
+ if (writer_->aborted()) return;
+ }
+}
+
+
void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
- // The buffer needs space for 6 ints, 1 uint32_t, 7 commas, \n and \0
+ // The buffer needs space for 5 unsigned ints, 5 commas, \n and \0
static const int kBufferSize =
- 6 * MaxDecimalDigitsIn<sizeof(int)>::kSigned // NOLINT
- + MaxDecimalDigitsIn<sizeof(uint32_t)>::kUnsigned // NOLINT
- + 7 + 1 + 1;
+ 5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
+ + 5 + 1 + 1;
EmbeddedVector<char, kBufferSize> buffer;
- Vector<HeapGraphEdge> children = entry->children();
- STATIC_CHECK(sizeof(int) == sizeof(entry->type())); // NOLINT
- STATIC_CHECK(sizeof(int) == sizeof(GetStringId(entry->name()))); // NOLINT
- STATIC_CHECK(sizeof(unsigned) == sizeof(entry->id())); // NOLINT
- STATIC_CHECK(sizeof(int) == sizeof(entry->self_size())); // NOLINT
- STATIC_CHECK(sizeof(int) == sizeof(entry->retained_size())); // NOLINT
- STATIC_CHECK(sizeof(int) == sizeof(GetNodeId(entry->dominator()))); // NOLINT
- STATIC_CHECK(sizeof(int) == sizeof(children.length())); // NOLINT
- int result = OS::SNPrintF(buffer, "\n,%d,%d,%u,%d,%d,%d,%d",
- entry->type(),
- GetStringId(entry->name()),
- entry->id(),
- entry->self_size(),
- entry->retained_size(),
- GetNodeId(entry->dominator()),
- children.length());
- USE(result);
- ASSERT(result != -1);
+ int buffer_pos = 0;
+ if (entry_index(entry) != 0) {
+ buffer[buffer_pos++] = ',';
+ }
+ buffer_pos = utoa(entry->type(), buffer, buffer_pos);
+ buffer[buffer_pos++] = ',';
+ buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
+ buffer[buffer_pos++] = ',';
+ buffer_pos = utoa(entry->id(), buffer, buffer_pos);
+ buffer[buffer_pos++] = ',';
+ buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
+ buffer[buffer_pos++] = ',';
+ buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
+ buffer[buffer_pos++] = '\n';
+ buffer[buffer_pos++] = '\0';
writer_->AddString(buffer.start());
- for (int i = 0; i < children.length(); ++i) {
- SerializeEdge(&children[i]);
- if (writer_->aborted()) return;
- }
}
void HeapSnapshotJSONSerializer::SerializeNodes() {
- // The first (zero) item of nodes array is an object describing node
- // serialization layout. We use a set of macros to improve
- // readability.
-#define JSON_A(s) "["s"]"
-#define JSON_O(s) "{"s"}"
-#define JSON_S(s) "\""s"\""
- writer_->AddString(JSON_O(
- JSON_S("fields") ":" JSON_A(
- JSON_S("type")
- "," JSON_S("name")
- "," JSON_S("id")
- "," JSON_S("self_size")
- "," JSON_S("retained_size")
- "," JSON_S("dominator")
- "," JSON_S("children_count")
- "," JSON_S("children"))
- "," JSON_S("types") ":" JSON_A(
- JSON_A(
- JSON_S("hidden")
- "," JSON_S("array")
- "," JSON_S("string")
- "," JSON_S("object")
- "," JSON_S("code")
- "," JSON_S("closure")
- "," JSON_S("regexp")
- "," JSON_S("number")
- "," JSON_S("native")
- "," JSON_S("synthetic"))
- "," JSON_S("string")
- "," JSON_S("number")
- "," JSON_S("number")
- "," JSON_S("number")
- "," JSON_S("number")
- "," JSON_S("number")
- "," JSON_O(
- JSON_S("fields") ":" JSON_A(
- JSON_S("type")
- "," JSON_S("name_or_index")
- "," JSON_S("to_node"))
- "," JSON_S("types") ":" JSON_A(
- JSON_A(
- JSON_S("context")
- "," JSON_S("element")
- "," JSON_S("property")
- "," JSON_S("internal")
- "," JSON_S("hidden")
- "," JSON_S("shortcut")
- "," JSON_S("weak"))
- "," JSON_S("string_or_number")
- "," JSON_S("node"))))));
-#undef JSON_S
-#undef JSON_O
-#undef JSON_A
-
- const int node_fields_count = 7;
- // type,name,id,self_size,retained_size,dominator,children_count.
- const int edge_fields_count = 3; // type,name|index,to_node.
- List<HashMap::Entry*> sorted_nodes;
- SortHashMap(&nodes_, &sorted_nodes);
- // Rewrite node ids, so they refer to actual array positions.
- if (sorted_nodes.length() > 1) {
- // Nodes start from array index 1.
- int prev_value = 1;
- sorted_nodes[0]->value = reinterpret_cast<void*>(prev_value);
- for (int i = 1; i < sorted_nodes.length(); ++i) {
- HeapEntry* prev_heap_entry =
- reinterpret_cast<HeapEntry*>(sorted_nodes[i-1]->key);
- prev_value += node_fields_count +
- prev_heap_entry->children().length() * edge_fields_count;
- sorted_nodes[i]->value = reinterpret_cast<void*>(prev_value);
- }
- }
- for (int i = 0; i < sorted_nodes.length(); ++i) {
- SerializeNode(reinterpret_cast<HeapEntry*>(sorted_nodes[i]->key));
+ List<HeapEntry>& entries = snapshot_->entries();
+ for (int i = 0; i < entries.length(); ++i) {
+ SerializeNode(&entries[i]);
if (writer_->aborted()) return;
}
}
@@ -3641,6 +3414,59 @@ void HeapSnapshotJSONSerializer::SerializeSnapshot() {
writer_->AddString("\"");
writer_->AddString(",\"uid\":");
writer_->AddNumber(snapshot_->uid());
+ writer_->AddString(",\"meta\":");
+ // The object describing node serialization layout.
+ // We use a set of macros to improve readability.
+#define JSON_A(s) "[" s "]"
+#define JSON_O(s) "{" s "}"
+#define JSON_S(s) "\"" s "\""
+ writer_->AddString(JSON_O(
+ JSON_S("node_fields") ":" JSON_A(
+ JSON_S("type") ","
+ JSON_S("name") ","
+ JSON_S("id") ","
+ JSON_S("self_size") ","
+ JSON_S("edge_count")) ","
+ JSON_S("node_types") ":" JSON_A(
+ JSON_A(
+ JSON_S("hidden") ","
+ JSON_S("array") ","
+ JSON_S("string") ","
+ JSON_S("object") ","
+ JSON_S("code") ","
+ JSON_S("closure") ","
+ JSON_S("regexp") ","
+ JSON_S("number") ","
+ JSON_S("native") ","
+ JSON_S("synthetic")) ","
+ JSON_S("string") ","
+ JSON_S("number") ","
+ JSON_S("number") ","
+ JSON_S("number") ","
+ JSON_S("number") ","
+ JSON_S("number")) ","
+ JSON_S("edge_fields") ":" JSON_A(
+ JSON_S("type") ","
+ JSON_S("name_or_index") ","
+ JSON_S("to_node")) ","
+ JSON_S("edge_types") ":" JSON_A(
+ JSON_A(
+ JSON_S("context") ","
+ JSON_S("element") ","
+ JSON_S("property") ","
+ JSON_S("internal") ","
+ JSON_S("hidden") ","
+ JSON_S("shortcut") ","
+ JSON_S("weak")) ","
+ JSON_S("string_or_number") ","
+ JSON_S("node"))));
+#undef JSON_S
+#undef JSON_O
+#undef JSON_A
+ writer_->AddString(",\"node_count\":");
+ writer_->AddNumber(snapshot_->entries().length());
+ writer_->AddString(",\"edge_count\":");
+ writer_->AddNumber(snapshot_->edges().length());
}
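With the layout advertised in "meta" above, every node occupies kNodeFieldsCount consecutive values in the "nodes" array and every edge kEdgeFieldsCount values in "edges"; an edge's to_node field is already an offset into the nodes array (entry index times kNodeFieldsCount), and edges are written grouped by their source node in node order, edge_count of them per node. A minimal reader under those assumptions (DumpNodeTargets is illustrative, not part of V8 or of this patch):

#include <cstdio>
#include <vector>

// Field offsets follow the "meta" description:
//   node_fields = ["type", "name", "id", "self_size", "edge_count"]
//   edge_fields = ["type", "name_or_index", "to_node"]
static const int kNodeFieldsCount = 5;
static const int kEdgeFieldsCount = 3;

// Prints each node's id followed by the ids of the nodes it points to.
// "to_node" is an offset into |nodes|, so the target's id sits two slots
// after it; edges for node 0 come first, then node 1's, and so on.
void DumpNodeTargets(const std::vector<unsigned>& nodes,
                     const std::vector<unsigned>& edges) {
  size_t edge_cursor = 0;
  for (size_t n = 0; n < nodes.size(); n += kNodeFieldsCount) {
    unsigned id = nodes[n + 2];          // "id"
    unsigned edge_count = nodes[n + 4];  // "edge_count"
    std::printf("node id=%u ->", id);
    for (unsigned e = 0; e < edge_count; ++e, edge_cursor += kEdgeFieldsCount) {
      unsigned to_offset = edges[edge_cursor + 2];  // "to_node"
      std::printf(" %u", nodes[to_offset + 2]);     // target node's "id"
    }
    std::printf("\n");
  }
}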
diff --git a/deps/v8/src/profile-generator.h b/deps/v8/src/profile-generator.h
index d9a1319b87..d56d874705 100644
--- a/deps/v8/src/profile-generator.h
+++ b/deps/v8/src/profile-generator.h
@@ -35,8 +35,6 @@
namespace v8 {
namespace internal {
-typedef uint32_t SnapshotObjectId;
-
class TokenEnumerator {
public:
TokenEnumerator();
@@ -74,6 +72,7 @@ class StringsStorage {
const char* GetName(int index);
inline const char* GetFunctionName(String* name);
inline const char* GetFunctionName(const char* name);
+ size_t GetUsedMemorySize() const;
private:
static const int kMaxNameSize = 1024;
@@ -448,6 +447,7 @@ class ProfileGenerator {
class HeapEntry;
+class HeapSnapshot;
class HeapGraphEdge BASE_EMBEDDED {
public:
@@ -462,60 +462,45 @@ class HeapGraphEdge BASE_EMBEDDED {
};
HeapGraphEdge() { }
- void Init(int child_index, Type type, const char* name, HeapEntry* to);
- void Init(int child_index, Type type, int index, HeapEntry* to);
- void Init(int child_index, int index, HeapEntry* to);
+ HeapGraphEdge(Type type, const char* name, int from, int to);
+ HeapGraphEdge(Type type, int index, int from, int to);
+ void ReplaceToIndexWithEntry(HeapSnapshot* snapshot);
- Type type() { return static_cast<Type>(type_); }
- int index() {
+ Type type() const { return static_cast<Type>(type_); }
+ int index() const {
ASSERT(type_ == kElement || type_ == kHidden || type_ == kWeak);
return index_;
}
- const char* name() {
+ const char* name() const {
ASSERT(type_ == kContextVariable
- || type_ == kProperty
- || type_ == kInternal
- || type_ == kShortcut);
+ || type_ == kProperty
+ || type_ == kInternal
+ || type_ == kShortcut);
return name_;
}
- HeapEntry* to() { return to_; }
-
- HeapEntry* From();
+ INLINE(HeapEntry* from() const);
+ HeapEntry* to() const { return to_entry_; }
private:
- int child_index_ : 29;
+ INLINE(HeapSnapshot* snapshot() const);
+
unsigned type_ : 3;
+ int from_index_ : 29;
+ union {
+ // During entries population |to_index_| is used for storing the index,
+ // afterwards it is replaced with a pointer to the entry.
+ int to_index_;
+ HeapEntry* to_entry_;
+ };
union {
int index_;
const char* name_;
};
- HeapEntry* to_;
-
- DISALLOW_COPY_AND_ASSIGN(HeapGraphEdge);
};
-class HeapSnapshot;
-
// HeapEntry instances represent an entity from the heap (or a special
-// virtual node, e.g. root). To make heap snapshots more compact,
-// HeapEntries has a special memory layout (no Vectors or Lists used):
-//
-// +-----------------+
-// HeapEntry
-// +-----------------+
-// HeapGraphEdge |
-// ... } children_count
-// HeapGraphEdge |
-// +-----------------+
-// HeapGraphEdge* |
-// ... } retainers_count
-// HeapGraphEdge* |
-// +-----------------+
-//
-// In a HeapSnapshot, all entries are hand-allocated in a continuous array
-// of raw bytes.
-//
+// virtual node, e.g. root).
class HeapEntry BASE_EMBEDDED {
public:
enum Type {
@@ -530,15 +515,14 @@ class HeapEntry BASE_EMBEDDED {
kNative = v8::HeapGraphNode::kNative,
kSynthetic = v8::HeapGraphNode::kSynthetic
};
+ static const int kNoEntry;
HeapEntry() { }
- void Init(HeapSnapshot* snapshot,
+ HeapEntry(HeapSnapshot* snapshot,
Type type,
const char* name,
SnapshotObjectId id,
- int self_size,
- int children_count,
- int retainers_count);
+ int self_size);
HeapSnapshot* snapshot() { return snapshot_; }
Type type() { return static_cast<Type>(type_); }
@@ -546,74 +530,36 @@ class HeapEntry BASE_EMBEDDED {
void set_name(const char* name) { name_ = name; }
inline SnapshotObjectId id() { return id_; }
int self_size() { return self_size_; }
- int retained_size() { return retained_size_; }
- void add_retained_size(int size) { retained_size_ += size; }
- void set_retained_size(int value) { retained_size_ = value; }
- int ordered_index() { return ordered_index_; }
- void set_ordered_index(int value) { ordered_index_ = value; }
-
- Vector<HeapGraphEdge> children() {
- return Vector<HeapGraphEdge>(children_arr(), children_count_); }
- Vector<HeapGraphEdge*> retainers() {
- return Vector<HeapGraphEdge*>(retainers_arr(), retainers_count_); }
- HeapEntry* dominator() { return dominator_; }
- void set_dominator(HeapEntry* entry) {
- ASSERT(entry != NULL);
- dominator_ = entry;
+ INLINE(int index() const);
+ int children_count() const { return children_count_; }
+ INLINE(int set_children_index(int index));
+ void add_child(HeapGraphEdge* edge) {
+ children_arr()[children_count_++] = edge;
}
- void clear_paint() { painted_ = false; }
- bool painted() { return painted_; }
- void paint() { painted_ = true; }
+ Vector<HeapGraphEdge*> children() {
+ return Vector<HeapGraphEdge*>(children_arr(), children_count_); }
- void SetIndexedReference(HeapGraphEdge::Type type,
- int child_index,
- int index,
- HeapEntry* entry,
- int retainer_index);
- void SetNamedReference(HeapGraphEdge::Type type,
- int child_index,
- const char* name,
- HeapEntry* entry,
- int retainer_index);
- void SetUnidirElementReference(int child_index, int index, HeapEntry* entry);
-
- size_t EntrySize() {
- return EntriesSize(1, children_count_, retainers_count_);
- }
+ void SetIndexedReference(
+ HeapGraphEdge::Type type, int index, HeapEntry* entry);
+ void SetNamedReference(
+ HeapGraphEdge::Type type, const char* name, HeapEntry* entry);
void Print(
const char* prefix, const char* edge_name, int max_depth, int indent);
Handle<HeapObject> GetHeapObject();
- static size_t EntriesSize(int entries_count,
- int children_count,
- int retainers_count);
-
private:
- HeapGraphEdge* children_arr() {
- return reinterpret_cast<HeapGraphEdge*>(this + 1);
- }
- HeapGraphEdge** retainers_arr() {
- return reinterpret_cast<HeapGraphEdge**>(children_arr() + children_count_);
- }
+ INLINE(HeapGraphEdge** children_arr());
const char* TypeAsString();
- unsigned painted_: 1;
unsigned type_: 4;
- int children_count_: 27;
- int retainers_count_;
+ int children_count_: 28;
+ int children_index_;
int self_size_;
- union {
- int ordered_index_; // Used during dominator tree building.
- int retained_size_; // At that moment, there is no retained size yet.
- };
SnapshotObjectId id_;
- HeapEntry* dominator_;
HeapSnapshot* snapshot_;
const char* name_;
-
- DISALLOW_COPY_AND_ASSIGN(HeapEntry);
};
@@ -634,59 +580,56 @@ class HeapSnapshot {
Type type,
const char* title,
unsigned uid);
- ~HeapSnapshot();
void Delete();
HeapSnapshotsCollection* collection() { return collection_; }
Type type() { return type_; }
const char* title() { return title_; }
unsigned uid() { return uid_; }
- HeapEntry* root() { return root_entry_; }
- HeapEntry* gc_roots() { return gc_roots_entry_; }
- HeapEntry* natives_root() { return natives_root_entry_; }
- HeapEntry* gc_subroot(int index) { return gc_subroot_entries_[index]; }
- List<HeapEntry*>* entries() { return &entries_; }
- size_t raw_entries_size() { return raw_entries_size_; }
-
- void AllocateEntries(
- int entries_count, int children_count, int retainers_count);
+ size_t RawSnapshotSize() const;
+ HeapEntry* root() { return &entries_[root_index_]; }
+ HeapEntry* gc_roots() { return &entries_[gc_roots_index_]; }
+ HeapEntry* natives_root() { return &entries_[natives_root_index_]; }
+ HeapEntry* gc_subroot(int index) {
+ return &entries_[gc_subroot_indexes_[index]];
+ }
+ List<HeapEntry>& entries() { return entries_; }
+ List<HeapGraphEdge>& edges() { return edges_; }
+ List<HeapGraphEdge*>& children() { return children_; }
+ void RememberLastJSObjectId();
+ SnapshotObjectId max_snapshot_js_object_id() const {
+ return max_snapshot_js_object_id_;
+ }
+
HeapEntry* AddEntry(HeapEntry::Type type,
const char* name,
SnapshotObjectId id,
- int size,
- int children_count,
- int retainers_count);
- HeapEntry* AddRootEntry(int children_count);
- HeapEntry* AddGcRootsEntry(int children_count, int retainers_count);
- HeapEntry* AddGcSubrootEntry(int tag,
- int children_count,
- int retainers_count);
- HeapEntry* AddNativesRootEntry(int children_count, int retainers_count);
- void ClearPaint();
+ int size);
+ HeapEntry* AddRootEntry();
+ HeapEntry* AddGcRootsEntry();
+ HeapEntry* AddGcSubrootEntry(int tag);
+ HeapEntry* AddNativesRootEntry();
HeapEntry* GetEntryById(SnapshotObjectId id);
List<HeapEntry*>* GetSortedEntriesList();
- template<class Visitor>
- void IterateEntries(Visitor* visitor) { entries_.Iterate(visitor); }
- void SetDominatorsToSelf();
+ void FillChildren();
void Print(int max_depth);
void PrintEntriesSize();
private:
- HeapEntry* GetNextEntryToInit();
-
HeapSnapshotsCollection* collection_;
Type type_;
const char* title_;
unsigned uid_;
- HeapEntry* root_entry_;
- HeapEntry* gc_roots_entry_;
- HeapEntry* natives_root_entry_;
- HeapEntry* gc_subroot_entries_[VisitorSynchronization::kNumberOfSyncTags];
- char* raw_entries_;
- List<HeapEntry*> entries_;
- bool entries_sorted_;
- size_t raw_entries_size_;
+ int root_index_;
+ int gc_roots_index_;
+ int natives_root_index_;
+ int gc_subroot_indexes_[VisitorSynchronization::kNumberOfSyncTags];
+ List<HeapEntry> entries_;
+ List<HeapGraphEdge> edges_;
+ List<HeapGraphEdge*> children_;
+ List<HeapEntry*> sorted_entries_;
+ SnapshotObjectId max_snapshot_js_object_id_;
friend class HeapSnapshotTester;
@@ -697,11 +640,18 @@ class HeapSnapshot {
class HeapObjectsMap {
public:
HeapObjectsMap();
- ~HeapObjectsMap();
void SnapshotGenerationFinished();
- SnapshotObjectId FindObject(Address addr);
+ SnapshotObjectId FindEntry(Address addr);
+ SnapshotObjectId FindOrAddEntry(Address addr, unsigned int size);
void MoveObject(Address from, Address to);
+ SnapshotObjectId last_assigned_id() const {
+ return next_id_ - kObjectIdStep;
+ }
+
+ void StopHeapObjectsTracking();
+ SnapshotObjectId PushHeapObjectsStats(OutputStream* stream);
+ size_t GetUsedMemorySize() const;
static SnapshotObjectId GenerateId(v8::RetainedObjectInfo* info);
static inline SnapshotObjectId GetNthGcSubrootId(int delta);
@@ -715,16 +665,23 @@ class HeapObjectsMap {
private:
struct EntryInfo {
- explicit EntryInfo(SnapshotObjectId id) : id(id), accessed(true) { }
- EntryInfo(SnapshotObjectId id, bool accessed)
- : id(id),
- accessed(accessed) { }
+ EntryInfo(SnapshotObjectId id, Address addr, unsigned int size)
+ : id(id), addr(addr), size(size), accessed(true) { }
+ EntryInfo(SnapshotObjectId id, Address addr, unsigned int size, bool accessed)
+ : id(id), addr(addr), size(size), accessed(accessed) { }
SnapshotObjectId id;
+ Address addr;
+ unsigned int size;
bool accessed;
};
+ struct TimeInterval {
+ explicit TimeInterval(SnapshotObjectId id) : id(id), size(0), count(0) { }
+ SnapshotObjectId id;
+ uint32_t size;
+ uint32_t count;
+ };
- void AddEntry(Address addr, SnapshotObjectId id);
- SnapshotObjectId FindEntry(Address addr);
+ void UpdateHeapObjectsMap();
void RemoveDeadEntries();
static bool AddressesMatch(void* key1, void* key2) {
@@ -737,10 +694,10 @@ class HeapObjectsMap {
v8::internal::kZeroHashSeed);
}
- bool initial_fill_mode_;
SnapshotObjectId next_id_;
HashMap entries_map_;
- List<EntryInfo>* entries_;
+ List<EntryInfo> entries_;
+ List<TimeInterval> time_intervals_;
DISALLOW_COPY_AND_ASSIGN(HeapObjectsMap);
};
@@ -752,6 +709,11 @@ class HeapSnapshotsCollection {
~HeapSnapshotsCollection();
bool is_tracking_objects() { return is_tracking_objects_; }
+ SnapshotObjectId PushHeapObjectsStats(OutputStream* stream) {
+ return ids_.PushHeapObjectsStats(stream);
+ }
+ void StartHeapObjectsTracking() { is_tracking_objects_ = true; }
+ void StopHeapObjectsTracking() { ids_.StopHeapObjectsTracking(); }
HeapSnapshot* NewSnapshot(
HeapSnapshot::Type type, const char* name, unsigned uid);
@@ -763,9 +725,18 @@ class HeapSnapshotsCollection {
StringsStorage* names() { return &names_; }
TokenEnumerator* token_enumerator() { return token_enumerator_; }
- SnapshotObjectId GetObjectId(Address addr) { return ids_.FindObject(addr); }
+ SnapshotObjectId FindObjectId(Address object_addr) {
+ return ids_.FindEntry(object_addr);
+ }
+ SnapshotObjectId GetObjectId(Address object_addr, int object_size) {
+ return ids_.FindOrAddEntry(object_addr, object_size);
+ }
Handle<HeapObject> FindHeapObjectById(SnapshotObjectId id);
void ObjectMoveEvent(Address from, Address to) { ids_.MoveObject(from, to); }
+ SnapshotObjectId last_assigned_id() const {
+ return ids_.last_assigned_id();
+ }
+ size_t GetUsedMemorySize() const;
private:
INLINE(static bool HeapSnapshotsMatch(void* key1, void* key2)) {
@@ -794,8 +765,7 @@ typedef void* HeapThing;
class HeapEntriesAllocator {
public:
virtual ~HeapEntriesAllocator() { }
- virtual HeapEntry* AllocateEntry(
- HeapThing ptr, int children_count, int retainers_count) = 0;
+ virtual HeapEntry* AllocateEntry(HeapThing ptr) = 0;
};
@@ -804,35 +774,11 @@ class HeapEntriesAllocator {
class HeapEntriesMap {
public:
HeapEntriesMap();
- ~HeapEntriesMap();
-
- void AllocateEntries();
- HeapEntry* Map(HeapThing thing);
- void Pair(HeapThing thing, HeapEntriesAllocator* allocator, HeapEntry* entry);
- void CountReference(HeapThing from, HeapThing to,
- int* prev_children_count = NULL,
- int* prev_retainers_count = NULL);
- int entries_count() { return entries_count_; }
- int total_children_count() { return total_children_count_; }
- int total_retainers_count() { return total_retainers_count_; }
-
- static HeapEntry* const kHeapEntryPlaceholder;
+ int Map(HeapThing thing);
+ void Pair(HeapThing thing, int entry);
private:
- struct EntryInfo {
- EntryInfo(HeapEntry* entry, HeapEntriesAllocator* allocator)
- : entry(entry),
- allocator(allocator),
- children_count(0),
- retainers_count(0) {
- }
- HeapEntry* entry;
- HeapEntriesAllocator* allocator;
- int children_count;
- int retainers_count;
- };
-
static uint32_t Hash(HeapThing thing) {
return ComputeIntegerHash(
static_cast<uint32_t>(reinterpret_cast<uintptr_t>(thing)),
@@ -843,9 +789,6 @@ class HeapEntriesMap {
}
HashMap entries_;
- int entries_count_;
- int total_children_count_;
- int total_retainers_count_;
friend class HeapObjectsSet;
@@ -861,6 +804,7 @@ class HeapObjectsSet {
void Insert(Object* obj);
const char* GetTag(Object* obj);
void SetTag(Object* obj, const char* tag);
+ bool is_empty() const { return entries_.occupancy() == 0; }
private:
HashMap entries_;
@@ -879,26 +823,18 @@ class SnapshotFillerInterface {
virtual HeapEntry* FindOrAddEntry(HeapThing ptr,
HeapEntriesAllocator* allocator) = 0;
virtual void SetIndexedReference(HeapGraphEdge::Type type,
- HeapThing parent_ptr,
- HeapEntry* parent_entry,
+ int parent_entry,
int index,
- HeapThing child_ptr,
HeapEntry* child_entry) = 0;
virtual void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
- HeapThing parent_ptr,
- HeapEntry* parent_entry,
- HeapThing child_ptr,
+ int parent_entry,
HeapEntry* child_entry) = 0;
virtual void SetNamedReference(HeapGraphEdge::Type type,
- HeapThing parent_ptr,
- HeapEntry* parent_entry,
+ int parent_entry,
const char* reference_name,
- HeapThing child_ptr,
HeapEntry* child_entry) = 0;
virtual void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
- HeapThing parent_ptr,
- HeapEntry* parent_entry,
- HeapThing child_ptr,
+ int parent_entry,
HeapEntry* child_entry) = 0;
};
@@ -917,12 +853,10 @@ class V8HeapExplorer : public HeapEntriesAllocator {
V8HeapExplorer(HeapSnapshot* snapshot,
SnapshottingProgressReportingInterface* progress);
virtual ~V8HeapExplorer();
- virtual HeapEntry* AllocateEntry(
- HeapThing ptr, int children_count, int retainers_count);
+ virtual HeapEntry* AllocateEntry(HeapThing ptr);
void AddRootEntries(SnapshotFillerInterface* filler);
int EstimateObjectsCount(HeapIterator* iterator);
bool IterateAndExtractReferences(SnapshotFillerInterface* filler);
- bool IterateAndSetObjectNames(SnapshotFillerInterface* filler);
void TagGlobalObjects();
static String* GetConstructorName(JSObject* object);
@@ -930,66 +864,77 @@ class V8HeapExplorer : public HeapEntriesAllocator {
static HeapObject* const kInternalRootObject;
private:
- HeapEntry* AddEntry(
- HeapObject* object, int children_count, int retainers_count);
+ HeapEntry* AddEntry(HeapObject* object);
HeapEntry* AddEntry(HeapObject* object,
HeapEntry::Type type,
- const char* name,
- int children_count,
- int retainers_count);
+ const char* name);
const char* GetSystemEntryName(HeapObject* object);
+
void ExtractReferences(HeapObject* obj);
- void ExtractClosureReferences(JSObject* js_obj, HeapEntry* entry);
- void ExtractPropertyReferences(JSObject* js_obj, HeapEntry* entry);
- void ExtractElementReferences(JSObject* js_obj, HeapEntry* entry);
- void ExtractInternalReferences(JSObject* js_obj, HeapEntry* entry);
+ void ExtractJSGlobalProxyReferences(JSGlobalProxy* proxy);
+ void ExtractJSObjectReferences(int entry, JSObject* js_obj);
+ void ExtractStringReferences(int entry, String* obj);
+ void ExtractContextReferences(int entry, Context* context);
+ void ExtractMapReferences(int entry, Map* map);
+ void ExtractSharedFunctionInfoReferences(int entry,
+ SharedFunctionInfo* shared);
+ void ExtractScriptReferences(int entry, Script* script);
+ void ExtractCodeCacheReferences(int entry, CodeCache* code_cache);
+ void ExtractCodeReferences(int entry, Code* code);
+ void ExtractJSGlobalPropertyCellReferences(int entry,
+ JSGlobalPropertyCell* cell);
+ void ExtractClosureReferences(JSObject* js_obj, int entry);
+ void ExtractPropertyReferences(JSObject* js_obj, int entry);
+ void ExtractElementReferences(JSObject* js_obj, int entry);
+ void ExtractInternalReferences(JSObject* js_obj, int entry);
+ bool IsEssentialObject(Object* object);
void SetClosureReference(HeapObject* parent_obj,
- HeapEntry* parent,
+ int parent,
String* reference_name,
Object* child);
void SetNativeBindReference(HeapObject* parent_obj,
- HeapEntry* parent,
+ int parent,
const char* reference_name,
Object* child);
void SetElementReference(HeapObject* parent_obj,
- HeapEntry* parent,
+ int parent,
int index,
Object* child);
void SetInternalReference(HeapObject* parent_obj,
- HeapEntry* parent,
+ int parent,
const char* reference_name,
Object* child,
int field_offset = -1);
void SetInternalReference(HeapObject* parent_obj,
- HeapEntry* parent,
+ int parent,
int index,
Object* child,
int field_offset = -1);
void SetHiddenReference(HeapObject* parent_obj,
- HeapEntry* parent,
+ int parent,
int index,
Object* child);
void SetWeakReference(HeapObject* parent_obj,
- HeapEntry* parent_entry,
+ int parent,
int index,
Object* child_obj,
int field_offset);
void SetPropertyReference(HeapObject* parent_obj,
- HeapEntry* parent,
+ int parent,
String* reference_name,
Object* child,
const char* name_format_string = NULL,
int field_offset = -1);
void SetPropertyShortcutReference(HeapObject* parent_obj,
- HeapEntry* parent,
+ int parent,
String* reference_name,
Object* child);
- void SetRootShortcutReference(Object* child);
+ void SetUserGlobalReference(Object* user_global);
void SetRootGcRootsReference();
void SetGcRootsReference(VisitorSynchronization::SyncTag tag);
void SetGcSubrootReference(
VisitorSynchronization::SyncTag tag, bool is_weak, Object* child);
- void SetObjectName(HeapObject* object);
+ const char* GetStrongGcSubrootName(Object* object);
void TagObject(Object* obj, const char* tag);
HeapEntry* GetEntry(Object* obj);
@@ -1003,6 +948,7 @@ class V8HeapExplorer : public HeapEntriesAllocator {
SnapshottingProgressReportingInterface* progress_;
SnapshotFillerInterface* filler_;
HeapObjectsSet objects_tags_;
+ HeapObjectsSet strong_gc_subroot_names_;
static HeapObject* const kGcRootsObject;
static HeapObject* const kFirstGcSubrootObject;
@@ -1083,15 +1029,9 @@ class HeapSnapshotGenerator : public SnapshottingProgressReportingInterface {
bool GenerateSnapshot();
private:
- bool BuildDominatorTree(const Vector<HeapEntry*>& entries,
- Vector<int>* dominators);
- bool CalculateRetainedSizes();
- bool CountEntriesAndReferences();
bool FillReferences();
- void FillReversePostorderIndexes(Vector<HeapEntry*>* entries);
void ProgressStep();
bool ProgressReport(bool force = false);
- bool SetEntriesDominators();
void SetProgressTotal(int iterations_count);
HeapSnapshot* snapshot_;
@@ -1113,7 +1053,6 @@ class HeapSnapshotJSONSerializer {
public:
explicit HeapSnapshotJSONSerializer(HeapSnapshot* snapshot)
: snapshot_(snapshot),
- nodes_(ObjectsMatch),
strings_(ObjectsMatch),
next_node_id_(1),
next_string_id_(1),
@@ -1132,11 +1071,11 @@ class HeapSnapshotJSONSerializer {
v8::internal::kZeroHashSeed);
}
- void EnumerateNodes();
HeapSnapshot* CreateFakeSnapshot();
- int GetNodeId(HeapEntry* entry);
int GetStringId(const char* s);
- void SerializeEdge(HeapGraphEdge* edge);
+ int entry_index(HeapEntry* e) { return e->index() * kNodeFieldsCount; }
+ void SerializeEdge(HeapGraphEdge* edge, bool first_edge);
+ void SerializeEdges();
void SerializeImpl();
void SerializeNode(HeapEntry* entry);
void SerializeNodes();
@@ -1145,10 +1084,10 @@ class HeapSnapshotJSONSerializer {
void SerializeStrings();
void SortHashMap(HashMap* map, List<HashMap::Entry*>* sorted_entries);
- static const int kMaxSerializableSnapshotRawSize;
+ static const int kEdgeFieldsCount;
+ static const int kNodeFieldsCount;
HeapSnapshot* snapshot_;
- HashMap nodes_;
HashMap strings_;
int next_node_id_;
int next_string_id_;
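The header changes above replace the old per-entry children/retainers layout with three flat lists owned by the snapshot: entries_, edges_ (whose endpoints are stored as indexes while entries may still be relocated) and children_ (per-entry slices of edge pointers). Judging from the declarations, FillChildren() and ReplaceToIndexWithEntry() amount to the following two-pass scheme, shown here with simplified stand-in types rather than the real classes:

#include <vector>

// Stand-ins for HeapEntry/HeapGraphEdge: edges carry integer endpoints while
// the entry list may still be growing; once it is final, the to-side is
// resolved to a pointer and every entry is given its slice of |children|.
struct Entry;

struct Edge {
  int from_index;
  union {              // index during population, pointer afterwards
    int to_index;
    Entry* to_entry;
  };
};

struct Entry {
  int children_index;  // first slot of this entry's slice in |children|
  int children_count;
};

void FillChildren(std::vector<Entry>& entries,
                  std::vector<Edge>& edges,
                  std::vector<Edge*>& children) {
  // Pass 1: count edges per source entry and hand out contiguous slices.
  for (Entry& entry : entries) entry.children_count = 0;
  for (const Edge& edge : edges) ++entries[edge.from_index].children_count;
  int cursor = 0;
  for (Entry& entry : entries) {
    entry.children_index = cursor;
    cursor += entry.children_count;
    entry.children_count = 0;  // reused as a write cursor in pass 2
  }
  children.resize(edges.size());
  // Pass 2: resolve the to-side and drop each edge into its source's slice.
  for (Edge& edge : edges) {
    Entry& from = entries[edge.from_index];
    edge.to_entry = &entries[edge.to_index];
    children[from.children_index + from.children_count++] = &edge;
  }
}

After the second pass each entry's slice is contiguous, which is what the new children_index_/children_count_ pair and the pointer-based children() accessor rely on.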
diff --git a/deps/v8/src/property-details.h b/deps/v8/src/property-details.h
index c79aa969d3..a623fe9b1a 100644
--- a/deps/v8/src/property-details.h
+++ b/deps/v8/src/property-details.h
@@ -63,9 +63,8 @@ enum PropertyType {
INTERCEPTOR = 5, // only in lookup results, not in descriptors
// All properties before MAP_TRANSITION are real.
MAP_TRANSITION = 6, // only in fast mode
- ELEMENTS_TRANSITION = 7,
- CONSTANT_TRANSITION = 8, // only in fast mode
- NULL_DESCRIPTOR = 9, // only in fast mode
+ CONSTANT_TRANSITION = 7, // only in fast mode
+ NULL_DESCRIPTOR = 8, // only in fast mode
// There are no IC stubs for NULL_DESCRIPTORS. Therefore,
// NULL_DESCRIPTOR can be used as the type flag for IC stubs for
// nonexistent properties.
diff --git a/deps/v8/src/property.cc b/deps/v8/src/property.cc
index 78f237d6c7..8c69541be5 100644
--- a/deps/v8/src/property.cc
+++ b/deps/v8/src/property.cc
@@ -61,12 +61,6 @@ void LookupResult::Print(FILE* out) {
GetTransitionMap()->Print(out);
PrintF(out, "\n");
break;
- case ELEMENTS_TRANSITION:
- PrintF(out, " -type = elements transition\n");
- PrintF(out, " -map:\n");
- GetTransitionMap()->Print(out);
- PrintF(out, "\n");
- break;
case CONSTANT_FUNCTION:
PrintF(out, " -type = constant function\n");
PrintF(out, " -function:\n");
@@ -118,7 +112,6 @@ bool Descriptor::ContainsTransition() {
switch (details_.type()) {
case MAP_TRANSITION:
case CONSTANT_TRANSITION:
- case ELEMENTS_TRANSITION:
return true;
case CALLBACKS: {
if (!value_->IsAccessorPair()) return false;
diff --git a/deps/v8/src/property.h b/deps/v8/src/property.h
index 04f78b22d4..aa851f1c88 100644
--- a/deps/v8/src/property.h
+++ b/deps/v8/src/property.h
@@ -111,14 +111,6 @@ class MapTransitionDescriptor: public Descriptor {
: Descriptor(key, map, attributes, MAP_TRANSITION) { }
};
-class ElementsTransitionDescriptor: public Descriptor {
- public:
- ElementsTransitionDescriptor(String* key,
- Object* map_or_array)
- : Descriptor(key, map_or_array, PropertyDetails(NONE,
- ELEMENTS_TRANSITION)) { }
-};
-
// Marks a field name in a map so that adding the field is guaranteed
// to create a FIELD descriptor in the new map. Used after adding
// a constant function the first time, creating a CONSTANT_FUNCTION
@@ -180,7 +172,6 @@ bool IsPropertyDescriptor(T* desc) {
AccessorPair::cast(callback_object)->ContainsAccessor());
}
case MAP_TRANSITION:
- case ELEMENTS_TRANSITION:
case CONSTANT_TRANSITION:
case NULL_DESCRIPTOR:
return false;
@@ -214,13 +205,6 @@ class LookupResult BASE_EMBEDDED {
number_ = number;
}
- void DescriptorResult(JSObject* holder, Smi* details, int number) {
- lookup_type_ = DESCRIPTOR_TYPE;
- holder_ = holder;
- details_ = PropertyDetails(details);
- number_ = number;
- }
-
void ConstantResult(JSObject* holder) {
lookup_type_ = CONSTANT_TYPE;
holder_ = holder;
@@ -318,7 +302,6 @@ class LookupResult BASE_EMBEDDED {
Map* GetTransitionMap() {
ASSERT(lookup_type_ == DESCRIPTOR_TYPE);
ASSERT(type() == MAP_TRANSITION ||
- type() == ELEMENTS_TRANSITION ||
type() == CONSTANT_TRANSITION);
return Map::cast(GetValue());
}
diff --git a/deps/v8/src/regexp-macro-assembler-irregexp-inl.h b/deps/v8/src/regexp-macro-assembler-irregexp-inl.h
index f2a4e851f7..a767ec0089 100644
--- a/deps/v8/src/regexp-macro-assembler-irregexp-inl.h
+++ b/deps/v8/src/regexp-macro-assembler-irregexp-inl.h
@@ -62,6 +62,16 @@ void RegExpMacroAssemblerIrregexp::Emit16(uint32_t word) {
}
+void RegExpMacroAssemblerIrregexp::Emit8(uint32_t word) {
+ ASSERT(pc_ <= buffer_.length());
+ if (pc_ == buffer_.length()) {
+ Expand();
+ }
+ *reinterpret_cast<unsigned char*>(buffer_.start() + pc_) = word;
+ pc_ += 1;
+}
+
+
void RegExpMacroAssemblerIrregexp::Emit32(uint32_t word) {
ASSERT(pc_ <= buffer_.length());
if (pc_ + 3 >= buffer_.length()) {
diff --git a/deps/v8/src/regexp-macro-assembler-irregexp.cc b/deps/v8/src/regexp-macro-assembler-irregexp.cc
index 322efa1365..d2cd22e9ad 100644
--- a/deps/v8/src/regexp-macro-assembler-irregexp.cc
+++ b/deps/v8/src/regexp-macro-assembler-irregexp.cc
@@ -203,8 +203,9 @@ void RegExpMacroAssemblerIrregexp::PushBacktrack(Label* l) {
}
-void RegExpMacroAssemblerIrregexp::Succeed() {
+bool RegExpMacroAssemblerIrregexp::Succeed() {
Emit(BC_SUCCEED, 0);
+ return false; // Restart matching for global regexp not supported.
}
@@ -352,6 +353,42 @@ void RegExpMacroAssemblerIrregexp::CheckNotCharacterAfterMinusAnd(
}
+void RegExpMacroAssemblerIrregexp::CheckCharacterInRange(
+ uc16 from,
+ uc16 to,
+ Label* on_in_range) {
+ Emit(BC_CHECK_CHAR_IN_RANGE, 0);
+ Emit16(from);
+ Emit16(to);
+ EmitOrLink(on_in_range);
+}
+
+
+void RegExpMacroAssemblerIrregexp::CheckCharacterNotInRange(
+ uc16 from,
+ uc16 to,
+ Label* on_not_in_range) {
+ Emit(BC_CHECK_CHAR_NOT_IN_RANGE, 0);
+ Emit16(from);
+ Emit16(to);
+ EmitOrLink(on_not_in_range);
+}
+
+
+void RegExpMacroAssemblerIrregexp::CheckBitInTable(
+ Handle<ByteArray> table, Label* on_bit_set) {
+ Emit(BC_CHECK_BIT_IN_TABLE, 0);
+ EmitOrLink(on_bit_set);
+ for (int i = 0; i < kTableSize; i += kBitsPerByte) {
+ int byte = 0;
+ for (int j = 0; j < kBitsPerByte; j++) {
+ if (table->get(i + j) != 0) byte |= 1 << j;
+ }
+ Emit8(byte);
+ }
+}
+
+
void RegExpMacroAssemblerIrregexp::CheckNotBackReference(int start_reg,
Label* on_not_equal) {
ASSERT(start_reg >= 0);
@@ -371,17 +408,6 @@ void RegExpMacroAssemblerIrregexp::CheckNotBackReferenceIgnoreCase(
}
-void RegExpMacroAssemblerIrregexp::CheckNotRegistersEqual(int reg1,
- int reg2,
- Label* on_not_equal) {
- ASSERT(reg1 >= 0);
- ASSERT(reg1 <= kMaxRegister);
- Emit(BC_CHECK_NOT_REGS_EQUAL, reg1);
- Emit32(reg2);
- EmitOrLink(on_not_equal);
-}
-
-
void RegExpMacroAssemblerIrregexp::CheckCharacters(
Vector<const uc16> str,
int cp_offset,
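CheckBitInTable() above flattens a 128-entry 0/1 table (kTableSize, defined in regexp-macro-assembler.h further down) into 128/8 = 16 bytes, storing entry i + j in bit j of byte i/8. The packing and the matching membership test look like this (an illustrative sketch mirroring the emission loop, not the interpreter's code):

#include <cstdint>

static const int kTableSizeBits = 7;
static const int kTableSize = 1 << kTableSizeBits;  // 128 entries
static const int kTableMask = kTableSize - 1;

// Pack one byte per 8 table entries, least significant bit first, exactly
// like the Emit8() loop above.
void PackTable(const uint8_t (&table)[kTableSize],
               uint8_t (&packed)[kTableSize / 8]) {
  for (int i = 0; i < kTableSize; i += 8) {
    int byte = 0;
    for (int j = 0; j < 8; j++) {
      if (table[i + j] != 0) byte |= 1 << j;
    }
    packed[i / 8] = static_cast<uint8_t>(byte);
  }
}

// Membership test for character |c|: reduce modulo the table size, then
// pick byte index/8 and bit index%8.
bool BitIsSet(const uint8_t (&packed)[kTableSize / 8], uint16_t c) {
  int index = c & kTableMask;
  return (packed[index >> 3] & (1 << (index & 7))) != 0;
}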
diff --git a/deps/v8/src/regexp-macro-assembler-irregexp.h b/deps/v8/src/regexp-macro-assembler-irregexp.h
index 262ead297c..7232342dc5 100644
--- a/deps/v8/src/regexp-macro-assembler-irregexp.h
+++ b/deps/v8/src/regexp-macro-assembler-irregexp.h
@@ -1,4 +1,4 @@
-// Copyright 2008-2009 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -59,7 +59,7 @@ class RegExpMacroAssemblerIrregexp: public RegExpMacroAssembler {
virtual void Backtrack();
virtual void GoTo(Label* label);
virtual void PushBacktrack(Label* label);
- virtual void Succeed();
+ virtual bool Succeed();
virtual void Fail();
virtual void PopRegister(int register_index);
virtual void PushRegister(int register_index,
@@ -93,10 +93,16 @@ class RegExpMacroAssemblerIrregexp: public RegExpMacroAssembler {
uc16 minus,
uc16 mask,
Label* on_not_equal);
+ virtual void CheckCharacterInRange(uc16 from,
+ uc16 to,
+ Label* on_in_range);
+ virtual void CheckCharacterNotInRange(uc16 from,
+ uc16 to,
+ Label* on_not_in_range);
+ virtual void CheckBitInTable(Handle<ByteArray> table, Label* on_bit_set);
virtual void CheckNotBackReference(int start_reg, Label* on_no_match);
virtual void CheckNotBackReferenceIgnoreCase(int start_reg,
Label* on_no_match);
- virtual void CheckNotRegistersEqual(int reg1, int reg2, Label* on_not_equal);
virtual void CheckCharacters(Vector<const uc16> str,
int cp_offset,
Label* on_failure,
@@ -114,6 +120,7 @@ class RegExpMacroAssemblerIrregexp: public RegExpMacroAssembler {
inline void EmitOrLink(Label* label);
inline void Emit32(uint32_t x);
inline void Emit16(uint32_t x);
+ inline void Emit8(uint32_t x);
inline void Emit(uint32_t bc, uint32_t arg);
// Bytecode buffer.
int length();
diff --git a/deps/v8/src/regexp-macro-assembler-tracer.cc b/deps/v8/src/regexp-macro-assembler-tracer.cc
index f8432784f2..f878e8c460 100644
--- a/deps/v8/src/regexp-macro-assembler-tracer.cc
+++ b/deps/v8/src/regexp-macro-assembler-tracer.cc
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -35,6 +35,7 @@ namespace internal {
RegExpMacroAssemblerTracer::RegExpMacroAssemblerTracer(
RegExpMacroAssembler* assembler) :
+ RegExpMacroAssembler(assembler->zone()),
assembler_(assembler) {
unsigned int type = assembler->Implementation();
ASSERT(type < 5);
@@ -102,14 +103,15 @@ void RegExpMacroAssemblerTracer::PushBacktrack(Label* label) {
}
-void RegExpMacroAssemblerTracer::Succeed() {
- PrintF(" Succeed();\n");
- assembler_->Succeed();
+bool RegExpMacroAssemblerTracer::Succeed() {
+ bool restart = assembler_->Succeed();
+ PrintF(" Succeed();%s\n", restart ? " [restart for global match]" : "");
+ return restart;
}
void RegExpMacroAssemblerTracer::Fail() {
- PrintF(" Fail();\n");
+ PrintF(" Fail();");
assembler_->Fail();
}
@@ -198,24 +200,55 @@ void RegExpMacroAssemblerTracer::LoadCurrentCharacter(int cp_offset,
}
+class PrintablePrinter {
+ public:
+ explicit PrintablePrinter(uc16 character) : character_(character) { }
+
+ const char* operator*() {
+ if (character_ >= ' ' && character_ <= '~') {
+ buffer_[0] = '(';
+ buffer_[1] = static_cast<char>(character_);
+ buffer_[2] = ')';
+ buffer_[3] = '\0';
+ } else {
+ buffer_[0] = '\0';
+ }
+ return &buffer_[0];
+ };
+
+ private:
+ uc16 character_;
+ char buffer_[4];
+};
+
+
void RegExpMacroAssemblerTracer::CheckCharacterLT(uc16 limit, Label* on_less) {
- PrintF(" CheckCharacterLT(c='u%04x', label[%08x]);\n",
- limit, LabelToInt(on_less));
+ PrintablePrinter printable(limit);
+ PrintF(" CheckCharacterLT(c=0x%04x%s, label[%08x]);\n",
+ limit,
+ *printable,
+ LabelToInt(on_less));
assembler_->CheckCharacterLT(limit, on_less);
}
void RegExpMacroAssemblerTracer::CheckCharacterGT(uc16 limit,
Label* on_greater) {
- PrintF(" CheckCharacterGT(c='u%04x', label[%08x]);\n",
- limit, LabelToInt(on_greater));
+ PrintablePrinter printable(limit);
+ PrintF(" CheckCharacterGT(c=0x%04x%s, label[%08x]);\n",
+ limit,
+ *printable,
+ LabelToInt(on_greater));
assembler_->CheckCharacterGT(limit, on_greater);
}
void RegExpMacroAssemblerTracer::CheckCharacter(unsigned c, Label* on_equal) {
- PrintF(" CheckCharacter(c='u%04x', label[%08x]);\n",
- c, LabelToInt(on_equal));
+ PrintablePrinter printable(c);
+ PrintF(" CheckCharacter(c=0x%04x%s, label[%08x]);\n",
+ c,
+ *printable,
+ LabelToInt(on_equal));
assembler_->CheckCharacter(c, on_equal);
}
@@ -234,8 +267,11 @@ void RegExpMacroAssemblerTracer::CheckNotAtStart(Label* on_not_at_start) {
void RegExpMacroAssemblerTracer::CheckNotCharacter(unsigned c,
Label* on_not_equal) {
- PrintF(" CheckNotCharacter(c='u%04x', label[%08x]);\n",
- c, LabelToInt(on_not_equal));
+ PrintablePrinter printable(c);
+ PrintF(" CheckNotCharacter(c=0x%04x%s, label[%08x]);\n",
+ c,
+ *printable,
+ LabelToInt(on_not_equal));
assembler_->CheckNotCharacter(c, on_not_equal);
}
@@ -244,8 +280,10 @@ void RegExpMacroAssemblerTracer::CheckCharacterAfterAnd(
unsigned c,
unsigned mask,
Label* on_equal) {
- PrintF(" CheckCharacterAfterAnd(c='u%04x', mask=0x%04x, label[%08x]);\n",
+ PrintablePrinter printable(c);
+ PrintF(" CheckCharacterAfterAnd(c=0x%04x%s, mask=0x%04x, label[%08x]);\n",
c,
+ *printable,
mask,
LabelToInt(on_equal));
assembler_->CheckCharacterAfterAnd(c, mask, on_equal);
@@ -256,8 +294,10 @@ void RegExpMacroAssemblerTracer::CheckNotCharacterAfterAnd(
unsigned c,
unsigned mask,
Label* on_not_equal) {
- PrintF(" CheckNotCharacterAfterAnd(c='u%04x', mask=0x%04x, label[%08x]);\n",
+ PrintablePrinter printable(c);
+ PrintF(" CheckNotCharacterAfterAnd(c=0x%04x%s, mask=0x%04x, label[%08x]);\n",
c,
+ *printable,
mask,
LabelToInt(on_not_equal));
assembler_->CheckNotCharacterAfterAnd(c, mask, on_not_equal);
@@ -269,7 +309,7 @@ void RegExpMacroAssemblerTracer::CheckNotCharacterAfterMinusAnd(
uc16 minus,
uc16 mask,
Label* on_not_equal) {
- PrintF(" CheckNotCharacterAfterMinusAnd(c='u%04x', minus=%04x, mask=0x%04x, "
+ PrintF(" CheckNotCharacterAfterMinusAnd(c=0x%04x, minus=%04x, mask=0x%04x, "
"label[%08x]);\n",
c,
minus,
@@ -279,6 +319,53 @@ void RegExpMacroAssemblerTracer::CheckNotCharacterAfterMinusAnd(
}
+void RegExpMacroAssemblerTracer::CheckCharacterInRange(
+ uc16 from,
+ uc16 to,
+ Label* on_not_in_range) {
+ PrintablePrinter printable_from(from);
+ PrintablePrinter printable_to(to);
+ PrintF(" CheckCharacterInRange(from=0x%04x%s, to=0x%04x%s, label[%08x]);\n",
+ from,
+ *printable_from,
+ to,
+ *printable_to,
+ LabelToInt(on_not_in_range));
+ assembler_->CheckCharacterInRange(from, to, on_not_in_range);
+}
+
+
+void RegExpMacroAssemblerTracer::CheckCharacterNotInRange(
+ uc16 from,
+ uc16 to,
+ Label* on_in_range) {
+ PrintablePrinter printable_from(from);
+ PrintablePrinter printable_to(to);
+ PrintF(
+ " CheckCharacterNotInRange(from=0x%04x%s," " to=%04x%s, label[%08x]);\n",
+ from,
+ *printable_from,
+ to,
+ *printable_to,
+ LabelToInt(on_in_range));
+ assembler_->CheckCharacterNotInRange(from, to, on_in_range);
+}
+
+
+void RegExpMacroAssemblerTracer::CheckBitInTable(
+ Handle<ByteArray> table, Label* on_bit_set) {
+ PrintF(" CheckBitInTable(label[%08x] ", LabelToInt(on_bit_set));
+ for (int i = 0; i < kTableSize; i++) {
+ PrintF("%c", table->get(i) != 0 ? 'X' : '.');
+ if (i % 32 == 31 && i != kTableMask) {
+ PrintF("\n ");
+ }
+ }
+ PrintF(");\n");
+ assembler_->CheckBitInTable(table, on_bit_set);
+}
+
+
void RegExpMacroAssemblerTracer::CheckNotBackReference(int start_reg,
Label* on_no_match) {
PrintF(" CheckNotBackReference(register=%d, label[%08x]);\n", start_reg,
@@ -296,17 +383,6 @@ void RegExpMacroAssemblerTracer::CheckNotBackReferenceIgnoreCase(
}
-void RegExpMacroAssemblerTracer::CheckNotRegistersEqual(int reg1,
- int reg2,
- Label* on_not_equal) {
- PrintF(" CheckNotRegistersEqual(reg1=%d, reg2=%d, label[%08x]);\n",
- reg1,
- reg2,
- LabelToInt(on_not_equal));
- assembler_->CheckNotRegistersEqual(reg1, reg2, on_not_equal);
-}
-
-
void RegExpMacroAssemblerTracer::CheckCharacters(Vector<const uc16> str,
int cp_offset,
Label* on_failure,
@@ -314,7 +390,7 @@ void RegExpMacroAssemblerTracer::CheckCharacters(Vector<const uc16> str,
PrintF(" %s(str=\"",
check_end_of_string ? "CheckCharacters" : "CheckCharactersUnchecked");
for (int i = 0; i < str.length(); i++) {
- PrintF("u%04x", str[i]);
+ PrintF("0x%04x", str[i]);
}
PrintF("\", cp_offset=%d, label[%08x])\n",
cp_offset, LabelToInt(on_failure));
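PrintablePrinter above makes the trace append the character itself whenever it falls in the printable ASCII range, so a line now reads e.g. CheckCharacter(c=0x0061(a), label[00000000]);. A self-contained demo of that formatting (the class body is copied from the hunk; main() and the uc16 typedef are just for the example):

#include <cstdio>

typedef unsigned short uc16;

// Yields "(x)" for printable ASCII characters and "" otherwise, suitable for
// appending to the hex form of the character in trace output.
class PrintablePrinter {
 public:
  explicit PrintablePrinter(uc16 character) : character_(character) { }

  const char* operator*() {
    if (character_ >= ' ' && character_ <= '~') {
      buffer_[0] = '(';
      buffer_[1] = static_cast<char>(character_);
      buffer_[2] = ')';
      buffer_[3] = '\0';
    } else {
      buffer_[0] = '\0';
    }
    return &buffer_[0];
  }

 private:
  uc16 character_;
  char buffer_[4];
};

int main() {
  uc16 chars[] = { 0x0061, 0x000a, 0x2603 };
  for (int i = 0; i < 3; i++) {
    PrintablePrinter printable(chars[i]);
    std::printf("CheckCharacter(c=0x%04x%s, label[00000000]);\n",
                chars[i], *printable);
  }
  // Prints:
  //   CheckCharacter(c=0x0061(a), label[00000000]);
  //   CheckCharacter(c=0x000a, label[00000000]);
  //   CheckCharacter(c=0x2603, label[00000000]);
  return 0;
}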
diff --git a/deps/v8/src/regexp-macro-assembler-tracer.h b/deps/v8/src/regexp-macro-assembler-tracer.h
index 1cf0349d86..ac262df76f 100644
--- a/deps/v8/src/regexp-macro-assembler-tracer.h
+++ b/deps/v8/src/regexp-macro-assembler-tracer.h
@@ -59,7 +59,6 @@ class RegExpMacroAssemblerTracer: public RegExpMacroAssembler {
virtual void CheckNotBackReference(int start_reg, Label* on_no_match);
virtual void CheckNotBackReferenceIgnoreCase(int start_reg,
Label* on_no_match);
- virtual void CheckNotRegistersEqual(int reg1, int reg2, Label* on_not_equal);
virtual void CheckNotCharacter(unsigned c, Label* on_not_equal);
virtual void CheckNotCharacterAfterAnd(unsigned c,
unsigned and_with,
@@ -68,6 +67,13 @@ class RegExpMacroAssemblerTracer: public RegExpMacroAssembler {
uc16 minus,
uc16 and_with,
Label* on_not_equal);
+ virtual void CheckCharacterInRange(uc16 from,
+ uc16 to,
+ Label* on_in_range);
+ virtual void CheckCharacterNotInRange(uc16 from,
+ uc16 to,
+ Label* on_not_in_range);
+ virtual void CheckBitInTable(Handle<ByteArray> table, Label* on_bit_set);
virtual bool CheckSpecialCharacterClass(uc16 type,
Label* on_no_match);
virtual void Fail();
@@ -91,7 +97,7 @@ class RegExpMacroAssemblerTracer: public RegExpMacroAssembler {
virtual void ReadStackPointerFromRegister(int reg);
virtual void SetCurrentPositionFromEnd(int by);
virtual void SetRegister(int register_index, int to);
- virtual void Succeed();
+ virtual bool Succeed();
virtual void WriteCurrentPositionToRegister(int reg, int cp_offset);
virtual void ClearRegisters(int reg_from, int reg_to);
virtual void WriteStackPointerToRegister(int reg);
diff --git a/deps/v8/src/regexp-macro-assembler.cc b/deps/v8/src/regexp-macro-assembler.cc
index b6fb3c5214..a4719b53fc 100644
--- a/deps/v8/src/regexp-macro-assembler.cc
+++ b/deps/v8/src/regexp-macro-assembler.cc
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -35,7 +35,10 @@
namespace v8 {
namespace internal {
-RegExpMacroAssembler::RegExpMacroAssembler() : slow_safe_compiler_(false) {
+RegExpMacroAssembler::RegExpMacroAssembler(Zone* zone)
+ : slow_safe_compiler_(false),
+ global_mode_(NOT_GLOBAL),
+ zone_(zone) {
}
@@ -54,8 +57,8 @@ bool RegExpMacroAssembler::CanReadUnaligned() {
#ifndef V8_INTERPRETED_REGEXP // Avoid unused code, e.g., on ARM.
-NativeRegExpMacroAssembler::NativeRegExpMacroAssembler()
- : RegExpMacroAssembler() {
+NativeRegExpMacroAssembler::NativeRegExpMacroAssembler(Zone* zone)
+ : RegExpMacroAssembler(zone) {
}
@@ -149,6 +152,7 @@ NativeRegExpMacroAssembler::Result NativeRegExpMacroAssembler::Match(
input_start,
input_end,
offsets_vector,
+ offsets_vector_length,
isolate);
return res;
}
@@ -161,6 +165,7 @@ NativeRegExpMacroAssembler::Result NativeRegExpMacroAssembler::Execute(
const byte* input_start,
const byte* input_end,
int* output,
+ int output_size,
Isolate* isolate) {
ASSERT(isolate == Isolate::Current());
// Ensure that the minimum stack has been allocated.
@@ -174,10 +179,10 @@ NativeRegExpMacroAssembler::Result NativeRegExpMacroAssembler::Execute(
input_start,
input_end,
output,
+ output_size,
stack_base,
direct_call,
isolate);
- ASSERT(result <= SUCCESS);
ASSERT(result >= RETRY);
if (result == EXCEPTION && !isolate->has_pending_exception()) {
diff --git a/deps/v8/src/regexp-macro-assembler.h b/deps/v8/src/regexp-macro-assembler.h
index 0314c707c6..bcf36735c4 100644
--- a/deps/v8/src/regexp-macro-assembler.h
+++ b/deps/v8/src/regexp-macro-assembler.h
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -45,6 +45,11 @@ class RegExpMacroAssembler {
static const int kMaxRegister = (1 << 16) - 1;
static const int kMaxCPOffset = (1 << 15) - 1;
static const int kMinCPOffset = -(1 << 15);
+
+ static const int kTableSizeBits = 7;
+ static const int kTableSize = 1 << kTableSizeBits;
+ static const int kTableMask = kTableSize - 1;
+
enum IrregexpImplementation {
kIA32Implementation,
kARMImplementation,
@@ -58,7 +63,7 @@ class RegExpMacroAssembler {
kCheckStackLimit = true
};
- RegExpMacroAssembler();
+ explicit RegExpMacroAssembler(Zone* zone);
virtual ~RegExpMacroAssembler();
// The maximal number of pushes between stack checks. Users must supply
// kCheckStackLimit flag to push operations (instead of kNoStackLimitCheck)
@@ -106,15 +111,22 @@ class RegExpMacroAssembler {
virtual void CheckNotCharacterAfterAnd(unsigned c,
unsigned and_with,
Label* on_not_equal) = 0;
- // Subtract a constant from the current character, then or with the given
+ // Subtract a constant from the current character, then and with the given
// constant and then check for a match with c.
virtual void CheckNotCharacterAfterMinusAnd(uc16 c,
uc16 minus,
uc16 and_with,
Label* on_not_equal) = 0;
- virtual void CheckNotRegistersEqual(int reg1,
- int reg2,
- Label* on_not_equal) = 0;
+ virtual void CheckCharacterInRange(uc16 from,
+ uc16 to, // Both inclusive.
+ Label* on_in_range) = 0;
+ virtual void CheckCharacterNotInRange(uc16 from,
+ uc16 to, // Both inclusive.
+ Label* on_not_in_range) = 0;
+
+ // The current character (modulus the kTableSize) is looked up in the byte
+ // array, and if the found byte is non-zero, we jump to the on_bit_set label.
+ virtual void CheckBitInTable(Handle<ByteArray> table, Label* on_bit_set) = 0;
// Checks whether the given offset from the current position is before
// the end of the string. May overwrite the current character.
@@ -158,7 +170,8 @@ class RegExpMacroAssembler {
virtual void ReadStackPointerFromRegister(int reg) = 0;
virtual void SetCurrentPositionFromEnd(int by) = 0;
virtual void SetRegister(int register_index, int to) = 0;
- virtual void Succeed() = 0;
+ // Return whether the matching (with a global regexp) will be restarted.
+ virtual bool Succeed() = 0;
virtual void WriteCurrentPositionToRegister(int reg, int cp_offset) = 0;
virtual void ClearRegisters(int reg_from, int reg_to) = 0;
virtual void WriteStackPointerToRegister(int reg) = 0;
@@ -167,8 +180,21 @@ class RegExpMacroAssembler {
void set_slow_safe(bool ssc) { slow_safe_compiler_ = ssc; }
bool slow_safe() { return slow_safe_compiler_; }
+ enum GlobalMode { NOT_GLOBAL, GLOBAL, GLOBAL_NO_ZERO_LENGTH_CHECK };
+ // Set whether the regular expression has the global flag. Exiting due to
+ // a failure in a global regexp may still mean success overall.
+ inline void set_global_mode(GlobalMode mode) { global_mode_ = mode; }
+ inline bool global() { return global_mode_ != NOT_GLOBAL; }
+ inline bool global_with_zero_length_check() {
+ return global_mode_ == GLOBAL;
+ }
+
+ Zone* zone() const { return zone_; }
+
private:
bool slow_safe_compiler_;
+ GlobalMode global_mode_;
+ Zone* zone_;
};
@@ -190,7 +216,7 @@ class NativeRegExpMacroAssembler: public RegExpMacroAssembler {
// capture positions.
enum Result { RETRY = -2, EXCEPTION = -1, FAILURE = 0, SUCCESS = 1 };
- NativeRegExpMacroAssembler();
+ explicit NativeRegExpMacroAssembler(Zone* zone);
virtual ~NativeRegExpMacroAssembler();
virtual bool CanReadUnaligned();
@@ -233,6 +259,7 @@ class NativeRegExpMacroAssembler: public RegExpMacroAssembler {
const byte* input_start,
const byte* input_end,
int* output,
+ int output_size,
Isolate* isolate);
};
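The GLOBAL / GLOBAL_NO_ZERO_LENGTH_CHECK split above exists because a global pattern that can match the empty string has to be forced to advance, otherwise the restart loop that Succeed() now signals would never terminate. A toy driver showing the behaviour the zero-length check buys (MatchAt() is a stand-in for compiled regexp code, not a V8 API):

#include <cstdio>
#include <string>

// Stand-in for the compiled pattern /a*/: always succeeds at |pos|, possibly
// with a zero-length match.
static int MatchAt(const std::string& subject, int pos, int* match_len) {
  int len = 0;
  while (pos + len < static_cast<int>(subject.size()) &&
         subject[pos + len] == 'a') {
    len++;
  }
  *match_len = len;
  return pos;
}

int main() {
  std::string subject = "aabxa";
  int pos = 0;
  while (pos <= static_cast<int>(subject.size())) {
    int len = 0;
    int start = MatchAt(subject, pos, &len);
    std::printf("match at %d, length %d\n", start, len);
    pos = start + len;
    if (len == 0) pos++;  // the zero-length check: force progress
  }
  // Prints matches at 0 (length 2), 2 and 3 (length 0), 4 (length 1) and
  // 5 (length 0), the same five matches "aabxa".match(/a*/g) yields.
  return 0;
}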
diff --git a/deps/v8/src/regexp.js b/deps/v8/src/regexp.js
index eb617eae42..38090397aa 100644
--- a/deps/v8/src/regexp.js
+++ b/deps/v8/src/regexp.js
@@ -278,11 +278,11 @@ function TrimRegExp(regexp) {
function RegExpToString() {
- // If this.source is an empty string, output /(?:)/.
- // http://bugzilla.mozilla.org/show_bug.cgi?id=225550
- // ecma_2/RegExp/properties-001.js.
- var src = this.source ? this.source : '(?:)';
- var result = '/' + src + '/';
+ if (!IS_REGEXP(this)) {
+ throw MakeTypeError('incompatible_method_receiver',
+ ['RegExp.prototype.toString', this]);
+ }
+ var result = '/' + this.source + '/';
if (this.global) result += 'g';
if (this.ignoreCase) result += 'i';
if (this.multiline) result += 'm';
@@ -296,7 +296,7 @@ function RegExpToString() {
// of the last successful match.
function RegExpGetLastMatch() {
if (lastMatchInfoOverride !== null) {
- return lastMatchInfoOverride[0];
+ return OVERRIDE_MATCH(lastMatchInfoOverride);
}
var regExpSubject = LAST_SUBJECT(lastMatchInfo);
return SubString(regExpSubject,
@@ -334,8 +334,8 @@ function RegExpGetLeftContext() {
subject = LAST_SUBJECT(lastMatchInfo);
} else {
var override = lastMatchInfoOverride;
- start_index = override[override.length - 2];
- subject = override[override.length - 1];
+ start_index = OVERRIDE_POS(override);
+ subject = OVERRIDE_SUBJECT(override);
}
return SubString(subject, 0, start_index);
}
@@ -349,9 +349,9 @@ function RegExpGetRightContext() {
subject = LAST_SUBJECT(lastMatchInfo);
} else {
var override = lastMatchInfoOverride;
- subject = override[override.length - 1];
- var pattern = override[override.length - 3];
- start_index = override[override.length - 2] + pattern.length;
+ subject = OVERRIDE_SUBJECT(override);
+ var match = OVERRIDE_MATCH(override);
+ start_index = OVERRIDE_POS(override) + match.length;
}
return SubString(subject, start_index, subject.length);
}
@@ -363,7 +363,9 @@ function RegExpGetRightContext() {
function RegExpMakeCaptureGetter(n) {
return function() {
if (lastMatchInfoOverride) {
- if (n < lastMatchInfoOverride.length - 2) return lastMatchInfoOverride[n];
+ if (n < lastMatchInfoOverride.length - 2) {
+ return OVERRIDE_CAPTURE(lastMatchInfoOverride, n);
+ }
return '';
}
var index = n * 2;
@@ -425,6 +427,7 @@ function SetUpRegExp() {
LAST_INPUT(lastMatchInfo) = ToString(string);
};
+ %OptimizeObjectForAddingMultipleProperties($RegExp, 22);
%DefineOrRedefineAccessorProperty($RegExp, 'input', RegExpGetInput,
RegExpSetInput, DONT_DELETE);
%DefineOrRedefineAccessorProperty($RegExp, '$_', RegExpGetInput,
@@ -479,6 +482,7 @@ function SetUpRegExp() {
RegExpMakeCaptureGetter(i), NoOpSetter,
DONT_DELETE);
}
+ %ToFastProperties($RegExp);
}
SetUpRegExp();
diff --git a/deps/v8/src/rewriter.cc b/deps/v8/src/rewriter.cc
index 55f93ee0d7..3fcd603fff 100644
--- a/deps/v8/src/rewriter.cc
+++ b/deps/v8/src/rewriter.cc
@@ -111,7 +111,7 @@ void Processor::VisitBlock(Block* node) {
void Processor::VisitExpressionStatement(ExpressionStatement* node) {
// Rewrite : <x>; -> .result = <x>;
- if (!is_set_) {
+ if (!is_set_ && !node->expression()->IsThrow()) {
node->set_expression(SetResult(node->expression()));
if (!in_try_) is_set_ = true;
}
@@ -262,7 +262,7 @@ bool Rewriter::Rewrite(CompilationInfo* info) {
Statement* result_statement =
processor.factory()->NewReturnStatement(result_proxy);
result_statement->set_statement_pos(position);
- body->Add(result_statement);
+ body->Add(result_statement, info->isolate()->zone());
}
}
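
Note: the rewriter hunk above stops wrapping a bare throw in the ".result = <x>" assignment, since control leaves the block anyway. A small standalone sketch of that rewrite over a toy AST (Expr, Throw, AssignToResult are illustrative assumptions, not V8's AST classes), assuming statements are processed back to front as the is_set_ flag suggests:

    #include <memory>
    #include <vector>

    struct Expr {
      virtual ~Expr() = default;
      virtual bool IsThrow() const { return false; }
    };
    struct Throw : Expr {
      bool IsThrow() const override { return true; }
    };
    struct AssignToResult : Expr {  // represents ".result = <value>"
      explicit AssignToResult(std::unique_ptr<Expr> value) : value(std::move(value)) {}
      std::unique_ptr<Expr> value;
    };

    struct ExprStatement { std::unique_ptr<Expr> expression; };

    // Wrap the statement that produces the completion value, but leave a bare
    // throw alone: capturing its value is pointless because it never completes
    // normally.
    void RewriteForCompletionValue(std::vector<ExprStatement>& body) {
      bool result_set = false;  // plays the role of is_set_ in the hunk above
      for (auto it = body.rbegin(); it != body.rend() && !result_set; ++it) {
        if (it->expression->IsThrow()) continue;
        it->expression = std::make_unique<AssignToResult>(std::move(it->expression));
        result_set = true;
      }
    }
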
diff --git a/deps/v8/src/runtime-profiler.cc b/deps/v8/src/runtime-profiler.cc
index b06168a246..003b882f36 100644
--- a/deps/v8/src/runtime-profiler.cc
+++ b/deps/v8/src/runtime-profiler.cc
@@ -65,13 +65,20 @@ static const int kSizeLimit = 1500;
// Number of times a function has to be seen on the stack before it is
// optimized.
static const int kProfilerTicksBeforeOptimization = 2;
+// If the function optimization was disabled due to high deoptimization count,
+// but the function is hot and has been seen on the stack this number of times,
+// then we try to reenable optimization for this function.
+static const int kProfilerTicksBeforeReenablingOptimization = 250;
// If a function does not have enough type info (according to
// FLAG_type_info_threshold), but has seen a huge number of ticks,
// optimize it as it is.
static const int kTicksWhenNotEnoughTypeInfo = 100;
// We only have one byte to store the number of ticks.
+STATIC_ASSERT(kProfilerTicksBeforeOptimization < 256);
+STATIC_ASSERT(kProfilerTicksBeforeReenablingOptimization < 256);
STATIC_ASSERT(kTicksWhenNotEnoughTypeInfo < 256);
+
// Maximum size in bytes of generated code for a function to be optimized
// the very first time it is seen on the stack.
static const int kMaxSizeEarlyOpt = 500;
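
Note: the STATIC_ASSERTs above exist because the tick count is stored in a single byte of the code object, so every threshold must stay below 256. A standalone sketch of the tick/re-enable decision these constants feed into; FunctionProfile and TickAndDecide are illustrative names, not V8's types:

    #include <cstdint>

    static const int kProfilerTicksBeforeOptimization = 2;
    static const int kProfilerTicksBeforeReenablingOptimization = 250;
    static_assert(kProfilerTicksBeforeOptimization < 256, "ticks live in one byte");
    static_assert(kProfilerTicksBeforeReenablingOptimization < 256, "ticks live in one byte");

    struct FunctionProfile {
      uint8_t profiler_ticks = 0;          // one byte, hence the < 256 asserts
      bool optimization_disabled = false;
      int deopt_count = 0;
    };

    // Returns true when the function should be considered for optimization now.
    bool TickAndDecide(FunctionProfile& f, int max_deopt_count) {
      if (f.optimization_disabled && f.deopt_count >= max_deopt_count) {
        // Disabled because it deoptimized too often: only retry once it has
        // proven to be hot again, i.e. seen ~250 more times on the stack.
        if (f.profiler_ticks >= kProfilerTicksBeforeReenablingOptimization) {
          f.profiler_ticks = 0;
          f.optimization_disabled = false;  // corresponds to TryReenableOptimization()
        } else {
          ++f.profiler_ticks;
        }
        return false;
      }
      if (f.profiler_ticks >= kProfilerTicksBeforeOptimization) return true;
      ++f.profiler_ticks;
      return false;
    }
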
@@ -94,12 +101,14 @@ RuntimeProfiler::RuntimeProfiler(Isolate* isolate)
sampler_threshold_size_factor_(kSamplerThresholdSizeFactorInit),
sampler_ticks_until_threshold_adjustment_(
kSamplerTicksBetweenThresholdAdjustment),
- sampler_window_position_(0) {
+ sampler_window_position_(0),
+ any_ic_changed_(false),
+ code_generated_(false) {
ClearSampleBuffer();
}
-void RuntimeProfiler::GlobalSetup() {
+void RuntimeProfiler::GlobalSetUp() {
ASSERT(!has_been_globally_set_up_);
enabled_ = V8::UseCrankshaft() && FLAG_opt;
#ifdef DEBUG
@@ -179,14 +188,10 @@ void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function) {
// prepared to generate it, but we don't expect to have to.
bool found_code = false;
Code* stack_check_code = NULL;
-#if defined(V8_TARGET_ARCH_IA32) || \
- defined(V8_TARGET_ARCH_ARM) || \
- defined(V8_TARGET_ARCH_MIPS)
if (FLAG_count_based_interrupts) {
InterruptStub interrupt_stub;
found_code = interrupt_stub.FindCodeInCache(&stack_check_code);
} else // NOLINT
-#endif
{ // NOLINT
StackCheckStub check_stub;
found_code = check_stub.FindCodeInCache(&stack_check_code);
@@ -265,7 +270,9 @@ void RuntimeProfiler::OptimizeNow() {
}
}
- Code* shared_code = function->shared()->code();
+ SharedFunctionInfo* shared = function->shared();
+ Code* shared_code = shared->code();
+
if (shared_code->kind() != Code::FUNCTION) continue;
if (function->IsMarkedForLazyRecompilation()) {
@@ -275,20 +282,34 @@ void RuntimeProfiler::OptimizeNow() {
shared_code->set_allow_osr_at_loop_nesting_level(new_nesting);
}
- // Do not record non-optimizable functions.
- if (!function->IsOptimizable()) continue;
- if (function->shared()->optimization_disabled()) continue;
-
// Only record top-level code on top of the execution stack and
// avoid optimizing excessively large scripts since top-level code
// will be executed only once.
const int kMaxToplevelSourceSize = 10 * 1024;
- if (function->shared()->is_toplevel()
- && (frame_count > 1
- || function->shared()->SourceSize() > kMaxToplevelSourceSize)) {
+ if (shared->is_toplevel() &&
+ (frame_count > 1 || shared->SourceSize() > kMaxToplevelSourceSize)) {
continue;
}
+ // Do not record non-optimizable functions.
+ if (shared->optimization_disabled()) {
+ if (shared->deopt_count() >= Compiler::kDefaultMaxOptCount) {
+ // If optimization was disabled due to many deoptimizations,
+ // then check if the function is hot and try to reenable optimization.
+ int ticks = shared_code->profiler_ticks();
+ if (ticks >= kProfilerTicksBeforeReenablingOptimization) {
+ shared_code->set_profiler_ticks(0);
+ shared->TryReenableOptimization();
+ } else {
+ shared_code->set_profiler_ticks(ticks + 1);
+ }
+ }
+ continue;
+ }
+ if (!function->IsOptimizable()) continue;
+
+
+
if (FLAG_watch_ic_patching) {
int ticks = shared_code->profiler_ticks();
@@ -315,14 +336,6 @@ void RuntimeProfiler::OptimizeNow() {
// If no IC was patched since the last tick and this function is very
// small, optimistically optimize it now.
Optimize(function, "small function");
- } else if (!code_generated_ &&
- !any_ic_changed_ &&
- total_code_generated_ > 0 &&
- total_code_generated_ < 2000) {
- // If no code was generated and no IC was patched since the last tick,
- // but a little code has already been generated since last Reset(),
- // then type info might already be stable and we can optimize now.
- Optimize(function, "stable on startup");
} else {
shared_code->set_profiler_ticks(ticks + 1);
}
@@ -343,7 +356,6 @@ void RuntimeProfiler::OptimizeNow() {
}
if (FLAG_watch_ic_patching) {
any_ic_changed_ = false;
- code_generated_ = false;
} else { // !FLAG_watch_ic_patching
// Add the collected functions as samples. It's important not to do
// this as part of collecting them because this will interfere with
@@ -356,11 +368,7 @@ void RuntimeProfiler::OptimizeNow() {
void RuntimeProfiler::NotifyTick() {
-#if defined(V8_TARGET_ARCH_IA32) || \
- defined(V8_TARGET_ARCH_ARM) || \
- defined(V8_TARGET_ARCH_MIPS)
if (FLAG_count_based_interrupts) return;
-#endif
isolate_->stack_guard()->RequestRuntimeProfilerTick();
}
@@ -377,9 +385,7 @@ void RuntimeProfiler::SetUp() {
void RuntimeProfiler::Reset() {
- if (FLAG_watch_ic_patching) {
- total_code_generated_ = 0;
- } else { // !FLAG_watch_ic_patching
+ if (!FLAG_watch_ic_patching) {
sampler_threshold_ = kSamplerThresholdInit;
sampler_threshold_size_factor_ = kSamplerThresholdSizeFactorInit;
sampler_ticks_until_threshold_adjustment_ =
diff --git a/deps/v8/src/runtime-profiler.h b/deps/v8/src/runtime-profiler.h
index e3388492cb..ab6cb378ea 100644
--- a/deps/v8/src/runtime-profiler.h
+++ b/deps/v8/src/runtime-profiler.h
@@ -43,7 +43,7 @@ class RuntimeProfiler {
public:
explicit RuntimeProfiler(Isolate* isolate);
- static void GlobalSetup();
+ static void GlobalSetUp();
static inline bool IsEnabled() {
ASSERT(has_been_globally_set_up_);
@@ -63,13 +63,6 @@ class RuntimeProfiler {
void NotifyICChanged() { any_ic_changed_ = true; }
- void NotifyCodeGenerated(int generated_code_size) {
- if (FLAG_watch_ic_patching) {
- code_generated_ = true;
- total_code_generated_ += generated_code_size;
- }
- }
-
// Rate limiting support.
// VM thread interface.
@@ -130,7 +123,6 @@ class RuntimeProfiler {
bool any_ic_changed_;
bool code_generated_;
- int total_code_generated_;
// Possible state values:
// -1 => the profiler thread is waiting on the semaphore
diff --git a/deps/v8/src/runtime.cc b/deps/v8/src/runtime.cc
index 5996b82673..9e389492ec 100644
--- a/deps/v8/src/runtime.cc
+++ b/deps/v8/src/runtime.cc
@@ -208,8 +208,10 @@ MUST_USE_RESULT static MaybeObject* DeepCopyBoilerplate(Isolate* isolate,
// Pixel elements cannot be created using an object literal.
ASSERT(!copy->HasExternalArrayElements());
switch (copy->GetElementsKind()) {
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS: {
+ case FAST_SMI_ELEMENTS:
+ case FAST_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS: {
FixedArray* elements = FixedArray::cast(copy->elements());
if (elements->map() == heap->fixed_cow_array_map()) {
isolate->counters()->cow_arrays_created_runtime()->Increment();
@@ -223,7 +225,7 @@ MUST_USE_RESULT static MaybeObject* DeepCopyBoilerplate(Isolate* isolate,
Object* value = elements->get(i);
ASSERT(value->IsSmi() ||
value->IsTheHole() ||
- (copy->GetElementsKind() == FAST_ELEMENTS));
+ (IsFastObjectElementsKind(copy->GetElementsKind())));
if (value->IsJSObject()) {
JSObject* js_object = JSObject::cast(value);
{ MaybeObject* maybe_result = DeepCopyBoilerplate(isolate,
@@ -268,6 +270,7 @@ MUST_USE_RESULT static MaybeObject* DeepCopyBoilerplate(Isolate* isolate,
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
// No contained objects, nothing to do.
break;
}
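
Note: the extra FAST_HOLEY_* cases above come from splitting each fast elements kind into a packed and a holey variant. A trimmed standalone sketch of that lattice and the predicates used throughout this patch; the numeric values and function bodies are simplified assumptions, only the names match the real enum:

    enum ElementsKind {
      FAST_SMI_ELEMENTS,           // packed, Smis only
      FAST_HOLEY_SMI_ELEMENTS,     // may contain holes, Smis only
      FAST_ELEMENTS,               // packed, arbitrary tagged values
      FAST_HOLEY_ELEMENTS,         // may contain holes, arbitrary tagged values
      FAST_DOUBLE_ELEMENTS,        // packed, unboxed doubles
      FAST_HOLEY_DOUBLE_ELEMENTS   // may contain holes, unboxed doubles
    };

    inline bool IsFastHoleyElementsKind(ElementsKind k) {
      return k == FAST_HOLEY_SMI_ELEMENTS ||
             k == FAST_HOLEY_ELEMENTS ||
             k == FAST_HOLEY_DOUBLE_ELEMENTS;
    }

    inline bool IsFastSmiElementsKind(ElementsKind k) {
      return k == FAST_SMI_ELEMENTS || k == FAST_HOLEY_SMI_ELEMENTS;
    }

    inline bool IsFastObjectElementsKind(ElementsKind k) {
      return k == FAST_ELEMENTS || k == FAST_HOLEY_ELEMENTS;
    }

    inline bool IsFastDoubleElementsKind(ElementsKind k) {
      return k == FAST_DOUBLE_ELEMENTS || k == FAST_HOLEY_DOUBLE_ELEMENTS;
    }
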
@@ -452,7 +455,7 @@ MaybeObject* TransitionElements(Handle<Object> object,
}
-static const int kSmiOnlyLiteralMinimumLength = 1024;
+static const int kSmiLiteralMinimumLength = 1024;
Handle<Object> Runtime::CreateArrayLiteralBoilerplate(
@@ -470,23 +473,22 @@ Handle<Object> Runtime::CreateArrayLiteralBoilerplate(
Handle<FixedArrayBase> constant_elements_values(
FixedArrayBase::cast(elements->get(1)));
+ ASSERT(IsFastElementsKind(constant_elements_kind));
Context* global_context = isolate->context()->global_context();
- if (constant_elements_kind == FAST_SMI_ONLY_ELEMENTS) {
- object->set_map(Map::cast(global_context->smi_js_array_map()));
- } else if (constant_elements_kind == FAST_DOUBLE_ELEMENTS) {
- object->set_map(Map::cast(global_context->double_js_array_map()));
- } else {
- object->set_map(Map::cast(global_context->object_js_array_map()));
- }
+ Object* maybe_maps_array = global_context->js_array_maps();
+ ASSERT(!maybe_maps_array->IsUndefined());
+ Object* maybe_map = FixedArray::cast(maybe_maps_array)->get(
+ constant_elements_kind);
+ ASSERT(maybe_map->IsMap());
+ object->set_map(Map::cast(maybe_map));
Handle<FixedArrayBase> copied_elements_values;
- if (constant_elements_kind == FAST_DOUBLE_ELEMENTS) {
+ if (IsFastDoubleElementsKind(constant_elements_kind)) {
ASSERT(FLAG_smi_only_arrays);
copied_elements_values = isolate->factory()->CopyFixedDoubleArray(
Handle<FixedDoubleArray>::cast(constant_elements_values));
} else {
- ASSERT(constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
- constant_elements_kind == FAST_ELEMENTS);
+ ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind));
const bool is_cow =
(constant_elements_values->map() ==
isolate->heap()->fixed_cow_array_map());
@@ -522,15 +524,22 @@ Handle<Object> Runtime::CreateArrayLiteralBoilerplate(
object->set_elements(*copied_elements_values);
object->set_length(Smi::FromInt(copied_elements_values->length()));
- // Ensure that the boilerplate object has FAST_ELEMENTS, unless the flag is
+ // Ensure that the boilerplate object has FAST_*_ELEMENTS, unless the flag is
// on or the object is larger than the threshold.
if (!FLAG_smi_only_arrays &&
- constant_elements_values->length() < kSmiOnlyLiteralMinimumLength) {
- if (object->GetElementsKind() != FAST_ELEMENTS) {
- CHECK(!TransitionElements(object, FAST_ELEMENTS, isolate)->IsFailure());
+ constant_elements_values->length() < kSmiLiteralMinimumLength) {
+ ElementsKind elements_kind = object->GetElementsKind();
+ if (!IsFastObjectElementsKind(elements_kind)) {
+ if (IsFastHoleyElementsKind(elements_kind)) {
+ CHECK(!TransitionElements(object, FAST_HOLEY_ELEMENTS,
+ isolate)->IsFailure());
+ } else {
+ CHECK(!TransitionElements(object, FAST_ELEMENTS, isolate)->IsFailure());
+ }
}
}
+ object->ValidateElements();
return object;
}
@@ -1227,7 +1236,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DisableAccessChecks) {
if (needs_access_checks) {
// Copy map so it won't interfere constructor's initial map.
Object* new_map;
- { MaybeObject* maybe_new_map = old_map->CopyDropTransitions();
+ { MaybeObject* maybe_new_map =
+ old_map->CopyDropTransitions(DescriptorArray::MAY_BE_SHARED);
if (!maybe_new_map->ToObject(&new_map)) return maybe_new_map;
}
@@ -1245,7 +1255,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_EnableAccessChecks) {
if (!old_map->is_access_check_needed()) {
// Copy map so it won't interfere constructor's initial map.
Object* new_map;
- { MaybeObject* maybe_new_map = old_map->CopyDropTransitions();
+ { MaybeObject* maybe_new_map =
+ old_map->CopyDropTransitions(DescriptorArray::MAY_BE_SHARED);
if (!maybe_new_map->ToObject(&new_map)) return maybe_new_map;
}
@@ -1289,90 +1300,79 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeclareGlobals) {
// We have to declare a global const property. To capture we only
// assign to it when evaluating the assignment for "const x =
// <expr>" the initial value is the hole.
- bool is_const_property = value->IsTheHole();
- bool is_function_declaration = false;
- if (value->IsUndefined() || is_const_property) {
+ bool is_var = value->IsUndefined();
+ bool is_const = value->IsTheHole();
+ bool is_function = value->IsSharedFunctionInfo();
+ bool is_module = value->IsJSModule();
+ ASSERT(is_var + is_const + is_function + is_module == 1);
+
+ if (is_var || is_const) {
// Lookup the property in the global object, and don't set the
// value of the variable if the property is already there.
+ // Do the lookup locally only, see ES5 errata.
LookupResult lookup(isolate);
- global->Lookup(*name, &lookup);
+ if (FLAG_es52_globals)
+ global->LocalLookup(*name, &lookup);
+ else
+ global->Lookup(*name, &lookup);
if (lookup.IsProperty()) {
// We found an existing property. Unless it was an interceptor
// that claims the property is absent, skip this declaration.
- if (lookup.type() != INTERCEPTOR) {
- continue;
- }
+ if (lookup.type() != INTERCEPTOR) continue;
PropertyAttributes attributes = global->GetPropertyAttribute(*name);
- if (attributes != ABSENT) {
- continue;
- }
+ if (attributes != ABSENT) continue;
// Fall-through and introduce the absent property by using
// SetProperty.
}
- } else {
- is_function_declaration = true;
+ } else if (is_function) {
// Copy the function and update its context. Use it as value.
Handle<SharedFunctionInfo> shared =
Handle<SharedFunctionInfo>::cast(value);
Handle<JSFunction> function =
- isolate->factory()->NewFunctionFromSharedFunctionInfo(shared,
- context,
- TENURED);
+ isolate->factory()->NewFunctionFromSharedFunctionInfo(
+ shared, context, TENURED);
value = function;
}
LookupResult lookup(isolate);
global->LocalLookup(*name, &lookup);
- // Compute the property attributes. According to ECMA-262, section
- // 13, page 71, the property must be read-only and
- // non-deletable. However, neither SpiderMonkey nor KJS creates the
- // property as read-only, so we don't either.
+ // Compute the property attributes. According to ECMA-262,
+ // the property must be non-configurable except in eval.
int attr = NONE;
- if (!DeclareGlobalsEvalFlag::decode(flags)) {
+ bool is_eval = DeclareGlobalsEvalFlag::decode(flags);
+ if (!is_eval || is_module) {
attr |= DONT_DELETE;
}
bool is_native = DeclareGlobalsNativeFlag::decode(flags);
- if (is_const_property || (is_native && is_function_declaration)) {
+ if (is_const || is_module || (is_native && is_function)) {
attr |= READ_ONLY;
}
LanguageMode language_mode = DeclareGlobalsLanguageMode::decode(flags);
- // Safari does not allow the invocation of callback setters for
- // function declarations. To mimic this behavior, we do not allow
- // the invocation of setters for function values. This makes a
- // difference for global functions with the same names as event
- // handlers such as "function onload() {}". Firefox does call the
- // onload setter in those case and Safari does not. We follow
- // Safari for compatibility.
- if (is_function_declaration) {
- if (lookup.IsProperty() && (lookup.type() != INTERCEPTOR)) {
- // Do not overwrite READ_ONLY properties.
- if (lookup.GetAttributes() & READ_ONLY) {
- if (language_mode != CLASSIC_MODE) {
- Handle<Object> args[] = { name };
- return isolate->Throw(*isolate->factory()->NewTypeError(
- "strict_cannot_assign", HandleVector(args, ARRAY_SIZE(args))));
- }
- continue;
+ if (!lookup.IsProperty() || is_function || is_module) {
+ // If the local property exists, check that we can reconfigure it
+ // as required for function declarations.
+ if (lookup.IsProperty() && lookup.IsDontDelete()) {
+ if (lookup.IsReadOnly() || lookup.IsDontEnum() ||
+ lookup.type() == CALLBACKS) {
+ return ThrowRedeclarationError(
+ isolate, is_function ? "function" : "module", name);
}
- // Do not change DONT_DELETE to false from true.
- attr |= lookup.GetAttributes() & DONT_DELETE;
+ // If the existing property is not configurable, keep its attributes.
+ attr = lookup.GetAttributes();
}
- PropertyAttributes attributes = static_cast<PropertyAttributes>(attr);
-
- RETURN_IF_EMPTY_HANDLE(
- isolate,
- JSObject::SetLocalPropertyIgnoreAttributes(global, name, value,
- attributes));
+ // Define or redefine own property.
+ RETURN_IF_EMPTY_HANDLE(isolate,
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ global, name, value, static_cast<PropertyAttributes>(attr)));
} else {
- RETURN_IF_EMPTY_HANDLE(
- isolate,
- JSReceiver::SetProperty(global, name, value,
- static_cast<PropertyAttributes>(attr),
- language_mode == CLASSIC_MODE
- ? kNonStrictMode : kStrictMode));
+ // Do a [[Put]] on the existing (own) property.
+ RETURN_IF_EMPTY_HANDLE(isolate,
+ JSObject::SetProperty(
+ global, name, value, static_cast<PropertyAttributes>(attr),
+ language_mode == CLASSIC_MODE ? kNonStrictMode : kStrictMode));
}
}
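
Note: the rewritten declaration logic above reduces the attribute choice to two rules: non-eval (and module) declarations are non-configurable, and const, module and native function bindings are read-only. A standalone sketch of just that computation, with simplified constants standing in for the real PropertyAttributes values:

    enum Attr { NONE = 0, READ_ONLY = 1, DONT_DELETE = 4 };  // illustrative values

    int GlobalDeclarationAttributes(bool is_eval, bool is_const, bool is_module,
                                    bool is_native, bool is_function) {
      int attr = NONE;
      // Declarations outside eval (and all module bindings) are non-configurable.
      if (!is_eval || is_module) attr |= DONT_DELETE;
      // const, module bindings and native function declarations are read-only.
      if (is_const || is_module || (is_native && is_function)) attr |= READ_ONLY;
      return attr;
    }
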
@@ -1405,6 +1405,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeclareContextSlot) {
if (attributes != ABSENT) {
// The name was declared before; check for conflicting re-declarations.
+ // Note: this is actually inconsistent with what happens for globals (where
+ // we silently ignore such declarations).
if (((attributes & READ_ONLY) != 0) || (mode == READ_ONLY)) {
// Functions are not read-only.
ASSERT(mode != READ_ONLY || initial_value->IsTheHole());
@@ -1467,9 +1469,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeclareContextSlot) {
return ThrowRedeclarationError(isolate, "const", name);
}
}
- RETURN_IF_EMPTY_HANDLE(
- isolate,
- JSReceiver::SetProperty(object, name, value, mode, kNonStrictMode));
+ if (object->IsJSGlobalObject()) {
+ // Define own property on the global object.
+ RETURN_IF_EMPTY_HANDLE(isolate,
+ JSObject::SetLocalPropertyIgnoreAttributes(object, name, value, mode));
+ } else {
+ RETURN_IF_EMPTY_HANDLE(isolate,
+ JSReceiver::SetProperty(object, name, value, mode, kNonStrictMode));
+ }
}
return isolate->heap()->undefined_value();
@@ -1734,14 +1741,15 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpExec) {
// length of a string, i.e. it is always a Smi. We check anyway for security.
CONVERT_SMI_ARG_CHECKED(index, 2);
CONVERT_ARG_HANDLE_CHECKED(JSArray, last_match_info, 3);
- RUNTIME_ASSERT(last_match_info->HasFastElements());
+ RUNTIME_ASSERT(last_match_info->HasFastObjectElements());
RUNTIME_ASSERT(index >= 0);
RUNTIME_ASSERT(index <= subject->length());
isolate->counters()->regexp_entry_runtime()->Increment();
Handle<Object> result = RegExpImpl::Exec(regexp,
subject,
index,
- last_match_info);
+ last_match_info,
+ isolate->zone());
if (result.is_null()) return Failure::Exception();
return *result;
}
@@ -1787,6 +1795,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpInitializeObject) {
ASSERT(args.length() == 5);
CONVERT_ARG_CHECKED(JSRegExp, regexp, 0);
CONVERT_ARG_CHECKED(String, source, 1);
+ // If source is the empty string we set it to "(?:)" instead as
+ // suggested by ECMA-262, 5th, section 15.10.4.1.
+ if (source->length() == 0) source = isolate->heap()->query_colon_symbol();
Object* global = args[2];
if (!global->IsTrue()) global = isolate->heap()->false_value();
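
Note: the empty-source normalization above keeps '/' + source + '/' round-trippable; an empty source would print as //, which reads as a line comment. A trivial standalone sketch using a plain std::string instead of the heap symbol used by the patch:

    #include <string>

    std::string NormalizeRegExpSource(const std::string& source) {
      // An empty pattern is represented as the no-op group "(?:)".
      return source.empty() ? "(?:)" : source;
    }

    // e.g. "/" + NormalizeRegExpSource("") + "/"  yields  "/(?:)/"
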
@@ -2101,7 +2112,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetReadOnlyPrototype) {
DescriptorArray* instance_desc = function->map()->instance_descriptors();
int index = instance_desc->Search(name);
ASSERT(index != DescriptorArray::kNotFound);
- PropertyDetails details(instance_desc->GetDetails(index));
+ PropertyDetails details = instance_desc->GetDetails(index);
CallbacksDescriptor new_desc(name,
instance_desc->GetValue(index),
static_cast<PropertyAttributes>(details.attributes() | READ_ONLY),
@@ -2166,60 +2177,58 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetCode) {
CONVERT_ARG_HANDLE_CHECKED(JSFunction, target, 0);
Handle<Object> code = args.at<Object>(1);
- Handle<Context> context(target->context());
+ if (code->IsNull()) return *target;
+ RUNTIME_ASSERT(code->IsJSFunction());
+ Handle<JSFunction> source = Handle<JSFunction>::cast(code);
+ Handle<SharedFunctionInfo> target_shared(target->shared());
+ Handle<SharedFunctionInfo> source_shared(source->shared());
- if (!code->IsNull()) {
- RUNTIME_ASSERT(code->IsJSFunction());
- Handle<JSFunction> fun = Handle<JSFunction>::cast(code);
- Handle<SharedFunctionInfo> shared(fun->shared());
-
- if (!SharedFunctionInfo::EnsureCompiled(shared, KEEP_EXCEPTION)) {
- return Failure::Exception();
- }
- // Since we don't store the source for this we should never
- // optimize this.
- shared->code()->set_optimizable(false);
- // Set the code, scope info, formal parameter count,
- // and the length of the target function.
- target->shared()->set_code(shared->code());
- target->ReplaceCode(shared->code());
- target->shared()->set_scope_info(shared->scope_info());
- target->shared()->set_length(shared->length());
- target->shared()->set_formal_parameter_count(
- shared->formal_parameter_count());
- // Set the source code of the target function to undefined.
- // SetCode is only used for built-in constructors like String,
- // Array, and Object, and some web code
- // doesn't like seeing source code for constructors.
- target->shared()->set_script(isolate->heap()->undefined_value());
- target->shared()->code()->set_optimizable(false);
- // Clear the optimization hints related to the compiled code as these are no
- // longer valid when the code is overwritten.
- target->shared()->ClearThisPropertyAssignmentsInfo();
- context = Handle<Context>(fun->context());
-
- // Make sure we get a fresh copy of the literal vector to avoid
- // cross context contamination.
- int number_of_literals = fun->NumberOfLiterals();
- Handle<FixedArray> literals =
- isolate->factory()->NewFixedArray(number_of_literals, TENURED);
- if (number_of_literals > 0) {
- // Insert the object, regexp and array functions in the literals
- // array prefix. These are the functions that will be used when
- // creating object, regexp and array literals.
- literals->set(JSFunction::kLiteralGlobalContextIndex,
- context->global_context());
- }
- target->set_literals(*literals);
- target->set_next_function_link(isolate->heap()->undefined_value());
-
- if (isolate->logger()->is_logging() || CpuProfiler::is_profiling(isolate)) {
- isolate->logger()->LogExistingFunction(
- shared, Handle<Code>(shared->code()));
- }
+ if (!source->is_compiled() &&
+ !JSFunction::CompileLazy(source, KEEP_EXCEPTION)) {
+ return Failure::Exception();
}
+ // Set the code, scope info, formal parameter count, and the length
+ // of the target shared function info. Set the source code of the
+ // target function to undefined. SetCode is only used for built-in
+ // constructors like String, Array, and Object, and some web code
+ // doesn't like seeing source code for constructors.
+ target_shared->set_code(source_shared->code());
+ target_shared->set_scope_info(source_shared->scope_info());
+ target_shared->set_length(source_shared->length());
+ target_shared->set_formal_parameter_count(
+ source_shared->formal_parameter_count());
+ target_shared->set_script(isolate->heap()->undefined_value());
+
+ // Since we don't store the source we should never optimize this.
+ target_shared->code()->set_optimizable(false);
+
+ // Clear the optimization hints related to the compiled code as these
+ // are no longer valid when the code is overwritten.
+ target_shared->ClearThisPropertyAssignmentsInfo();
+
+ // Set the code of the target function.
+ target->ReplaceCode(source_shared->code());
+
+ // Make sure we get a fresh copy of the literal vector to avoid cross
+ // context contamination.
+ Handle<Context> context(source->context());
+ int number_of_literals = source->NumberOfLiterals();
+ Handle<FixedArray> literals =
+ isolate->factory()->NewFixedArray(number_of_literals, TENURED);
+ if (number_of_literals > 0) {
+ literals->set(JSFunction::kLiteralGlobalContextIndex,
+ context->global_context());
+ }
target->set_context(*context);
+ target->set_literals(*literals);
+ target->set_next_function_link(isolate->heap()->undefined_value());
+
+ if (isolate->logger()->is_logging() || CpuProfiler::is_profiling(isolate)) {
+ isolate->logger()->LogExistingFunction(
+ source_shared, Handle<Code>(source_shared->code()));
+ }
+
return *target;
}
@@ -2519,8 +2528,10 @@ class ReplacementStringBuilder {
class CompiledReplacement {
public:
- CompiledReplacement()
- : parts_(1), replacement_substrings_(0), simple_hint_(false) {}
+ explicit CompiledReplacement(Zone* zone)
+ : parts_(1, zone), replacement_substrings_(0, zone),
+ simple_hint_(false),
+ zone_(zone) {}
void Compile(Handle<String> replacement,
int capture_count,
@@ -2540,6 +2551,8 @@ class CompiledReplacement {
return simple_hint_;
}
+ Zone* zone() const { return zone_; }
+
private:
enum PartType {
SUBJECT_PREFIX = 1,
@@ -2601,7 +2614,8 @@ class CompiledReplacement {
static bool ParseReplacementPattern(ZoneList<ReplacementPart>* parts,
Vector<Char> characters,
int capture_count,
- int subject_length) {
+ int subject_length,
+ Zone* zone) {
int length = characters.length();
int last = 0;
for (int i = 0; i < length; i++) {
@@ -2616,7 +2630,8 @@ class CompiledReplacement {
case '$':
if (i > last) {
// There is a substring before. Include the first "$".
- parts->Add(ReplacementPart::ReplacementSubString(last, next_index));
+ parts->Add(ReplacementPart::ReplacementSubString(last, next_index),
+ zone);
last = next_index + 1; // Continue after the second "$".
} else {
// Let the next substring start with the second "$".
@@ -2626,25 +2641,25 @@ class CompiledReplacement {
break;
case '`':
if (i > last) {
- parts->Add(ReplacementPart::ReplacementSubString(last, i));
+ parts->Add(ReplacementPart::ReplacementSubString(last, i), zone);
}
- parts->Add(ReplacementPart::SubjectPrefix());
+ parts->Add(ReplacementPart::SubjectPrefix(), zone);
i = next_index;
last = i + 1;
break;
case '\'':
if (i > last) {
- parts->Add(ReplacementPart::ReplacementSubString(last, i));
+ parts->Add(ReplacementPart::ReplacementSubString(last, i), zone);
}
- parts->Add(ReplacementPart::SubjectSuffix(subject_length));
+ parts->Add(ReplacementPart::SubjectSuffix(subject_length), zone);
i = next_index;
last = i + 1;
break;
case '&':
if (i > last) {
- parts->Add(ReplacementPart::ReplacementSubString(last, i));
+ parts->Add(ReplacementPart::ReplacementSubString(last, i), zone);
}
- parts->Add(ReplacementPart::SubjectMatch());
+ parts->Add(ReplacementPart::SubjectMatch(), zone);
i = next_index;
last = i + 1;
break;
@@ -2677,10 +2692,10 @@ class CompiledReplacement {
}
if (capture_ref > 0) {
if (i > last) {
- parts->Add(ReplacementPart::ReplacementSubString(last, i));
+ parts->Add(ReplacementPart::ReplacementSubString(last, i), zone);
}
ASSERT(capture_ref <= capture_count);
- parts->Add(ReplacementPart::SubjectCapture(capture_ref));
+ parts->Add(ReplacementPart::SubjectCapture(capture_ref), zone);
last = next_index + 1;
}
i = next_index;
@@ -2694,10 +2709,10 @@ class CompiledReplacement {
}
if (length > last) {
if (last == 0) {
- parts->Add(ReplacementPart::ReplacementString());
+ parts->Add(ReplacementPart::ReplacementString(), zone);
return true;
} else {
- parts->Add(ReplacementPart::ReplacementSubString(last, length));
+ parts->Add(ReplacementPart::ReplacementSubString(last, length), zone);
}
}
return false;
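
Note: ParseReplacementPattern above recognizes the $-escapes of String.prototype.replace ($$, $`, $', $&, $n) and splits the replacement into parts. A reduced standalone sketch of the same scan, limited to the one-character escapes; the Part type is an assumption and capture references ($n) are deliberately left as plain text here:

    #include <string>
    #include <vector>

    struct Part {
      enum Kind { LITERAL, SUBJECT_PREFIX, SUBJECT_MATCH, SUBJECT_SUFFIX } kind;
      std::string text;  // only used for LITERAL
    };

    std::vector<Part> ParseReplacement(const std::string& replacement) {
      std::vector<Part> parts;
      std::string literal;
      auto flush = [&]() {
        if (!literal.empty()) {
          parts.push_back({Part::LITERAL, literal});
          literal.clear();
        }
      };
      for (size_t i = 0; i < replacement.size(); ++i) {
        char c = replacement[i];
        if (c != '$' || i + 1 == replacement.size()) { literal += c; continue; }
        char next = replacement[++i];
        switch (next) {
          case '$':  literal += '$'; break;                                   // "$$" -> "$"
          case '`':  flush(); parts.push_back({Part::SUBJECT_PREFIX, ""}); break;
          case '\'': flush(); parts.push_back({Part::SUBJECT_SUFFIX, ""}); break;
          case '&':  flush(); parts.push_back({Part::SUBJECT_MATCH, ""}); break;
          default:   literal += '$'; literal += next; break;  // "$1" etc. not handled here
        }
      }
      flush();
      return parts;
    }
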
@@ -2706,6 +2721,7 @@ class CompiledReplacement {
ZoneList<ReplacementPart> parts_;
ZoneList<Handle<String> > replacement_substrings_;
bool simple_hint_;
+ Zone* zone_;
};
@@ -2720,13 +2736,15 @@ void CompiledReplacement::Compile(Handle<String> replacement,
simple_hint_ = ParseReplacementPattern(&parts_,
content.ToAsciiVector(),
capture_count,
- subject_length);
+ subject_length,
+ zone());
} else {
ASSERT(content.IsTwoByte());
simple_hint_ = ParseReplacementPattern(&parts_,
content.ToUC16Vector(),
capture_count,
- subject_length);
+ subject_length,
+ zone());
}
}
Isolate* isolate = replacement->GetIsolate();
@@ -2738,12 +2756,12 @@ void CompiledReplacement::Compile(Handle<String> replacement,
int from = -tag;
int to = parts_[i].data;
replacement_substrings_.Add(
- isolate->factory()->NewSubString(replacement, from, to));
+ isolate->factory()->NewSubString(replacement, from, to), zone());
parts_[i].tag = REPLACEMENT_SUBSTRING;
parts_[i].data = substring_index;
substring_index++;
} else if (tag == REPLACEMENT_STRING) {
- replacement_substrings_.Add(replacement);
+ replacement_substrings_.Add(replacement, zone());
parts_[i].data = substring_index;
substring_index++;
}
@@ -2792,7 +2810,8 @@ void CompiledReplacement::Apply(ReplacementStringBuilder* builder,
void FindAsciiStringIndices(Vector<const char> subject,
char pattern,
ZoneList<int>* indices,
- unsigned int limit) {
+ unsigned int limit,
+ Zone* zone) {
ASSERT(limit > 0);
// Collect indices of pattern in subject using memchr.
// Stop after finding at most limit values.
@@ -2803,7 +2822,7 @@ void FindAsciiStringIndices(Vector<const char> subject,
pos = reinterpret_cast<const char*>(
memchr(pos, pattern, subject_end - pos));
if (pos == NULL) return;
- indices->Add(static_cast<int>(pos - subject_start));
+ indices->Add(static_cast<int>(pos - subject_start), zone);
pos++;
limit--;
}
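
Note: the hunk above only threads a Zone through; the underlying scan is a memchr loop that records at most 'limit' hit positions. A standalone sketch of that loop with a plain std::vector in place of the zone-allocated ZoneList:

    #include <cstring>
    #include <vector>

    void FindByteIndices(const char* subject, size_t length, char pattern,
                         unsigned limit, std::vector<int>* indices) {
      const char* start = subject;
      const char* end = subject + length;
      const char* pos = subject;
      while (limit > 0) {
        pos = static_cast<const char*>(std::memchr(pos, pattern, end - pos));
        if (pos == nullptr) return;          // no more occurrences
        indices->push_back(static_cast<int>(pos - start));
        ++pos;                               // continue after this hit
        --limit;                             // stop after 'limit' hits
      }
    }
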
@@ -2815,7 +2834,8 @@ void FindStringIndices(Isolate* isolate,
Vector<const SubjectChar> subject,
Vector<const PatternChar> pattern,
ZoneList<int>* indices,
- unsigned int limit) {
+ unsigned int limit,
+ Zone* zone) {
ASSERT(limit > 0);
// Collect indices of pattern in subject.
// Stop after finding at most limit values.
@@ -2825,7 +2845,7 @@ void FindStringIndices(Isolate* isolate,
while (limit > 0) {
index = search.Search(subject, index);
if (index < 0) return;
- indices->Add(index);
+ indices->Add(index, zone);
index += pattern_length;
limit--;
}
@@ -2836,7 +2856,8 @@ void FindStringIndicesDispatch(Isolate* isolate,
String* subject,
String* pattern,
ZoneList<int>* indices,
- unsigned int limit) {
+ unsigned int limit,
+ Zone* zone) {
{
AssertNoAllocation no_gc;
String::FlatContent subject_content = subject->GetFlatContent();
@@ -2851,20 +2872,23 @@ void FindStringIndicesDispatch(Isolate* isolate,
FindAsciiStringIndices(subject_vector,
pattern_vector[0],
indices,
- limit);
+ limit,
+ zone);
} else {
FindStringIndices(isolate,
subject_vector,
pattern_vector,
indices,
- limit);
+ limit,
+ zone);
}
} else {
FindStringIndices(isolate,
subject_vector,
pattern_content.ToUC16Vector(),
indices,
- limit);
+ limit,
+ zone);
}
} else {
Vector<const uc16> subject_vector = subject_content.ToUC16Vector();
@@ -2873,30 +2897,100 @@ void FindStringIndicesDispatch(Isolate* isolate,
subject_vector,
pattern_content.ToAsciiVector(),
indices,
- limit);
+ limit,
+ zone);
} else {
FindStringIndices(isolate,
subject_vector,
pattern_content.ToUC16Vector(),
indices,
- limit);
+ limit,
+ zone);
}
}
}
}
+// Two smis before and after the match, for very long strings.
+const int kMaxBuilderEntriesPerRegExpMatch = 5;
+
+
+static void SetLastMatchInfoNoCaptures(Handle<String> subject,
+ Handle<JSArray> last_match_info,
+ int match_start,
+ int match_end) {
+ // Fill last_match_info with a single capture.
+ last_match_info->EnsureSize(2 + RegExpImpl::kLastMatchOverhead);
+ AssertNoAllocation no_gc;
+ FixedArray* elements = FixedArray::cast(last_match_info->elements());
+ RegExpImpl::SetLastCaptureCount(elements, 2);
+ RegExpImpl::SetLastInput(elements, *subject);
+ RegExpImpl::SetLastSubject(elements, *subject);
+ RegExpImpl::SetCapture(elements, 0, match_start);
+ RegExpImpl::SetCapture(elements, 1, match_end);
+}
+
+
+template <typename SubjectChar, typename PatternChar>
+static bool SearchStringMultiple(Isolate* isolate,
+ Vector<const SubjectChar> subject,
+ Vector<const PatternChar> pattern,
+ String* pattern_string,
+ FixedArrayBuilder* builder,
+ int* match_pos) {
+ int pos = *match_pos;
+ int subject_length = subject.length();
+ int pattern_length = pattern.length();
+ int max_search_start = subject_length - pattern_length;
+ StringSearch<PatternChar, SubjectChar> search(isolate, pattern);
+ while (pos <= max_search_start) {
+ if (!builder->HasCapacity(kMaxBuilderEntriesPerRegExpMatch)) {
+ *match_pos = pos;
+ return false;
+ }
+ // Position of end of previous match.
+ int match_end = pos + pattern_length;
+ int new_pos = search.Search(subject, match_end);
+ if (new_pos >= 0) {
+ // A match.
+ if (new_pos > match_end) {
+ ReplacementStringBuilder::AddSubjectSlice(builder,
+ match_end,
+ new_pos);
+ }
+ pos = new_pos;
+ builder->Add(pattern_string);
+ } else {
+ break;
+ }
+ }
+
+ if (pos < max_search_start) {
+ ReplacementStringBuilder::AddSubjectSlice(builder,
+ pos + pattern_length,
+ subject_length);
+ }
+ *match_pos = pos;
+ return true;
+}
+
+
+
+
template<typename ResultSeqString>
-MUST_USE_RESULT static MaybeObject* StringReplaceStringWithString(
+MUST_USE_RESULT static MaybeObject* StringReplaceAtomRegExpWithString(
Isolate* isolate,
Handle<String> subject,
Handle<JSRegExp> pattern_regexp,
- Handle<String> replacement) {
+ Handle<String> replacement,
+ Handle<JSArray> last_match_info,
+ Zone* zone) {
ASSERT(subject->IsFlat());
ASSERT(replacement->IsFlat());
ZoneScope zone_space(isolate, DELETE_ON_EXIT);
- ZoneList<int> indices(8);
+ ZoneList<int> indices(8, isolate->zone());
ASSERT_EQ(JSRegExp::ATOM, pattern_regexp->TypeTag());
String* pattern =
String::cast(pattern_regexp->DataAt(JSRegExp::kAtomPatternIndex));
@@ -2904,12 +2998,21 @@ MUST_USE_RESULT static MaybeObject* StringReplaceStringWithString(
int pattern_len = pattern->length();
int replacement_len = replacement->length();
- FindStringIndicesDispatch(isolate, *subject, pattern, &indices, 0xffffffff);
+ FindStringIndicesDispatch(isolate, *subject, pattern, &indices, 0xffffffff,
+ zone);
int matches = indices.length();
if (matches == 0) return *subject;
- int result_len = (replacement_len - pattern_len) * matches + subject_len;
+ // Detect integer overflow.
+ int64_t result_len_64 =
+ (static_cast<int64_t>(replacement_len) -
+ static_cast<int64_t>(pattern_len)) *
+ static_cast<int64_t>(matches) +
+ static_cast<int64_t>(subject_len);
+ if (result_len_64 > INT_MAX) return Failure::OutOfMemoryException();
+ int result_len = static_cast<int>(result_len_64);
+
int subject_pos = 0;
int result_pos = 0;
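
Note: the new code above computes the replaced string's length in 64-bit arithmetic so an enormous match count fails cleanly instead of wrapping a 32-bit int. The same guard as a standalone helper; ComputeResultLength is an illustrative name:

    #include <climits>
    #include <cstdint>

    // Returns false when the result would not fit in an int.
    bool ComputeResultLength(int subject_len, int pattern_len, int replacement_len,
                             int matches, int* result_len) {
      int64_t result_len_64 =
          (static_cast<int64_t>(replacement_len) - pattern_len) * matches +
          subject_len;
      if (result_len_64 > INT_MAX) return false;  // treated as out-of-memory above
      *result_len = static_cast<int>(result_len_64);
      return true;
    }
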
@@ -2950,6 +3053,12 @@ MUST_USE_RESULT static MaybeObject* StringReplaceStringWithString(
subject_pos,
subject_len);
}
+
+ SetLastMatchInfoNoCaptures(subject,
+ last_match_info,
+ indices.at(matches - 1),
+ indices.at(matches - 1) + pattern_len);
+
return *result;
}
@@ -2959,7 +3068,8 @@ MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithString(
String* subject,
JSRegExp* regexp,
String* replacement,
- JSArray* last_match_info) {
+ JSArray* last_match_info,
+ Zone* zone) {
ASSERT(subject->IsFlat());
ASSERT(replacement->IsFlat());
@@ -2973,7 +3083,8 @@ MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithString(
Handle<Object> match = RegExpImpl::Exec(regexp_handle,
subject_handle,
0,
- last_match_info_handle);
+ last_match_info_handle,
+ isolate->zone());
if (match.is_null()) {
return Failure::Exception();
}
@@ -2984,8 +3095,8 @@ MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithString(
int capture_count = regexp_handle->CaptureCount();
// CompiledReplacement uses zone allocation.
- ZoneScope zone(isolate, DELETE_ON_EXIT);
- CompiledReplacement compiled_replacement;
+ ZoneScope zonescope(isolate, DELETE_ON_EXIT);
+ CompiledReplacement compiled_replacement(isolate->zone());
compiled_replacement.Compile(replacement_handle,
capture_count,
length);
@@ -2998,11 +3109,21 @@ MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithString(
compiled_replacement.simple_hint()) {
if (subject_handle->HasOnlyAsciiChars() &&
replacement_handle->HasOnlyAsciiChars()) {
- return StringReplaceStringWithString<SeqAsciiString>(
- isolate, subject_handle, regexp_handle, replacement_handle);
+ return StringReplaceAtomRegExpWithString<SeqAsciiString>(
+ isolate,
+ subject_handle,
+ regexp_handle,
+ replacement_handle,
+ last_match_info_handle,
+ zone);
} else {
- return StringReplaceStringWithString<SeqTwoByteString>(
- isolate, subject_handle, regexp_handle, replacement_handle);
+ return StringReplaceAtomRegExpWithString<SeqTwoByteString>(
+ isolate,
+ subject_handle,
+ regexp_handle,
+ replacement_handle,
+ last_match_info_handle,
+ zone);
}
}
@@ -3024,7 +3145,7 @@ MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithString(
const int parts_added_per_loop = 2 * (compiled_replacement.parts() + 2);
bool matched = true;
do {
- ASSERT(last_match_info_handle->HasFastElements());
+ ASSERT(last_match_info_handle->HasFastObjectElements());
// Increase the capacity of the builder before entering local handle-scope,
// so its internal buffer can safely allocate a new handle if it grows.
builder.EnsureCapacity(parts_added_per_loop);
@@ -3064,7 +3185,8 @@ MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithString(
match = RegExpImpl::Exec(regexp_handle,
subject_handle,
next,
- last_match_info_handle);
+ last_match_info_handle,
+ isolate->zone());
if (match.is_null()) {
return Failure::Exception();
}
@@ -3084,36 +3206,48 @@ MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithEmptyString(
Isolate* isolate,
String* subject,
JSRegExp* regexp,
- JSArray* last_match_info) {
+ JSArray* last_match_info,
+ Zone* zone) {
ASSERT(subject->IsFlat());
HandleScope handles(isolate);
Handle<String> subject_handle(subject);
Handle<JSRegExp> regexp_handle(regexp);
+ Handle<JSArray> last_match_info_handle(last_match_info);
// Shortcut for simple non-regexp global replacements
if (regexp_handle->GetFlags().is_global() &&
regexp_handle->TypeTag() == JSRegExp::ATOM) {
Handle<String> empty_string_handle(HEAP->empty_string());
if (subject_handle->HasOnlyAsciiChars()) {
- return StringReplaceStringWithString<SeqAsciiString>(
- isolate, subject_handle, regexp_handle, empty_string_handle);
+ return StringReplaceAtomRegExpWithString<SeqAsciiString>(
+ isolate,
+ subject_handle,
+ regexp_handle,
+ empty_string_handle,
+ last_match_info_handle,
+ zone);
} else {
- return StringReplaceStringWithString<SeqTwoByteString>(
- isolate, subject_handle, regexp_handle, empty_string_handle);
+ return StringReplaceAtomRegExpWithString<SeqTwoByteString>(
+ isolate,
+ subject_handle,
+ regexp_handle,
+ empty_string_handle,
+ last_match_info_handle,
+ zone);
}
}
- Handle<JSArray> last_match_info_handle(last_match_info);
Handle<Object> match = RegExpImpl::Exec(regexp_handle,
subject_handle,
0,
- last_match_info_handle);
+ last_match_info_handle,
+ isolate->zone());
if (match.is_null()) return Failure::Exception();
if (match->IsNull()) return *subject_handle;
- ASSERT(last_match_info_handle->HasFastElements());
+ ASSERT(last_match_info_handle->HasFastObjectElements());
int start, end;
{
@@ -3125,6 +3259,10 @@ MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithEmptyString(
end = RegExpImpl::GetCapture(match_info_array, 1);
}
+ bool global = regexp_handle->GetFlags().is_global();
+
+ if (start == end && !global) return *subject_handle;
+
int length = subject_handle->length();
int new_length = length - (end - start);
if (new_length == 0) {
@@ -3140,7 +3278,7 @@ MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithEmptyString(
}
// If the regexp isn't global, only match once.
- if (!regexp_handle->GetFlags().is_global()) {
+ if (!global) {
if (start > 0) {
String::WriteToFlat(*subject_handle,
answer->GetChars(),
@@ -3179,11 +3317,12 @@ MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithEmptyString(
match = RegExpImpl::Exec(regexp_handle,
subject_handle,
next,
- last_match_info_handle);
+ last_match_info_handle,
+ isolate->zone());
if (match.is_null()) return Failure::Exception();
if (match->IsNull()) break;
- ASSERT(last_match_info_handle->HasFastElements());
+ ASSERT(last_match_info_handle->HasFastObjectElements());
HandleScope loop_scope(isolate);
{
AssertNoAllocation match_info_array_is_not_in_a_handle;
@@ -3253,15 +3392,16 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringReplaceRegExpWithString) {
CONVERT_ARG_CHECKED(JSRegExp, regexp, 1);
CONVERT_ARG_CHECKED(JSArray, last_match_info, 3);
- ASSERT(last_match_info->HasFastElements());
+ ASSERT(last_match_info->HasFastObjectElements());
+ Zone* zone = isolate->zone();
if (replacement->length() == 0) {
if (subject->HasOnlyAsciiChars()) {
return StringReplaceRegExpWithEmptyString<SeqAsciiString>(
- isolate, subject, regexp, last_match_info);
+ isolate, subject, regexp, last_match_info, zone);
} else {
return StringReplaceRegExpWithEmptyString<SeqTwoByteString>(
- isolate, subject, regexp, last_match_info);
+ isolate, subject, regexp, last_match_info, zone);
}
}
@@ -3269,7 +3409,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringReplaceRegExpWithString) {
subject,
regexp,
replacement,
- last_match_info);
+ last_match_info,
+ zone);
}
@@ -3592,7 +3733,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringMatch) {
CONVERT_ARG_HANDLE_CHECKED(JSArray, regexp_info, 2);
HandleScope handles;
- Handle<Object> match = RegExpImpl::Exec(regexp, subject, 0, regexp_info);
+ Handle<Object> match = RegExpImpl::Exec(regexp, subject, 0, regexp_info,
+ isolate->zone());
if (match.is_null()) {
return Failure::Exception();
@@ -3602,8 +3744,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringMatch) {
}
int length = subject->length();
+ Zone* zone = isolate->zone();
ZoneScope zone_space(isolate, DELETE_ON_EXIT);
- ZoneList<int> offsets(8);
+ ZoneList<int> offsets(8, zone);
int start;
int end;
do {
@@ -3613,10 +3756,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringMatch) {
start = Smi::cast(elements->get(RegExpImpl::kFirstCapture))->value();
end = Smi::cast(elements->get(RegExpImpl::kFirstCapture + 1))->value();
}
- offsets.Add(start);
- offsets.Add(end);
+ offsets.Add(start, zone);
+ offsets.Add(end, zone);
if (start == end) if (++end > length) break;
- match = RegExpImpl::Exec(regexp, subject, end, regexp_info);
+ match = RegExpImpl::Exec(regexp, subject, end, regexp_info,
+ isolate->zone());
if (match.is_null()) {
return Failure::Exception();
}
@@ -3639,70 +3783,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringMatch) {
}
-// Two smis before and after the match, for very long strings.
-const int kMaxBuilderEntriesPerRegExpMatch = 5;
-
-
-static void SetLastMatchInfoNoCaptures(Handle<String> subject,
- Handle<JSArray> last_match_info,
- int match_start,
- int match_end) {
- // Fill last_match_info with a single capture.
- last_match_info->EnsureSize(2 + RegExpImpl::kLastMatchOverhead);
- AssertNoAllocation no_gc;
- FixedArray* elements = FixedArray::cast(last_match_info->elements());
- RegExpImpl::SetLastCaptureCount(elements, 2);
- RegExpImpl::SetLastInput(elements, *subject);
- RegExpImpl::SetLastSubject(elements, *subject);
- RegExpImpl::SetCapture(elements, 0, match_start);
- RegExpImpl::SetCapture(elements, 1, match_end);
-}
-
-
-template <typename SubjectChar, typename PatternChar>
-static bool SearchStringMultiple(Isolate* isolate,
- Vector<const SubjectChar> subject,
- Vector<const PatternChar> pattern,
- String* pattern_string,
- FixedArrayBuilder* builder,
- int* match_pos) {
- int pos = *match_pos;
- int subject_length = subject.length();
- int pattern_length = pattern.length();
- int max_search_start = subject_length - pattern_length;
- StringSearch<PatternChar, SubjectChar> search(isolate, pattern);
- while (pos <= max_search_start) {
- if (!builder->HasCapacity(kMaxBuilderEntriesPerRegExpMatch)) {
- *match_pos = pos;
- return false;
- }
- // Position of end of previous match.
- int match_end = pos + pattern_length;
- int new_pos = search.Search(subject, match_end);
- if (new_pos >= 0) {
- // A match.
- if (new_pos > match_end) {
- ReplacementStringBuilder::AddSubjectSlice(builder,
- match_end,
- new_pos);
- }
- pos = new_pos;
- builder->Add(pattern_string);
- } else {
- break;
- }
- }
-
- if (pos < max_search_start) {
- ReplacementStringBuilder::AddSubjectSlice(builder,
- pos + pattern_length,
- subject_length);
- }
- *match_pos = pos;
- return true;
-}
-
-
static bool SearchStringMultiple(Isolate* isolate,
Handle<String> subject,
Handle<String> pattern,
@@ -3767,62 +3847,75 @@ static bool SearchStringMultiple(Isolate* isolate,
}
-static RegExpImpl::IrregexpResult SearchRegExpNoCaptureMultiple(
+static int SearchRegExpNoCaptureMultiple(
Isolate* isolate,
Handle<String> subject,
Handle<JSRegExp> regexp,
Handle<JSArray> last_match_array,
FixedArrayBuilder* builder) {
ASSERT(subject->IsFlat());
+ ASSERT(regexp->CaptureCount() == 0);
int match_start = -1;
int match_end = 0;
int pos = 0;
- int required_registers = RegExpImpl::IrregexpPrepare(regexp, subject);
- if (required_registers < 0) return RegExpImpl::RE_EXCEPTION;
-
- OffsetsVector registers(required_registers, isolate);
+ int registers_per_match = RegExpImpl::IrregexpPrepare(regexp, subject,
+ isolate->zone());
+ if (registers_per_match < 0) return RegExpImpl::RE_EXCEPTION;
+
+ int max_matches;
+ int num_registers = RegExpImpl::GlobalOffsetsVectorSize(regexp,
+ registers_per_match,
+ &max_matches);
+ OffsetsVector registers(num_registers, isolate);
Vector<int32_t> register_vector(registers.vector(), registers.length());
int subject_length = subject->length();
bool first = true;
-
for (;;) { // Break on failure, return on exception.
- RegExpImpl::IrregexpResult result =
- RegExpImpl::IrregexpExecOnce(regexp,
- subject,
- pos,
- register_vector);
- if (result == RegExpImpl::RE_SUCCESS) {
- match_start = register_vector[0];
- builder->EnsureCapacity(kMaxBuilderEntriesPerRegExpMatch);
- if (match_end < match_start) {
- ReplacementStringBuilder::AddSubjectSlice(builder,
- match_end,
- match_start);
- }
- match_end = register_vector[1];
- HandleScope loop_scope(isolate);
- if (!first) {
- builder->Add(*isolate->factory()->NewProperSubString(subject,
- match_start,
- match_end));
- } else {
- builder->Add(*isolate->factory()->NewSubString(subject,
- match_start,
- match_end));
+ int num_matches = RegExpImpl::IrregexpExecRaw(regexp,
+ subject,
+ pos,
+ register_vector,
+ isolate->zone());
+ if (num_matches > 0) {
+ for (int match_index = 0; match_index < num_matches; match_index++) {
+ int32_t* current_match = &register_vector[match_index * 2];
+ match_start = current_match[0];
+ builder->EnsureCapacity(kMaxBuilderEntriesPerRegExpMatch);
+ if (match_end < match_start) {
+ ReplacementStringBuilder::AddSubjectSlice(builder,
+ match_end,
+ match_start);
+ }
+ match_end = current_match[1];
+ HandleScope loop_scope(isolate);
+ if (!first) {
+ builder->Add(*isolate->factory()->NewProperSubString(subject,
+ match_start,
+ match_end));
+ } else {
+ builder->Add(*isolate->factory()->NewSubString(subject,
+ match_start,
+ match_end));
+ first = false;
+ }
}
+
+ // If we did not get the maximum number of matches, we can stop here
+ // since there are no matches left.
+ if (num_matches < max_matches) break;
+
if (match_start != match_end) {
pos = match_end;
} else {
pos = match_end + 1;
if (pos > subject_length) break;
}
- } else if (result == RegExpImpl::RE_FAILURE) {
+ } else if (num_matches == 0) {
break;
} else {
- ASSERT_EQ(result, RegExpImpl::RE_EXCEPTION);
- return result;
+ ASSERT_EQ(num_matches, RegExpImpl::RE_EXCEPTION);
+ return RegExpImpl::RE_EXCEPTION;
}
- first = false;
}
if (match_start >= 0) {
@@ -3842,7 +3935,9 @@ static RegExpImpl::IrregexpResult SearchRegExpNoCaptureMultiple(
}
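
Note: the rewritten loop above asks the engine for a whole batch of matches per call (IrregexpExecRaw filling register_vector) and only re-enters it when the output buffer was full, i.e. num_matches == max_matches. A standalone sketch of that batching shape; MatchBatch is a hypothetical stand-in for the raw executor and simply reads from a precomputed, sorted list:

    #include <vector>

    struct Match { int start; int end; };

    // Hypothetical primitive: fills 'out' with up to max_matches matches
    // starting at or after 'pos' and returns how many it found.
    int MatchBatch(const std::vector<Match>& all, int pos, int max_matches,
                   std::vector<Match>* out) {
      out->clear();
      for (const Match& m : all) {
        if (m.start < pos) continue;
        if (static_cast<int>(out->size()) == max_matches) break;
        out->push_back(m);
      }
      return static_cast<int>(out->size());
    }

    std::vector<Match> CollectAllMatches(const std::vector<Match>& all,
                                         int subject_length, int max_matches) {
      std::vector<Match> result;
      std::vector<Match> batch;
      int pos = 0;
      for (;;) {
        int num_matches = MatchBatch(all, pos, max_matches, &batch);
        if (num_matches == 0) break;
        for (const Match& m : batch) result.push_back(m);
        // Fewer results than the buffer holds means the subject is exhausted.
        if (num_matches < max_matches) break;
        const Match& last = batch.back();
        // Advance past the last match; empty matches advance by one so the
        // loop terminates.
        pos = (last.start == last.end) ? last.end + 1 : last.end;
        if (pos > subject_length) break;
      }
      return result;
    }
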
-static RegExpImpl::IrregexpResult SearchRegExpMultiple(
+// Only called from Runtime_RegExpExecMultiple so it doesn't need to maintain
+// separate last match info. See comment on that function.
+static int SearchRegExpMultiple(
Isolate* isolate,
Handle<String> subject,
Handle<JSRegExp> regexp,
@@ -3850,17 +3945,22 @@ static RegExpImpl::IrregexpResult SearchRegExpMultiple(
FixedArrayBuilder* builder) {
ASSERT(subject->IsFlat());
- int required_registers = RegExpImpl::IrregexpPrepare(regexp, subject);
- if (required_registers < 0) return RegExpImpl::RE_EXCEPTION;
-
- OffsetsVector registers(required_registers, isolate);
+ int registers_per_match = RegExpImpl::IrregexpPrepare(regexp, subject,
+ isolate->zone());
+ if (registers_per_match < 0) return RegExpImpl::RE_EXCEPTION;
+
+ int max_matches;
+ int num_registers = RegExpImpl::GlobalOffsetsVectorSize(regexp,
+ registers_per_match,
+ &max_matches);
+ OffsetsVector registers(num_registers, isolate);
Vector<int32_t> register_vector(registers.vector(), registers.length());
- RegExpImpl::IrregexpResult result =
- RegExpImpl::IrregexpExecOnce(regexp,
- subject,
- 0,
- register_vector);
+ int num_matches = RegExpImpl::IrregexpExecRaw(regexp,
+ subject,
+ 0,
+ register_vector,
+ isolate->zone());
int capture_count = regexp->CaptureCount();
int subject_length = subject->length();
@@ -3869,68 +3969,70 @@ static RegExpImpl::IrregexpResult SearchRegExpMultiple(
int pos = 0;
// End of previous match. Differs from pos if match was empty.
int match_end = 0;
- if (result == RegExpImpl::RE_SUCCESS) {
- // Need to keep a copy of the previous match for creating last_match_info
- // at the end, so we have two vectors that we swap between.
- OffsetsVector registers2(required_registers, isolate);
- Vector<int> prev_register_vector(registers2.vector(), registers2.length());
- bool first = true;
+ bool first = true;
+
+ if (num_matches > 0) {
do {
- int match_start = register_vector[0];
- builder->EnsureCapacity(kMaxBuilderEntriesPerRegExpMatch);
- if (match_end < match_start) {
- ReplacementStringBuilder::AddSubjectSlice(builder,
- match_end,
- match_start);
- }
- match_end = register_vector[1];
-
- {
- // Avoid accumulating new handles inside loop.
- HandleScope temp_scope(isolate);
- // Arguments array to replace function is match, captures, index and
- // subject, i.e., 3 + capture count in total.
- Handle<FixedArray> elements =
- isolate->factory()->NewFixedArray(3 + capture_count);
- Handle<String> match;
- if (!first) {
- match = isolate->factory()->NewProperSubString(subject,
- match_start,
- match_end);
- } else {
- match = isolate->factory()->NewSubString(subject,
- match_start,
- match_end);
+ int match_start = 0;
+ for (int match_index = 0; match_index < num_matches; match_index++) {
+ int32_t* current_match =
+ &register_vector[match_index * registers_per_match];
+ match_start = current_match[0];
+ builder->EnsureCapacity(kMaxBuilderEntriesPerRegExpMatch);
+ if (match_end < match_start) {
+ ReplacementStringBuilder::AddSubjectSlice(builder,
+ match_end,
+ match_start);
}
- elements->set(0, *match);
- for (int i = 1; i <= capture_count; i++) {
- int start = register_vector[i * 2];
- if (start >= 0) {
- int end = register_vector[i * 2 + 1];
- ASSERT(start <= end);
- Handle<String> substring;
- if (!first) {
- substring = isolate->factory()->NewProperSubString(subject,
- start,
- end);
+ match_end = current_match[1];
+
+ {
+ // Avoid accumulating new handles inside loop.
+ HandleScope temp_scope(isolate);
+ // Arguments array to replace function is match, captures, index and
+ // subject, i.e., 3 + capture count in total.
+ Handle<FixedArray> elements =
+ isolate->factory()->NewFixedArray(3 + capture_count);
+ Handle<String> match;
+ if (!first) {
+ match = isolate->factory()->NewProperSubString(subject,
+ match_start,
+ match_end);
+ } else {
+ match = isolate->factory()->NewSubString(subject,
+ match_start,
+ match_end);
+ }
+ elements->set(0, *match);
+ for (int i = 1; i <= capture_count; i++) {
+ int start = current_match[i * 2];
+ if (start >= 0) {
+ int end = current_match[i * 2 + 1];
+ ASSERT(start <= end);
+ Handle<String> substring;
+ if (!first) {
+ substring =
+ isolate->factory()->NewProperSubString(subject, start, end);
+ } else {
+ substring =
+ isolate->factory()->NewSubString(subject, start, end);
+ }
+ elements->set(i, *substring);
} else {
- substring = isolate->factory()->NewSubString(subject, start, end);
+ ASSERT(current_match[i * 2 + 1] < 0);
+ elements->set(i, isolate->heap()->undefined_value());
}
- elements->set(i, *substring);
- } else {
- ASSERT(register_vector[i * 2 + 1] < 0);
- elements->set(i, isolate->heap()->undefined_value());
}
+ elements->set(capture_count + 1, Smi::FromInt(match_start));
+ elements->set(capture_count + 2, *subject);
+ builder->Add(*isolate->factory()->NewJSArrayWithElements(elements));
}
- elements->set(capture_count + 1, Smi::FromInt(match_start));
- elements->set(capture_count + 2, *subject);
- builder->Add(*isolate->factory()->NewJSArrayWithElements(elements));
+ first = false;
}
- // Swap register vectors, so the last successful match is in
- // prev_register_vector.
- Vector<int32_t> tmp = prev_register_vector;
- prev_register_vector = register_vector;
- register_vector = tmp;
+
+ // If we did not get the maximum number of matches, we can stop here
+ // since there are no matches left.
+ if (num_matches < max_matches) break;
if (match_end > match_start) {
pos = match_end;
@@ -3941,14 +4043,14 @@ static RegExpImpl::IrregexpResult SearchRegExpMultiple(
}
}
- result = RegExpImpl::IrregexpExecOnce(regexp,
- subject,
- pos,
- register_vector);
- first = false;
- } while (result == RegExpImpl::RE_SUCCESS);
+ num_matches = RegExpImpl::IrregexpExecRaw(regexp,
+ subject,
+ pos,
+ register_vector,
+ isolate->zone());
+ } while (num_matches > 0);
- if (result != RegExpImpl::RE_EXCEPTION) {
+ if (num_matches != RegExpImpl::RE_EXCEPTION) {
// Finished matching, with at least one match.
if (match_end < subject_length) {
ReplacementStringBuilder::AddSubjectSlice(builder,
@@ -3962,20 +4064,23 @@ static RegExpImpl::IrregexpResult SearchRegExpMultiple(
last_match_array->EnsureSize(last_match_array_size);
AssertNoAllocation no_gc;
FixedArray* elements = FixedArray::cast(last_match_array->elements());
+ // We have to set this even though the rest of the last match array is
+ // ignored.
RegExpImpl::SetLastCaptureCount(elements, last_match_capture_count);
+ // These are also read without consulting the override.
RegExpImpl::SetLastSubject(elements, *subject);
RegExpImpl::SetLastInput(elements, *subject);
- for (int i = 0; i < last_match_capture_count; i++) {
- RegExpImpl::SetCapture(elements, i, prev_register_vector[i]);
- }
return RegExpImpl::RE_SUCCESS;
}
}
// No matches at all, return failure or exception result directly.
- return result;
+ return num_matches;
}
+// This is only called for StringReplaceGlobalRegExpWithFunction. This sets
+// lastMatchInfoOverride to maintain the last match info, so we don't need to
+// set any other last match array info.
RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpExecMultiple) {
ASSERT(args.length() == 4);
HandleScope handles(isolate);
@@ -3986,10 +4091,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpExecMultiple) {
CONVERT_ARG_HANDLE_CHECKED(JSArray, last_match_info, 2);
CONVERT_ARG_HANDLE_CHECKED(JSArray, result_array, 3);
- ASSERT(last_match_info->HasFastElements());
+ ASSERT(last_match_info->HasFastObjectElements());
ASSERT(regexp->GetFlags().is_global());
Handle<FixedArray> result_elements;
- if (result_array->HasFastElements()) {
+ if (result_array->HasFastObjectElements()) {
result_elements =
Handle<FixedArray>(FixedArray::cast(result_array->elements()));
}
@@ -4011,7 +4116,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpExecMultiple) {
ASSERT_EQ(regexp->TypeTag(), JSRegExp::IRREGEXP);
- RegExpImpl::IrregexpResult result;
+ int result;
if (regexp->CaptureCount() == 0) {
result = SearchRegExpNoCaptureMultiple(isolate,
subject,
@@ -4291,17 +4396,22 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_KeyedGetProperty) {
// JSObject without a string key. If the key is a Smi, check for a
// definite out-of-bounds access to elements, which is a strong indicator
// that subsequent accesses will also call the runtime. Proactively
- // transition elements to FAST_ELEMENTS to avoid excessive boxing of
+ // transition elements to FAST_*_ELEMENTS to avoid excessive boxing of
// doubles for those future calls in the case that the elements would
// become FAST_DOUBLE_ELEMENTS.
Handle<JSObject> js_object(args.at<JSObject>(0));
ElementsKind elements_kind = js_object->GetElementsKind();
- if (elements_kind == FAST_SMI_ONLY_ELEMENTS ||
- elements_kind == FAST_DOUBLE_ELEMENTS) {
+ if (IsFastElementsKind(elements_kind) &&
+ !IsFastObjectElementsKind(elements_kind)) {
FixedArrayBase* elements = js_object->elements();
if (args.at<Smi>(1)->value() >= elements->length()) {
+ if (IsFastHoleyElementsKind(elements_kind)) {
+ elements_kind = FAST_HOLEY_ELEMENTS;
+ } else {
+ elements_kind = FAST_ELEMENTS;
+ }
MaybeObject* maybe_object = TransitionElements(js_object,
- FAST_ELEMENTS,
+ elements_kind,
isolate);
if (maybe_object->IsFailure()) return maybe_object;
}
@@ -4471,8 +4581,10 @@ MaybeObject* Runtime::SetObjectProperty(Isolate* isolate,
return *value;
}
+ js_object->ValidateElements();
Handle<Object> result = JSObject::SetElement(
js_object, index, value, attr, strict_mode, set_mode);
+ js_object->ValidateElements();
if (result.is_null()) return Failure::Exception();
return *value;
}
@@ -4630,7 +4742,15 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_TransitionElementsSmiToDouble) {
NoHandleAllocation ha;
RUNTIME_ASSERT(args.length() == 1);
Handle<Object> object = args.at<Object>(0);
- return TransitionElements(object, FAST_DOUBLE_ELEMENTS, isolate);
+ if (object->IsJSObject()) {
+ Handle<JSObject> js_object(Handle<JSObject>::cast(object));
+ ElementsKind new_kind = js_object->HasFastHoleyElements()
+ ? FAST_HOLEY_DOUBLE_ELEMENTS
+ : FAST_DOUBLE_ELEMENTS;
+ return TransitionElements(object, new_kind, isolate);
+ } else {
+ return *object;
+ }
}
@@ -4638,7 +4758,15 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_TransitionElementsDoubleToObject) {
NoHandleAllocation ha;
RUNTIME_ASSERT(args.length() == 1);
Handle<Object> object = args.at<Object>(0);
- return TransitionElements(object, FAST_ELEMENTS, isolate);
+ if (object->IsJSObject()) {
+ Handle<JSObject> js_object(Handle<JSObject>::cast(object));
+ ElementsKind new_kind = js_object->HasFastHoleyElements()
+ ? FAST_HOLEY_ELEMENTS
+ : FAST_ELEMENTS;
+ return TransitionElements(object, new_kind, isolate);
+ } else {
+ return *object;
+ }
}
@@ -4670,35 +4798,75 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StoreArrayLiteralElement) {
Object* raw_boilerplate_object = literals->get(literal_index);
Handle<JSArray> boilerplate_object(JSArray::cast(raw_boilerplate_object));
-#if DEBUG
ElementsKind elements_kind = object->GetElementsKind();
-#endif
- ASSERT(elements_kind <= FAST_DOUBLE_ELEMENTS);
+ ASSERT(IsFastElementsKind(elements_kind));
// Smis should never trigger transitions.
ASSERT(!value->IsSmi());
if (value->IsNumber()) {
- ASSERT(elements_kind == FAST_SMI_ONLY_ELEMENTS);
- JSObject::TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
- JSObject::TransitionElementsKind(boilerplate_object, FAST_DOUBLE_ELEMENTS);
- ASSERT(object->GetElementsKind() == FAST_DOUBLE_ELEMENTS);
- FixedDoubleArray* double_array =
- FixedDoubleArray::cast(object->elements());
+ ASSERT(IsFastSmiElementsKind(elements_kind));
+ ElementsKind transitioned_kind = IsFastHoleyElementsKind(elements_kind)
+ ? FAST_HOLEY_DOUBLE_ELEMENTS
+ : FAST_DOUBLE_ELEMENTS;
+ if (IsMoreGeneralElementsKindTransition(
+ boilerplate_object->GetElementsKind(),
+ transitioned_kind)) {
+ JSObject::TransitionElementsKind(boilerplate_object, transitioned_kind);
+ }
+ JSObject::TransitionElementsKind(object, transitioned_kind);
+ ASSERT(IsFastDoubleElementsKind(object->GetElementsKind()));
+ FixedDoubleArray* double_array = FixedDoubleArray::cast(object->elements());
HeapNumber* number = HeapNumber::cast(*value);
double_array->set(store_index, number->Number());
} else {
- ASSERT(elements_kind == FAST_SMI_ONLY_ELEMENTS ||
- elements_kind == FAST_DOUBLE_ELEMENTS);
- JSObject::TransitionElementsKind(object, FAST_ELEMENTS);
- JSObject::TransitionElementsKind(boilerplate_object, FAST_ELEMENTS);
- FixedArray* object_array =
- FixedArray::cast(object->elements());
+ ASSERT(IsFastSmiElementsKind(elements_kind) ||
+ IsFastDoubleElementsKind(elements_kind));
+ ElementsKind transitioned_kind = IsFastHoleyElementsKind(elements_kind)
+ ? FAST_HOLEY_ELEMENTS
+ : FAST_ELEMENTS;
+ JSObject::TransitionElementsKind(object, transitioned_kind);
+ if (IsMoreGeneralElementsKindTransition(
+ boilerplate_object->GetElementsKind(),
+ transitioned_kind)) {
+ JSObject::TransitionElementsKind(boilerplate_object, transitioned_kind);
+ }
+ FixedArray* object_array = FixedArray::cast(object->elements());
object_array->set(store_index, *value);
}
return *object;
}
+// Check whether the debugger is about to step into the callback that is passed
+// to a built-in function such as Array.forEach.
+RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugCallbackSupportsStepping) {
+ if (!isolate->IsDebuggerActive()) return isolate->heap()->false_value();
+ CONVERT_ARG_CHECKED(Object, callback, 0);
+ // We do not step into the callback if it's a builtin or not even a function.
+ if (!callback->IsJSFunction() || JSFunction::cast(callback)->IsBuiltin()) {
+ return isolate->heap()->false_value();
+ }
+ return isolate->heap()->true_value();
+}
+
+
+// Set one shot breakpoints for the callback function that is passed to a
+// built-in function such as Array.forEach to enable stepping into the callback.
+RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugPrepareStepInIfStepping) {
+ Debug* debug = isolate->debug();
+ if (!debug->IsStepping()) return NULL;
+ CONVERT_ARG_CHECKED(Object, callback, 0);
+ HandleScope scope(isolate);
+ Handle<SharedFunctionInfo> shared_info(JSFunction::cast(callback)->shared());
+ // When we leave the callback, step-out is activated, but it is not
+ // performed as long as we do not leave the builtin. To be able to step
+ // into the callback again, we need to clear the step-out at this point.
+ debug->ClearStepOut();
+ debug->FloodWithOneShot(shared_info);
+ return NULL;
+}
+
+
// Set a local property, even if it is READ_ONLY. If the property does not
// exist, it will be added with attributes NONE.
RUNTIME_FUNCTION(MaybeObject*, Runtime_IgnoreAttributesAndSetProperty) {
@@ -5873,7 +6041,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_QuoteJSONStringArray) {
ASSERT(args.length() == 1);
CONVERT_ARG_CHECKED(JSArray, array, 0);
- if (!array->HasFastElements()) return isolate->heap()->undefined_value();
+ if (!array->HasFastObjectElements()) {
+ return isolate->heap()->undefined_value();
+ }
FixedArray* elements = FixedArray::cast(array->elements());
int n = elements->length();
bool ascii = true;
@@ -6293,17 +6463,18 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringSplit) {
static const int kMaxInitialListCapacity = 16;
+ Zone* zone = isolate->zone();
ZoneScope scope(isolate, DELETE_ON_EXIT);
// Find (up to limit) indices of separator and end-of-string in subject
int initial_capacity = Min<uint32_t>(kMaxInitialListCapacity, limit);
- ZoneList<int> indices(initial_capacity);
+ ZoneList<int> indices(initial_capacity, zone);
if (!pattern->IsFlat()) FlattenString(pattern);
- FindStringIndicesDispatch(isolate, *subject, *pattern, &indices, limit);
+ FindStringIndicesDispatch(isolate, *subject, *pattern, &indices, limit, zone);
if (static_cast<uint32_t>(indices.length()) < limit) {
- indices.Add(subject_length);
+ indices.Add(subject_length, zone);
}
// The list indices now contains the end of each part to create.
@@ -6316,7 +6487,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringSplit) {
if (maybe_result->IsFailure()) return maybe_result;
result->set_length(Smi::FromInt(part_count));
- ASSERT(result->HasFastElements());
+ ASSERT(result->HasFastObjectElements());
if (part_count == 1 && indices.at(0) == subject_length) {
FixedArray::cast(result->elements())->set(0, *subject);
@@ -6335,7 +6506,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringSplit) {
}
if (limit == 0xffffffffu) {
- if (result->HasFastElements()) {
+ if (result->HasFastObjectElements()) {
StringSplitCache::Enter(isolate->heap(),
isolate->heap()->string_split_cache(),
*subject,
@@ -6692,7 +6863,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringBuilderConcat) {
if (maybe_result->IsFailure()) return maybe_result;
int special_length = special->length();
- if (!array->HasFastElements()) {
+ if (!array->HasFastObjectElements()) {
return isolate->Throw(isolate->heap()->illegal_argument_symbol());
}
FixedArray* fixed_array = FixedArray::cast(array->elements());
@@ -6802,7 +6973,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringBuilderJoin) {
int array_length = args.smi_at(1);
CONVERT_ARG_CHECKED(String, separator, 2);
- if (!array->HasFastElements()) {
+ if (!array->HasFastObjectElements()) {
return isolate->Throw(isolate->heap()->illegal_argument_symbol());
}
FixedArray* fixed_array = FixedArray::cast(array->elements());
@@ -6919,8 +7090,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SparseJoinWithSeparator) {
NoHandleAllocation ha;
ASSERT(args.length() == 3);
CONVERT_ARG_CHECKED(JSArray, elements_array, 0);
- RUNTIME_ASSERT(elements_array->HasFastElements() ||
- elements_array->HasFastSmiOnlyElements());
+ RUNTIME_ASSERT(elements_array->HasFastSmiOrObjectElements());
CONVERT_NUMBER_CHECKED(uint32_t, array_length, Uint32, args[1]);
CONVERT_ARG_CHECKED(String, separator, 2);
// elements_array is fast-mode JSarray of alternating positions
@@ -8122,6 +8292,14 @@ static void MaterializeArgumentsObjectInFrame(Isolate* isolate,
ASSERT(*arguments != isolate->heap()->undefined_value());
}
frame->SetExpression(i, *arguments);
+ if (FLAG_trace_deopt) {
+ PrintF("Materializing arguments object for frame %p - %p: %p ",
+ reinterpret_cast<void*>(frame->sp()),
+ reinterpret_cast<void*>(frame->fp()),
+ reinterpret_cast<void*>(*arguments));
+ arguments->ShortPrint();
+ PrintF("\n");
+ }
}
}
}
@@ -8211,6 +8389,19 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeoptimizeFunction) {
}
+RUNTIME_FUNCTION(MaybeObject*, Runtime_ClearFunctionTypeFeedback) {
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 1);
+ CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
+ Code* unoptimized = function->shared()->code();
+ if (unoptimized->kind() == Code::FUNCTION) {
+ unoptimized->ClearInlineCaches();
+ unoptimized->ClearTypeFeedbackCells(isolate->heap());
+ }
+ return isolate->heap()->undefined_value();
+}
+
+
RUNTIME_FUNCTION(MaybeObject*, Runtime_RunningInSimulator) {
#if defined(USE_SIMULATOR)
return isolate->heap()->true_value();
@@ -8250,10 +8441,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOptimizationStatus) {
if (!V8::UseCrankshaft()) {
return Smi::FromInt(4); // 4 == "never".
}
+ CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
if (FLAG_always_opt) {
- return Smi::FromInt(3); // 3 == "always".
+ // Under --always-opt, optimization is best-effort rather than a
+ // guarantee, so we still say "no" if the function is not optimized.
+ return function->IsOptimized() ? Smi::FromInt(3) // 3 == "always".
+ : Smi::FromInt(2); // 2 == "no".
}
- CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
return function->IsOptimized() ? Smi::FromInt(1) // 1 == "yes".
: Smi::FromInt(2); // 2 == "no".
}
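
Runtime_GetOptimizationStatus reports its result as a bare Smi; the meaning of each value only appears in the inline comments. A small illustrative sketch of that mapping (the enum and helper are invented here, the runtime itself returns plain integers):

#include <cstdio>

// Invented names for the integer codes returned by %GetOptimizationStatus;
// the runtime itself returns bare Smis.
enum OptimizationStatus {
  kYes = 1,     // currently optimized
  kNo = 2,      // not optimized (also reported under --always-opt until
                // optimization has actually happened)
  kAlways = 3,  // --always-opt is on and the function is optimized
  kNever = 4    // Crankshaft is not in use at all
};

static const char* Describe(int status) {
  switch (status) {
    case kYes:    return "yes";
    case kNo:     return "no";
    case kAlways: return "always";
    case kNever:  return "never";
    default:      return "unknown";
  }
}

int main() {
  std::printf("%s\n", Describe(kAlways));  // prints "always"
  return 0;
}
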
@@ -8357,14 +8551,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) {
PrintF("]\n");
}
Handle<Code> check_code;
-#if defined(V8_TARGET_ARCH_IA32) || \
- defined(V8_TARGET_ARCH_ARM) || \
- defined(V8_TARGET_ARCH_MIPS)
if (FLAG_count_based_interrupts) {
InterruptStub interrupt_stub;
check_code = interrupt_stub.GetCode();
} else // NOLINT
-#endif
{ // NOLINT
StackCheckStub check_stub;
check_code = check_stub.GetCode();
@@ -8603,6 +8793,25 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_PushBlockContext) {
}
+RUNTIME_FUNCTION(MaybeObject*, Runtime_PushModuleContext) {
+ NoHandleAllocation ha;
+ ASSERT(args.length() == 2);
+ CONVERT_ARG_CHECKED(ScopeInfo, scope_info, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSModule, instance, 1);
+
+ Context* context;
+ MaybeObject* maybe_context =
+ isolate->heap()->AllocateModuleContext(isolate->context(),
+ scope_info);
+ if (!maybe_context->To(&context)) return maybe_context;
+ // Also initialize the context slot of the instance object.
+ instance->set_context(context);
+ isolate->set_context(context);
+
+ return context;
+}
+
+
RUNTIME_FUNCTION(MaybeObject*, Runtime_DeleteContextSlot) {
HandleScope scope(isolate);
ASSERT(args.length() == 2);
@@ -9042,7 +9251,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DateParseString) {
MaybeObject* maybe_result_array =
output->EnsureCanContainHeapObjectElements();
if (maybe_result_array->IsFailure()) return maybe_result_array;
- RUNTIME_ASSERT(output->HasFastElements());
+ RUNTIME_ASSERT(output->HasFastObjectElements());
AssertNoAllocation no_allocation;
@@ -9104,13 +9313,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ParseJson) {
ASSERT_EQ(1, args.length());
CONVERT_ARG_HANDLE_CHECKED(String, source, 0);
+ Zone* zone = isolate->zone();
source = Handle<String>(source->TryFlattenGetString());
// Optimized fast case where we only have ASCII characters.
Handle<Object> result;
if (source->IsSeqAsciiString()) {
- result = JsonParser<true>::Parse(source);
+ result = JsonParser<true>::Parse(source, zone);
} else {
- result = JsonParser<false>::Parse(source);
+ result = JsonParser<false>::Parse(source, zone);
}
if (result.is_null()) {
// Syntax error or stack overflow in scanner.
@@ -9274,7 +9484,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_PushIfAbsent) {
ASSERT(args.length() == 2);
CONVERT_ARG_CHECKED(JSArray, array, 0);
CONVERT_ARG_CHECKED(JSObject, element, 1);
- RUNTIME_ASSERT(array->HasFastElements() || array->HasFastSmiOnlyElements());
+ RUNTIME_ASSERT(array->HasFastSmiOrObjectElements());
int length = Smi::cast(array->length())->value();
FixedArray* elements = FixedArray::cast(array->elements());
for (int i = 0; i < length; i++) {
@@ -9359,7 +9569,7 @@ class ArrayConcatVisitor {
Handle<Map> map;
if (fast_elements_) {
map = isolate_->factory()->GetElementsTransitionMap(array,
- FAST_ELEMENTS);
+ FAST_HOLEY_ELEMENTS);
} else {
map = isolate_->factory()->GetElementsTransitionMap(array,
DICTIONARY_ELEMENTS);
@@ -9418,8 +9628,10 @@ static uint32_t EstimateElementCount(Handle<JSArray> array) {
uint32_t length = static_cast<uint32_t>(array->length()->Number());
int element_count = 0;
switch (array->GetElementsKind()) {
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS: {
+ case FAST_SMI_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS: {
// Fast elements can't have lengths that are not representable by
// a 32-bit signed integer.
ASSERT(static_cast<int32_t>(FixedArray::kMaxLength) >= 0);
@@ -9431,6 +9643,7 @@ static uint32_t EstimateElementCount(Handle<JSArray> array) {
break;
}
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
// TODO(1810): Decide if it's worthwhile to implement this.
UNREACHABLE();
break;
@@ -9521,8 +9734,10 @@ static void CollectElementIndices(Handle<JSObject> object,
List<uint32_t>* indices) {
ElementsKind kind = object->GetElementsKind();
switch (kind) {
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS: {
+ case FAST_SMI_ELEMENTS:
+ case FAST_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS: {
Handle<FixedArray> elements(FixedArray::cast(object->elements()));
uint32_t length = static_cast<uint32_t>(elements->length());
if (range < length) length = range;
@@ -9533,6 +9748,7 @@ static void CollectElementIndices(Handle<JSObject> object,
}
break;
}
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case FAST_DOUBLE_ELEMENTS: {
// TODO(1810): Decide if it's worthwhile to implement this.
UNREACHABLE();
@@ -9647,8 +9863,10 @@ static bool IterateElements(Isolate* isolate,
ArrayConcatVisitor* visitor) {
uint32_t length = static_cast<uint32_t>(receiver->length()->Number());
switch (receiver->GetElementsKind()) {
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS: {
+ case FAST_SMI_ELEMENTS:
+ case FAST_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS: {
// Run through the elements FixedArray and use HasElement and GetElement
// to check the prototype for missing elements.
Handle<FixedArray> elements(FixedArray::cast(receiver->elements()));
@@ -9669,6 +9887,7 @@ static bool IterateElements(Isolate* isolate,
}
break;
}
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case FAST_DOUBLE_ELEMENTS: {
// TODO(1810): Decide if it's worthwhile to implement this.
UNREACHABLE();
@@ -9766,7 +9985,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayConcat) {
CONVERT_ARG_HANDLE_CHECKED(JSArray, arguments, 0);
int argument_count = static_cast<int>(arguments->length()->Number());
- RUNTIME_ASSERT(arguments->HasFastElements());
+ RUNTIME_ASSERT(arguments->HasFastObjectElements());
Handle<FixedArray> elements(FixedArray::cast(arguments->elements()));
// Pass 1: estimate the length and number of elements of the result.
@@ -9786,10 +10005,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayConcat) {
Handle<JSArray> array(Handle<JSArray>::cast(obj));
// TODO(1810): Find out if it's worthwhile to properly support
// arbitrary ElementsKinds. For now, pessimistically transition to
- // FAST_ELEMENTS.
+ // FAST_*_ELEMENTS.
if (array->HasFastDoubleElements()) {
+ ElementsKind to_kind = FAST_ELEMENTS;
+ if (array->HasFastHoleyElements()) {
+ to_kind = FAST_HOLEY_ELEMENTS;
+ }
array = Handle<JSArray>::cast(
- JSObject::TransitionElementsKind(array, FAST_ELEMENTS));
+ JSObject::TransitionElementsKind(array, to_kind));
}
length_estimate =
static_cast<uint32_t>(array->length()->Number());
@@ -9886,29 +10109,22 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_MoveArrayContents) {
ASSERT(args.length() == 2);
CONVERT_ARG_CHECKED(JSArray, from, 0);
CONVERT_ARG_CHECKED(JSArray, to, 1);
+ from->ValidateElements();
+ to->ValidateElements();
FixedArrayBase* new_elements = from->elements();
+ ElementsKind from_kind = from->GetElementsKind();
MaybeObject* maybe_new_map;
- ElementsKind elements_kind;
- if (new_elements->map() == isolate->heap()->fixed_array_map() ||
- new_elements->map() == isolate->heap()->fixed_cow_array_map()) {
- elements_kind = FAST_ELEMENTS;
- } else if (new_elements->map() ==
- isolate->heap()->fixed_double_array_map()) {
- elements_kind = FAST_DOUBLE_ELEMENTS;
- } else {
- elements_kind = DICTIONARY_ELEMENTS;
- }
- maybe_new_map = to->GetElementsTransitionMap(isolate, elements_kind);
+ maybe_new_map = to->GetElementsTransitionMap(isolate, from_kind);
Object* new_map;
if (!maybe_new_map->ToObject(&new_map)) return maybe_new_map;
- to->set_map(Map::cast(new_map));
- to->set_elements(new_elements);
+ to->set_map_and_elements(Map::cast(new_map), new_elements);
to->set_length(from->length());
Object* obj;
{ MaybeObject* maybe_obj = from->ResetElements();
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
from->set_length(Smi::FromInt(0));
+ to->ValidateElements();
return to;
}
@@ -9958,8 +10174,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetArrayKeys) {
}
return *isolate->factory()->NewJSArrayWithElements(keys);
} else {
- ASSERT(array->HasFastElements() ||
- array->HasFastSmiOnlyElements() ||
+ ASSERT(array->HasFastSmiOrObjectElements() ||
array->HasFastDoubleElements());
Handle<FixedArray> single_interval = isolate->factory()->NewFixedArray(2);
// -1 means start of array.
@@ -10074,7 +10289,6 @@ static MaybeObject* DebugLookupResultValue(Heap* heap,
}
case INTERCEPTOR:
case MAP_TRANSITION:
- case ELEMENTS_TRANSITION:
case CONSTANT_TRANSITION:
case NULL_DESCRIPTOR:
return heap->undefined_value();
@@ -10904,10 +11118,10 @@ static Handle<JSObject> MaterializeModuleScope(
}
-// Iterate over the actual scopes visible from a stack frame. The iteration
-// proceeds from the innermost visible nested scope outwards. All scopes are
-// backed by an actual context except the local scope, which is inserted
-// "artificially" in the context chain.
+// Iterate over the actual scopes visible from a stack frame or from a closure.
+// The iteration proceeds from the innermost visible nested scope outwards.
+// All scopes are backed by an actual context except the local scope,
+// which is inserted "artificially" in the context chain.
class ScopeIterator {
public:
enum ScopeType {
@@ -11008,6 +11222,18 @@ class ScopeIterator {
}
}
+ ScopeIterator(Isolate* isolate,
+ Handle<JSFunction> function)
+ : isolate_(isolate),
+ frame_(NULL),
+ inlined_jsframe_index_(0),
+ function_(function),
+ context_(function->context()) {
+ if (function->IsBuiltin()) {
+ context_ = Handle<Context>();
+ }
+ }
+
// More scopes?
bool Done() { return context_.is_null(); }
@@ -11228,6 +11454,22 @@ static const int kScopeDetailsTypeIndex = 0;
static const int kScopeDetailsObjectIndex = 1;
static const int kScopeDetailsSize = 2;
+
+static MaybeObject* MaterializeScopeDetails(Isolate* isolate,
+ ScopeIterator* it) {
+ // Calculate the size of the result.
+ int details_size = kScopeDetailsSize;
+ Handle<FixedArray> details = isolate->factory()->NewFixedArray(details_size);
+
+ // Fill in scope details.
+ details->set(kScopeDetailsTypeIndex, Smi::FromInt(it->Type()));
+ Handle<JSObject> scope_object = it->ScopeObject();
+ RETURN_IF_EMPTY_HANDLE(isolate, scope_object);
+ details->set(kScopeDetailsObjectIndex, *scope_object);
+
+ return *isolate->factory()->NewJSArrayWithElements(details);
+}
+
// Return an array with scope details
// args[0]: number: break id
// args[1]: number: frame index
@@ -11265,18 +11507,46 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetScopeDetails) {
if (it.Done()) {
return isolate->heap()->undefined_value();
}
+ return MaterializeScopeDetails(isolate, &it);
+}
- // Calculate the size of the result.
- int details_size = kScopeDetailsSize;
- Handle<FixedArray> details = isolate->factory()->NewFixedArray(details_size);
- // Fill in scope details.
- details->set(kScopeDetailsTypeIndex, Smi::FromInt(it.Type()));
- Handle<JSObject> scope_object = it.ScopeObject();
- RETURN_IF_EMPTY_HANDLE(isolate, scope_object);
- details->set(kScopeDetailsObjectIndex, *scope_object);
+RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFunctionScopeCount) {
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 1);
- return *isolate->factory()->NewJSArrayWithElements(details);
+ // Check arguments.
+ CONVERT_ARG_HANDLE_CHECKED(JSFunction, fun, 0);
+
+ // Count the visible scopes.
+ int n = 0;
+ for (ScopeIterator it(isolate, fun); !it.Done(); it.Next()) {
+ n++;
+ }
+
+ return Smi::FromInt(n);
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFunctionScopeDetails) {
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 2);
+
+ // Check arguments.
+ CONVERT_ARG_HANDLE_CHECKED(JSFunction, fun, 0);
+ CONVERT_NUMBER_CHECKED(int, index, Int32, args[1]);
+
+ // Find the requested scope.
+ int n = 0;
+ ScopeIterator it(isolate, fun);
+ for (; !it.Done() && n < index; it.Next()) {
+ n++;
+ }
+ if (it.Done()) {
+ return isolate->heap()->undefined_value();
+ }
+
+ return MaterializeScopeDetails(isolate, &it);
}
@@ -12270,8 +12540,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugDisassembleFunction) {
ASSERT(args.length() == 1);
// Get the function and make sure it is compiled.
CONVERT_ARG_HANDLE_CHECKED(JSFunction, func, 0);
- Handle<SharedFunctionInfo> shared(func->shared());
- if (!SharedFunctionInfo::EnsureCompiled(shared, KEEP_EXCEPTION)) {
+ if (!JSFunction::CompileLazy(func, KEEP_EXCEPTION)) {
return Failure::Exception();
}
func->code()->PrintLn();
@@ -12286,11 +12555,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugDisassembleConstructor) {
ASSERT(args.length() == 1);
// Get the function and make sure it is compiled.
CONVERT_ARG_HANDLE_CHECKED(JSFunction, func, 0);
- Handle<SharedFunctionInfo> shared(func->shared());
- if (!SharedFunctionInfo::EnsureCompiled(shared, KEEP_EXCEPTION)) {
+ if (!JSFunction::CompileLazy(func, KEEP_EXCEPTION)) {
return Failure::Exception();
}
- shared->construct_stub()->PrintLn();
+ func->shared()->construct_stub()->PrintLn();
#endif // DEBUG
return isolate->heap()->undefined_value();
}
@@ -12507,7 +12775,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditCheckAndDropActivations) {
CONVERT_ARG_HANDLE_CHECKED(JSArray, shared_array, 0);
CONVERT_BOOLEAN_ARG_CHECKED(do_drop, 1);
- return *LiveEdit::CheckAndDropActivations(shared_array, do_drop);
+ return *LiveEdit::CheckAndDropActivations(shared_array, do_drop,
+ isolate->zone());
}
// Compares 2 strings line-by-line, then token-wise and returns diff in form
@@ -13218,9 +13487,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_IS_VAR) {
return isolate->heap()->ToBoolean(obj->Has##Name()); \
}
-ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(FastSmiOnlyElements)
-ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(FastElements)
+ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(FastSmiElements)
+ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(FastObjectElements)
+ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(FastSmiOrObjectElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(FastDoubleElements)
+ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(FastHoleyElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(DictionaryElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(ExternalPixelElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(ExternalArrayElements)
@@ -13232,6 +13503,8 @@ ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(ExternalIntElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(ExternalUnsignedIntElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(ExternalFloatElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(ExternalDoubleElements)
+// Properties test sitting with elements tests - not fooling anyone.
+ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(FastProperties)
#undef ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION
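
The Has* checks above are stamped out by a single X-macro, so adding a new predicate is one line in this list plus a matching F(...) entry in runtime.h. A minimal standalone sketch of the same X-macro technique, using an invented struct and predicates:

#include <cstdio>

struct Thing {
  bool fast;
  bool holey;
};

// One function per line in the list, mirroring how
// ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION stamps out the Has* runtime entries.
#define CHECK_FUNCTION(Name, field)        \
  static bool Has##Name(const Thing& t) {  \
    return t.field;                        \
  }

CHECK_FUNCTION(FastElements, fast)
CHECK_FUNCTION(HoleyElements, holey)
#undef CHECK_FUNCTION

int main() {
  Thing t = { true, false };
  std::printf("%d %d\n", HasFastElements(t), HasHoleyElements(t));  // 1 0
  return 0;
}
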
diff --git a/deps/v8/src/runtime.h b/deps/v8/src/runtime.h
index ef351da962..f5a4f50206 100644
--- a/deps/v8/src/runtime.h
+++ b/deps/v8/src/runtime.h
@@ -89,6 +89,7 @@ namespace internal {
F(NotifyDeoptimized, 1, 1) \
F(NotifyOSR, 0, 1) \
F(DeoptimizeFunction, 1, 1) \
+ F(ClearFunctionTypeFeedback, 1, 1) \
F(RunningInSimulator, 0, 1) \
F(OptimizeFunctionOnNextCall, -1, 1) \
F(GetOptimizationStatus, 1, 1) \
@@ -98,6 +99,8 @@ namespace internal {
F(AllocateInNewSpace, 1, 1) \
F(SetNativeFlag, 1, 1) \
F(StoreArrayLiteralElement, 5, 1) \
+ F(DebugCallbackSupportsStepping, 1, 1) \
+ F(DebugPrepareStepInIfStepping, 1, 1) \
\
/* Array join support */ \
F(PushIfAbsent, 2, 1) \
@@ -323,6 +326,7 @@ namespace internal {
F(PushWithContext, 2, 1) \
F(PushCatchContext, 3, 1) \
F(PushBlockContext, 2, 1) \
+ F(PushModuleContext, 2, 1) \
F(DeleteContextSlot, 2, 1) \
F(LoadContextSlot, 2, 2) \
F(LoadContextSlotNoReferenceError, 2, 2) \
@@ -360,9 +364,11 @@ namespace internal {
F(IS_VAR, 1, 1) \
\
/* expose boolean functions from objects-inl.h */ \
- F(HasFastSmiOnlyElements, 1, 1) \
- F(HasFastElements, 1, 1) \
+ F(HasFastSmiElements, 1, 1) \
+ F(HasFastSmiOrObjectElements, 1, 1) \
+ F(HasFastObjectElements, 1, 1) \
F(HasFastDoubleElements, 1, 1) \
+ F(HasFastHoleyElements, 1, 1) \
F(HasDictionaryElements, 1, 1) \
F(HasExternalPixelElements, 1, 1) \
F(HasExternalArrayElements, 1, 1) \
@@ -374,6 +380,7 @@ namespace internal {
F(HasExternalUnsignedIntElements, 1, 1) \
F(HasExternalFloatElements, 1, 1) \
F(HasExternalDoubleElements, 1, 1) \
+ F(HasFastProperties, 1, 1) \
F(TransitionElementsSmiToDouble, 1, 1) \
F(TransitionElementsDoubleToObject, 1, 1) \
F(HaveSameMap, 2, 1) \
@@ -400,6 +407,8 @@ namespace internal {
F(GetFrameDetails, 2, 1) \
F(GetScopeCount, 2, 1) \
F(GetScopeDetails, 4, 1) \
+ F(GetFunctionScopeCount, 1, 1) \
+ F(GetFunctionScopeDetails, 2, 1) \
F(DebugPrintScopes, 0, 1) \
F(GetThreadCount, 1, 1) \
F(GetThreadDetails, 2, 1) \
diff --git a/deps/v8/src/safepoint-table.cc b/deps/v8/src/safepoint-table.cc
index 89ad8afabe..714e5c3977 100644
--- a/deps/v8/src/safepoint-table.cc
+++ b/deps/v8/src/safepoint-table.cc
@@ -116,8 +116,8 @@ void SafepointTable::PrintBits(uint8_t byte, int digits) {
}
-void Safepoint::DefinePointerRegister(Register reg) {
- registers_->Add(reg.code());
+void Safepoint::DefinePointerRegister(Register reg, Zone* zone) {
+ registers_->Add(reg.code(), zone);
}
@@ -131,15 +131,16 @@ Safepoint SafepointTableBuilder::DefineSafepoint(
info.pc = assembler->pc_offset();
info.arguments = arguments;
info.has_doubles = (kind & Safepoint::kWithDoubles);
- deoptimization_info_.Add(info);
- deopt_index_list_.Add(Safepoint::kNoDeoptimizationIndex);
+ deoptimization_info_.Add(info, zone_);
+ deopt_index_list_.Add(Safepoint::kNoDeoptimizationIndex, zone_);
if (deopt_mode == Safepoint::kNoLazyDeopt) {
last_lazy_safepoint_ = deopt_index_list_.length();
}
- indexes_.Add(new ZoneList<int>(8));
+ indexes_.Add(new(zone_) ZoneList<int>(8, zone_), zone_);
registers_.Add((kind & Safepoint::kWithRegisters)
- ? new ZoneList<int>(4)
- : NULL);
+ ? new(zone_) ZoneList<int>(4, zone_)
+ : NULL,
+ zone_);
return Safepoint(indexes_.last(), registers_.last());
}
@@ -190,12 +191,12 @@ void SafepointTableBuilder::Emit(Assembler* assembler, int bits_per_entry) {
}
// Emit table of bitmaps.
- ZoneList<uint8_t> bits(bytes_per_entry);
+ ZoneList<uint8_t> bits(bytes_per_entry, zone_);
for (int i = 0; i < length; i++) {
ZoneList<int>* indexes = indexes_[i];
ZoneList<int>* registers = registers_[i];
bits.Clear();
- bits.AddBlock(0, bytes_per_entry);
+ bits.AddBlock(0, bytes_per_entry, zone_);
// Run through the registers (if any).
ASSERT(IsAligned(kNumSafepointRegisters, kBitsPerByte));
diff --git a/deps/v8/src/safepoint-table.h b/deps/v8/src/safepoint-table.h
index 57fceecd92..307d948bfc 100644
--- a/deps/v8/src/safepoint-table.h
+++ b/deps/v8/src/safepoint-table.h
@@ -183,8 +183,8 @@ class Safepoint BASE_EMBEDDED {
static const int kNoDeoptimizationIndex =
(1 << (SafepointEntry::kDeoptIndexBits)) - 1;
- void DefinePointerSlot(int index) { indexes_->Add(index); }
- void DefinePointerRegister(Register reg);
+ void DefinePointerSlot(int index, Zone* zone) { indexes_->Add(index, zone); }
+ void DefinePointerRegister(Register reg, Zone* zone);
private:
Safepoint(ZoneList<int>* indexes, ZoneList<int>* registers) :
@@ -198,13 +198,14 @@ class Safepoint BASE_EMBEDDED {
class SafepointTableBuilder BASE_EMBEDDED {
public:
- SafepointTableBuilder()
- : deoptimization_info_(32),
- deopt_index_list_(32),
- indexes_(32),
- registers_(32),
+ explicit SafepointTableBuilder(Zone* zone)
+ : deoptimization_info_(32, zone),
+ deopt_index_list_(32, zone),
+ indexes_(32, zone),
+ registers_(32, zone),
emitted_(false),
- last_lazy_safepoint_(0) { }
+ last_lazy_safepoint_(0),
+ zone_(zone) { }
// Get the offset of the emitted safepoint table in the code.
unsigned GetCodeOffset() const;
@@ -242,6 +243,8 @@ class SafepointTableBuilder BASE_EMBEDDED {
bool emitted_;
int last_lazy_safepoint_;
+ Zone* zone_;
+
DISALLOW_COPY_AND_ASSIGN(SafepointTableBuilder);
};
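
As in most of this patch, the safepoint table builder now threads an explicit Zone through construction and every list operation instead of relying on an ambient allocator. A rough standalone sketch of that pattern, with a toy bump allocator and list as simplified stand-ins (not V8's Zone or ZoneList):

#include <cassert>
#include <cstddef>
#include <cstring>

// Toy bump allocator standing in for v8::internal::Zone.
class Zone {
 public:
  Zone() : used_(0) {}
  void* New(size_t size) {
    assert(used_ + size <= sizeof(buffer_));
    void* result = buffer_ + used_;
    used_ += (size + 7) & ~static_cast<size_t>(7);  // keep 8-byte alignment
    return result;
  }
 private:
  char buffer_[1 << 16];
  size_t used_;
};

// A list whose backing store comes from the zone passed to Add, mirroring
// ZoneList<T>::Add(value, zone) as used throughout this patch.
template <typename T>
class ZoneList {
 public:
  ZoneList(int capacity, Zone* zone)
      : data_(static_cast<T*>(zone->New(capacity * sizeof(T)))),
        capacity_(capacity),
        length_(0) {}
  void Add(const T& value, Zone* zone) {
    if (length_ == capacity_) {
      int new_capacity = capacity_ * 2;
      T* new_data = static_cast<T*>(zone->New(new_capacity * sizeof(T)));
      std::memcpy(new_data, data_, length_ * sizeof(T));  // POD-only sketch
      data_ = new_data;
      capacity_ = new_capacity;
    }
    data_[length_++] = value;
  }
  int length() const { return length_; }
  const T& at(int i) const { return data_[i]; }
 private:
  T* data_;
  int capacity_;
  int length_;
};

int main() {
  Zone zone;
  ZoneList<int> deopt_indexes(4, &zone);
  for (int i = 0; i < 10; i++) deopt_indexes.Add(i, &zone);
  assert(deopt_indexes.length() == 10 && deopt_indexes.at(9) == 9);
  return 0;
}
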
diff --git a/deps/v8/src/scanner.cc b/deps/v8/src/scanner.cc
index 7901b5d826..f24af2ed26 100755
--- a/deps/v8/src/scanner.cc
+++ b/deps/v8/src/scanner.cc
@@ -611,7 +611,7 @@ void Scanner::SeekForward(int pos) {
}
-void Scanner::ScanEscape() {
+bool Scanner::ScanEscape() {
uc32 c = c0_;
Advance();
@@ -621,7 +621,7 @@ void Scanner::ScanEscape() {
if (IsCarriageReturn(c) && IsLineFeed(c0_)) Advance();
// Allow LF+CR newlines in multiline string literals.
if (IsLineFeed(c) && IsCarriageReturn(c0_)) Advance();
- return;
+ return true;
}
switch (c) {
@@ -635,13 +635,13 @@ void Scanner::ScanEscape() {
case 't' : c = '\t'; break;
case 'u' : {
c = ScanHexNumber(4);
- if (c < 0) c = 'u';
+ if (c < 0) return false;
break;
}
case 'v' : c = '\v'; break;
case 'x' : {
c = ScanHexNumber(2);
- if (c < 0) c = 'x';
+ if (c < 0) return false;
break;
}
case '0' : // fall through
@@ -654,10 +654,11 @@ void Scanner::ScanEscape() {
case '7' : c = ScanOctalEscape(c, 2); break;
}
- // According to ECMA-262, 3rd, 7.8.4 (p 18ff) these
- // should be illegal, but they are commonly handled
- // as non-escaped characters by JS VMs.
+ // According to ECMA-262, section 7.8.4, characters not covered by the
+ // above cases should be illegal, but they are commonly handled as
+ // non-escaped characters by JS VMs.
AddLiteralChar(c);
+ return true;
}
@@ -696,8 +697,7 @@ Token::Value Scanner::ScanString() {
uc32 c = c0_;
Advance();
if (c == '\\') {
- if (c0_ < 0) return Token::ILLEGAL;
- ScanEscape();
+ if (c0_ < 0 || !ScanEscape()) return Token::ILLEGAL;
} else {
AddLiteralChar(c);
}
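
With ScanEscape returning bool, a malformed \u or \x escape now makes the whole string literal ILLEGAL instead of silently degrading to the letter 'u' or 'x'. A small standalone sketch of the same propagate-failure shape, using a hand-rolled hex scanner rather than V8's:

#include <cctype>
#include <cstdio>
#include <string>

// Scans exactly `digits` hex digits starting at s[i]; returns the decoded
// value or -1 on failure, like Scanner::ScanHexNumber.
static int ScanHexNumber(const std::string& s, size_t i, int digits) {
  int value = 0;
  for (int d = 0; d < digits; d++) {
    if (i + d >= s.size()) return -1;
    unsigned char c = static_cast<unsigned char>(s[i + d]);
    if (!std::isxdigit(c)) return -1;
    int nibble = std::isdigit(c) ? c - '0' : std::tolower(c) - 'a' + 10;
    value = value * 16 + nibble;
  }
  return value;
}

// Returns false when the escape is malformed so the caller can reject the
// whole literal - the behaviour ScanEscape() adopts in this patch.
static bool ScanUnicodeEscape(const std::string& s, size_t i, int* out) {
  int value = ScanHexNumber(s, i, 4);
  if (value < 0) return false;
  *out = value;
  return true;
}

int main() {
  int code = 0;
  std::printf("%d\n", ScanUnicodeEscape("0041", 0, &code));  // 1 (code == 0x41)
  std::printf("%d\n", ScanUnicodeEscape("00g1", 0, &code));  // 0 (rejected)
  return 0;
}
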
diff --git a/deps/v8/src/scanner.h b/deps/v8/src/scanner.h
index 045e7d27a6..4de413b885 100644
--- a/deps/v8/src/scanner.h
+++ b/deps/v8/src/scanner.h
@@ -520,13 +520,16 @@ class Scanner {
Token::Value ScanIdentifierOrKeyword();
Token::Value ScanIdentifierSuffix(LiteralScope* literal);
- void ScanEscape();
Token::Value ScanString();
- // Decodes a unicode escape-sequence which is part of an identifier.
+ // Scans an escape-sequence which is part of a string and adds the
+ // decoded character to the current literal. Returns true if the escape
+ // sequence was scanned successfully.
+ bool ScanEscape();
+ // Decodes a Unicode escape-sequence which is part of an identifier.
// If the escape sequence cannot be decoded the result is kBadChar.
uc32 ScanIdentifierUnicodeEscape();
- // Recognizes a uniocde escape-sequence and adds its characters,
+ // Scans a Unicode escape-sequence and adds its characters,
// uninterpreted, to the current literal. Used for parsing RegExp
// flags.
bool ScanLiteralUnicodeEscape();
diff --git a/deps/v8/src/scopeinfo.cc b/deps/v8/src/scopeinfo.cc
index 0f36234701..25f02f6320 100644
--- a/deps/v8/src/scopeinfo.cc
+++ b/deps/v8/src/scopeinfo.cc
@@ -38,10 +38,10 @@ namespace v8 {
namespace internal {
-Handle<ScopeInfo> ScopeInfo::Create(Scope* scope) {
+Handle<ScopeInfo> ScopeInfo::Create(Scope* scope, Zone* zone) {
// Collect stack and context locals.
- ZoneList<Variable*> stack_locals(scope->StackLocalCount());
- ZoneList<Variable*> context_locals(scope->ContextLocalCount());
+ ZoneList<Variable*> stack_locals(scope->StackLocalCount(), zone);
+ ZoneList<Variable*> context_locals(scope->ContextLocalCount(), zone);
scope->CollectStackAndContextLocals(&stack_locals, &context_locals);
const int stack_local_count = stack_locals.length();
const int context_local_count = context_locals.length();
@@ -53,7 +53,7 @@ Handle<ScopeInfo> ScopeInfo::Create(Scope* scope) {
FunctionVariableInfo function_name_info;
VariableMode function_variable_mode;
if (scope->is_function_scope() && scope->function() != NULL) {
- Variable* var = scope->function()->var();
+ Variable* var = scope->function()->proxy()->var();
if (!var->is_used()) {
function_name_info = UNUSED;
} else if (var->IsContextSlot()) {
@@ -129,8 +129,8 @@ Handle<ScopeInfo> ScopeInfo::Create(Scope* scope) {
// If present, add the function variable name and its index.
ASSERT(index == scope_info->FunctionNameEntryIndex());
if (has_function_name) {
- int var_index = scope->function()->var()->index();
- scope_info->set(index++, *scope->function()->name());
+ int var_index = scope->function()->proxy()->var()->index();
+ scope_info->set(index++, *scope->function()->proxy()->name());
scope_info->set(index++, Smi::FromInt(var_index));
ASSERT(function_name_info != STACK ||
(var_index == scope_info->StackLocalCount() &&
@@ -142,7 +142,9 @@ Handle<ScopeInfo> ScopeInfo::Create(Scope* scope) {
ASSERT(index == scope_info->length());
ASSERT(scope->num_parameters() == scope_info->ParameterCount());
ASSERT(scope->num_stack_slots() == scope_info->StackSlotCount());
- ASSERT(scope->num_heap_slots() == scope_info->ContextLength());
+ ASSERT(scope->num_heap_slots() == scope_info->ContextLength() ||
+ (scope->num_heap_slots() == kVariablePartIndex &&
+ scope_info->ContextLength() == 0));
return scope_info;
}
diff --git a/deps/v8/src/scopes.cc b/deps/v8/src/scopes.cc
index c142c3d61a..ad6692e57f 100644
--- a/deps/v8/src/scopes.cc
+++ b/deps/v8/src/scopes.cc
@@ -57,7 +57,9 @@ static bool Match(void* key1, void* key2) {
}
-VariableMap::VariableMap() : ZoneHashMap(Match, 8) {}
+VariableMap::VariableMap(Zone* zone)
+ : ZoneHashMap(Match, 8, ZoneAllocationPolicy(zone)),
+ zone_(zone) {}
VariableMap::~VariableMap() {}
@@ -69,24 +71,26 @@ Variable* VariableMap::Declare(
Variable::Kind kind,
InitializationFlag initialization_flag,
Interface* interface) {
- Entry* p = ZoneHashMap::Lookup(name.location(), name->Hash(), true);
+ Entry* p = ZoneHashMap::Lookup(name.location(), name->Hash(), true,
+ ZoneAllocationPolicy(zone()));
if (p->value == NULL) {
// The variable has not been declared yet -> insert it.
ASSERT(p->key == name.location());
- p->value = new Variable(scope,
- name,
- mode,
- is_valid_lhs,
- kind,
- initialization_flag,
- interface);
+ p->value = new(zone()) Variable(scope,
+ name,
+ mode,
+ is_valid_lhs,
+ kind,
+ initialization_flag,
+ interface);
}
return reinterpret_cast<Variable*>(p->value);
}
Variable* VariableMap::Lookup(Handle<String> name) {
- Entry* p = ZoneHashMap::Lookup(name.location(), name->Hash(), false);
+ Entry* p = ZoneHashMap::Lookup(name.location(), name->Hash(), false,
+ ZoneAllocationPolicy(NULL));
if (p != NULL) {
ASSERT(*reinterpret_cast<String**>(p->key) == *name);
ASSERT(p->value != NULL);
@@ -99,18 +103,19 @@ Variable* VariableMap::Lookup(Handle<String> name) {
// ----------------------------------------------------------------------------
// Implementation of Scope
-Scope::Scope(Scope* outer_scope, ScopeType type)
+Scope::Scope(Scope* outer_scope, ScopeType type, Zone* zone)
: isolate_(Isolate::Current()),
- inner_scopes_(4),
- variables_(),
- temps_(4),
- params_(4),
- unresolved_(16),
- decls_(4),
+ inner_scopes_(4, zone),
+ variables_(zone),
+ temps_(4, zone),
+ params_(4, zone),
+ unresolved_(16, zone),
+ decls_(4, zone),
interface_(FLAG_harmony_modules &&
(type == MODULE_SCOPE || type == GLOBAL_SCOPE)
- ? Interface::NewModule() : NULL),
- already_resolved_(false) {
+ ? Interface::NewModule(zone) : NULL),
+ already_resolved_(false),
+ zone_(zone) {
SetDefaults(type, outer_scope, Handle<ScopeInfo>::null());
// At some point we might want to provide outer scopes to
// eval scopes (by walking the stack and reading the scope info).
@@ -122,16 +127,18 @@ Scope::Scope(Scope* outer_scope, ScopeType type)
Scope::Scope(Scope* inner_scope,
ScopeType type,
- Handle<ScopeInfo> scope_info)
+ Handle<ScopeInfo> scope_info,
+ Zone* zone)
: isolate_(Isolate::Current()),
- inner_scopes_(4),
- variables_(),
- temps_(4),
- params_(4),
- unresolved_(16),
- decls_(4),
+ inner_scopes_(4, zone),
+ variables_(zone),
+ temps_(4, zone),
+ params_(4, zone),
+ unresolved_(16, zone),
+ decls_(4, zone),
interface_(NULL),
- already_resolved_(true) {
+ already_resolved_(true),
+ zone_(zone) {
SetDefaults(type, NULL, scope_info);
if (!scope_info.is_null()) {
num_heap_slots_ = scope_info_->ContextLength();
@@ -143,16 +150,17 @@ Scope::Scope(Scope* inner_scope,
}
-Scope::Scope(Scope* inner_scope, Handle<String> catch_variable_name)
+Scope::Scope(Scope* inner_scope, Handle<String> catch_variable_name, Zone* zone)
: isolate_(Isolate::Current()),
- inner_scopes_(1),
- variables_(),
- temps_(0),
- params_(0),
- unresolved_(0),
- decls_(0),
+ inner_scopes_(1, zone),
+ variables_(zone),
+ temps_(0, zone),
+ params_(0, zone),
+ unresolved_(0, zone),
+ decls_(0, zone),
interface_(NULL),
- already_resolved_(true) {
+ already_resolved_(true),
+ zone_(zone) {
SetDefaults(CATCH_SCOPE, NULL, Handle<ScopeInfo>::null());
AddInnerScope(inner_scope);
++num_var_or_const_;
@@ -200,16 +208,18 @@ void Scope::SetDefaults(ScopeType type,
}
-Scope* Scope::DeserializeScopeChain(Context* context, Scope* global_scope) {
+Scope* Scope::DeserializeScopeChain(Context* context, Scope* global_scope,
+ Zone* zone) {
// Reconstruct the outer scope chain from a closure's context chain.
Scope* current_scope = NULL;
Scope* innermost_scope = NULL;
bool contains_with = false;
while (!context->IsGlobalContext()) {
if (context->IsWithContext()) {
- Scope* with_scope = new Scope(current_scope,
- WITH_SCOPE,
- Handle<ScopeInfo>::null());
+ Scope* with_scope = new(zone) Scope(current_scope,
+ WITH_SCOPE,
+ Handle<ScopeInfo>::null(),
+ zone);
current_scope = with_scope;
// All the inner scopes are inside a with.
contains_with = true;
@@ -218,18 +228,21 @@ Scope* Scope::DeserializeScopeChain(Context* context, Scope* global_scope) {
}
} else if (context->IsFunctionContext()) {
ScopeInfo* scope_info = context->closure()->shared()->scope_info();
- current_scope = new Scope(current_scope,
- FUNCTION_SCOPE,
- Handle<ScopeInfo>(scope_info));
+ current_scope = new(zone) Scope(current_scope,
+ FUNCTION_SCOPE,
+ Handle<ScopeInfo>(scope_info),
+ zone);
} else if (context->IsBlockContext()) {
ScopeInfo* scope_info = ScopeInfo::cast(context->extension());
- current_scope = new Scope(current_scope,
- BLOCK_SCOPE,
- Handle<ScopeInfo>(scope_info));
+ current_scope = new(zone) Scope(current_scope,
+ BLOCK_SCOPE,
+ Handle<ScopeInfo>(scope_info),
+ zone);
} else {
ASSERT(context->IsCatchContext());
String* name = String::cast(context->extension());
- current_scope = new Scope(current_scope, Handle<String>(name));
+ current_scope = new(zone) Scope(
+ current_scope, Handle<String>(name), zone);
}
if (contains_with) current_scope->RecordWithStatement();
if (innermost_scope == NULL) innermost_scope = current_scope;
@@ -305,7 +318,7 @@ void Scope::Initialize() {
// Add this scope as a new inner scope of the outer scope.
if (outer_scope_ != NULL) {
- outer_scope_->inner_scopes_.Add(this);
+ outer_scope_->inner_scopes_.Add(this, zone());
scope_inside_with_ = outer_scope_->scope_inside_with_ || is_with_scope();
} else {
scope_inside_with_ = is_with_scope();
@@ -370,7 +383,7 @@ Scope* Scope::FinalizeBlockScope() {
// Move unresolved variables
for (int i = 0; i < unresolved_.length(); i++) {
- outer_scope()->unresolved_.Add(unresolved_[i]);
+ outer_scope()->unresolved_.Add(unresolved_[i], zone());
}
return NULL;
@@ -388,38 +401,42 @@ Variable* Scope::LocalLookup(Handle<String> name) {
// Check context slot lookup.
VariableMode mode;
+ Variable::Location location = Variable::CONTEXT;
InitializationFlag init_flag;
int index = scope_info_->ContextSlotIndex(*name, &mode, &init_flag);
if (index < 0) {
// Check parameters.
- mode = VAR;
- init_flag = kCreatedInitialized;
index = scope_info_->ParameterIndex(*name);
if (index < 0) return NULL;
+
+ mode = DYNAMIC;
+ location = Variable::LOOKUP;
+ init_flag = kCreatedInitialized;
}
- Variable* var =
- variables_.Declare(this,
- name,
- mode,
- true,
- Variable::NORMAL,
- init_flag);
- var->AllocateTo(Variable::CONTEXT, index);
+ Variable* var = variables_.Declare(this, name, mode, true, Variable::NORMAL,
+ init_flag);
+ var->AllocateTo(location, index);
return var;
}
Variable* Scope::LookupFunctionVar(Handle<String> name,
AstNodeFactory<AstNullVisitor>* factory) {
- if (function_ != NULL && function_->name().is_identical_to(name)) {
- return function_->var();
+ if (function_ != NULL && function_->proxy()->name().is_identical_to(name)) {
+ return function_->proxy()->var();
} else if (!scope_info_.is_null()) {
// If we are backed by a scope info, try to lookup the variable there.
VariableMode mode;
int index = scope_info_->FunctionContextSlotIndex(*name, &mode);
if (index < 0) return NULL;
- Variable* var = DeclareFunctionVar(name, mode, factory);
+ Variable* var = new(zone()) Variable(
+ this, name, mode, true /* is valid LHS */,
+ Variable::NORMAL, kCreatedInitialized);
+ VariableProxy* proxy = factory->NewVariableProxy(var);
+ VariableDeclaration* declaration =
+ factory->NewVariableDeclaration(proxy, mode, this);
+ DeclareFunctionVar(declaration);
var->AllocateTo(Variable::CONTEXT, index);
return var;
} else {
@@ -442,9 +459,9 @@ Variable* Scope::Lookup(Handle<String> name) {
void Scope::DeclareParameter(Handle<String> name, VariableMode mode) {
ASSERT(!already_resolved());
ASSERT(is_function_scope());
- Variable* var = variables_.Declare(
- this, name, mode, true, Variable::NORMAL, kCreatedInitialized);
- params_.Add(var);
+ Variable* var = variables_.Declare(this, name, mode, true, Variable::NORMAL,
+ kCreatedInitialized);
+ params_.Add(var, zone());
}
@@ -491,19 +508,19 @@ void Scope::RemoveUnresolved(VariableProxy* var) {
Variable* Scope::NewTemporary(Handle<String> name) {
ASSERT(!already_resolved());
- Variable* var = new Variable(this,
- name,
- TEMPORARY,
- true,
- Variable::NORMAL,
- kCreatedInitialized);
- temps_.Add(var);
+ Variable* var = new(zone()) Variable(this,
+ name,
+ TEMPORARY,
+ true,
+ Variable::NORMAL,
+ kCreatedInitialized);
+ temps_.Add(var, zone());
return var;
}
void Scope::AddDeclaration(Declaration* declaration) {
- decls_.Add(declaration);
+ decls_.Add(declaration, zone());
}
@@ -579,7 +596,7 @@ void Scope::CollectStackAndContextLocals(ZoneList<Variable*>* stack_locals,
Variable* var = temps_[i];
if (var->is_used()) {
ASSERT(var->IsStackLocal());
- stack_locals->Add(var);
+ stack_locals->Add(var, zone());
}
}
@@ -590,9 +607,9 @@ void Scope::CollectStackAndContextLocals(ZoneList<Variable*>* stack_locals,
Variable* var = reinterpret_cast<Variable*>(p->value);
if (var->is_used()) {
if (var->IsStackLocal()) {
- stack_locals->Add(var);
+ stack_locals->Add(var, zone());
} else if (var->IsContextSlot()) {
- context_locals->Add(var);
+ context_locals->Add(var, zone());
}
}
}
@@ -690,7 +707,7 @@ Scope* Scope::DeclarationScope() {
Handle<ScopeInfo> Scope::GetScopeInfo() {
if (scope_info_.is_null()) {
- scope_info_ = ScopeInfo::Create(this);
+ scope_info_ = ScopeInfo::Create(this, zone());
}
return scope_info_;
}
@@ -811,7 +828,7 @@ void Scope::Print(int n) {
// Function name, if any (named function literals, only).
if (function_ != NULL) {
Indent(n1, "// (local) function name: ");
- PrintName(function_->name());
+ PrintName(function_->proxy()->name());
PrintF("\n");
}
@@ -844,7 +861,7 @@ void Scope::Print(int n) {
// Print locals.
Indent(n1, "// function var\n");
if (function_ != NULL) {
- PrintVar(n1, function_->var());
+ PrintVar(n1, function_->proxy()->var());
}
Indent(n1, "// temporary vars\n");
@@ -876,7 +893,7 @@ void Scope::Print(int n) {
Variable* Scope::NonLocal(Handle<String> name, VariableMode mode) {
- if (dynamics_ == NULL) dynamics_ = new DynamicScopePart();
+ if (dynamics_ == NULL) dynamics_ = new(zone()) DynamicScopePart(zone());
VariableMap* map = dynamics_->GetMap(mode);
Variable* var = map->Lookup(name);
if (var == NULL) {
@@ -969,10 +986,14 @@ bool Scope::ResolveVariable(CompilationInfo* info,
break;
case BOUND_EVAL_SHADOWED:
- // We found a variable variable binding that might be shadowed
- // by 'eval' introduced variable bindings.
+ // We either found a variable binding that might be shadowed by eval or
+ // gave up on it (e.g. by encountering a local with the same name in the
+ // outer scope which was not promoted to a context; this can happen if we
+ // use the debugger to evaluate arbitrary expressions at a break point).
if (var->is_global()) {
var = NonLocal(proxy->name(), DYNAMIC_GLOBAL);
+ } else if (var->is_dynamic()) {
+ var = NonLocal(proxy->name(), DYNAMIC);
} else {
Variable* invalidated = var;
var = NonLocal(proxy->name(), DYNAMIC_LOCAL);
@@ -1006,7 +1027,7 @@ bool Scope::ResolveVariable(CompilationInfo* info,
if (FLAG_print_interface_details)
PrintF("# Resolve %s:\n", var->name()->ToAsciiArray());
#endif
- proxy->interface()->Unify(var->interface(), &ok);
+ proxy->interface()->Unify(var->interface(), zone(), &ok);
if (!ok) {
#ifdef DEBUG
if (FLAG_print_interfaces) {
@@ -1106,7 +1127,7 @@ bool Scope::MustAllocateInContext(Variable* var) {
// Exceptions: temporary variables are never allocated in a context;
// catch-bound variables are always allocated in a context.
if (var->mode() == TEMPORARY) return false;
- if (is_catch_scope() || is_block_scope()) return true;
+ if (is_catch_scope() || is_block_scope() || is_module_scope()) return true;
return var->has_forced_context_allocation() ||
scope_calls_eval_ ||
inner_scope_calls_eval_ ||
@@ -1224,7 +1245,7 @@ void Scope::AllocateNonParameterLocals() {
// because of the current ScopeInfo implementation (see
// ScopeInfo::ScopeInfo(FunctionScope* scope) constructor).
if (function_ != NULL) {
- AllocateNonParameterLocal(function_->var());
+ AllocateNonParameterLocal(function_->proxy()->var());
}
}
@@ -1250,7 +1271,8 @@ void Scope::AllocateVariablesRecursively() {
// Force allocation of a context for this scope if necessary. For a 'with'
// scope and for a function scope that makes an 'eval' call we need a context,
// even if no local variables were statically allocated in the scope.
- bool must_have_context = is_with_scope() ||
+ // Likewise for modules.
+ bool must_have_context = is_with_scope() || is_module_scope() ||
(is_function_scope() && calls_eval());
// If we didn't allocate any locals in the local context, then we only
@@ -1266,14 +1288,14 @@ void Scope::AllocateVariablesRecursively() {
int Scope::StackLocalCount() const {
return num_stack_slots() -
- (function_ != NULL && function_->var()->IsStackLocal() ? 1 : 0);
+ (function_ != NULL && function_->proxy()->var()->IsStackLocal() ? 1 : 0);
}
int Scope::ContextLocalCount() const {
if (num_heap_slots() == 0) return 0;
return num_heap_slots() - Context::MIN_CONTEXT_SLOTS -
- (function_ != NULL && function_->var()->IsContextSlot() ? 1 : 0);
+ (function_ != NULL && function_->proxy()->var()->IsContextSlot() ? 1 : 0);
}
} } // namespace v8::internal
diff --git a/deps/v8/src/scopes.h b/deps/v8/src/scopes.h
index 174dbdbf04..decd74d232 100644
--- a/deps/v8/src/scopes.h
+++ b/deps/v8/src/scopes.h
@@ -40,7 +40,7 @@ class CompilationInfo;
// A hash map to support fast variable declaration and lookup.
class VariableMap: public ZoneHashMap {
public:
- VariableMap();
+ explicit VariableMap(Zone* zone);
virtual ~VariableMap();
@@ -53,6 +53,11 @@ class VariableMap: public ZoneHashMap {
Interface* interface = Interface::NewValue());
Variable* Lookup(Handle<String> name);
+
+ Zone* zone() const { return zone_; }
+
+ private:
+ Zone* zone_;
};
@@ -62,14 +67,19 @@ class VariableMap: public ZoneHashMap {
// and setup time for scopes that don't need them.
class DynamicScopePart : public ZoneObject {
public:
+ explicit DynamicScopePart(Zone* zone) {
+ for (int i = 0; i < 3; i++)
+ maps_[i] = new(zone->New(sizeof(VariableMap))) VariableMap(zone);
+ }
+
VariableMap* GetMap(VariableMode mode) {
int index = mode - DYNAMIC;
ASSERT(index >= 0 && index < 3);
- return &maps_[index];
+ return maps_[index];
}
private:
- VariableMap maps_[3];
+ VariableMap *maps_[3];
};
@@ -87,14 +97,15 @@ class Scope: public ZoneObject {
// ---------------------------------------------------------------------------
// Construction
- Scope(Scope* outer_scope, ScopeType type);
+ Scope(Scope* outer_scope, ScopeType type, Zone* zone);
// Compute top scope and allocate variables. For lazy compilation the top
// scope only contains the single lazily compiled function, so this
// doesn't re-allocate variables repeatedly.
static bool Analyze(CompilationInfo* info);
- static Scope* DeserializeScopeChain(Context* context, Scope* global_scope);
+ static Scope* DeserializeScopeChain(Context* context, Scope* global_scope,
+ Zone* zone);
// The scope name is only used for printing/debugging.
void SetScopeName(Handle<String> scope_name) { scope_name_ = scope_name; }
@@ -106,6 +117,8 @@ class Scope: public ZoneObject {
// tree and its children are reparented.
Scope* FinalizeBlockScope();
+ Zone* zone() const { return zone_; }
+
// ---------------------------------------------------------------------------
// Declarations
@@ -126,15 +139,9 @@ class Scope: public ZoneObject {
// Declare the function variable for a function literal. This variable
// is in an intermediate scope between this function scope and the
// outer scope. Only possible for function scopes; at most one variable.
- template<class Visitor>
- Variable* DeclareFunctionVar(Handle<String> name,
- VariableMode mode,
- AstNodeFactory<Visitor>* factory) {
- ASSERT(is_function_scope() && function_ == NULL);
- Variable* function_var = new Variable(
- this, name, mode, true, Variable::NORMAL, kCreatedInitialized);
- function_ = factory->NewVariableProxy(function_var);
- return function_var;
+ void DeclareFunctionVar(VariableDeclaration* declaration) {
+ ASSERT(is_function_scope());
+ function_ = declaration;
}
// Declare a parameter in this scope. When there are duplicated
@@ -167,7 +174,7 @@ class Scope: public ZoneObject {
ASSERT(!already_resolved());
VariableProxy* proxy =
factory->NewVariableProxy(name, false, position, interface);
- unresolved_.Add(proxy);
+ unresolved_.Add(proxy, zone_);
return proxy;
}
@@ -312,9 +319,8 @@ class Scope: public ZoneObject {
Variable* receiver() { return receiver_; }
// The variable holding the function literal for named function
- // literals, or NULL.
- // Only valid for function scopes.
- VariableProxy* function() const {
+ // literals, or NULL. Only valid for function scopes.
+ VariableDeclaration* function() const {
ASSERT(is_function_scope());
return function_;
}
@@ -449,7 +455,7 @@ class Scope: public ZoneObject {
// Convenience variable.
Variable* receiver_;
// Function variable, if any; function scopes only.
- VariableProxy* function_;
+ VariableDeclaration* function_;
// Convenience variable; function scopes only.
Variable* arguments_;
// Interface; module scopes only.
@@ -588,14 +594,15 @@ class Scope: public ZoneObject {
private:
// Construct a scope based on the scope info.
- Scope(Scope* inner_scope, ScopeType type, Handle<ScopeInfo> scope_info);
+ Scope(Scope* inner_scope, ScopeType type, Handle<ScopeInfo> scope_info,
+ Zone* zone);
// Construct a catch scope with a binding for the name.
- Scope(Scope* inner_scope, Handle<String> catch_variable_name);
+ Scope(Scope* inner_scope, Handle<String> catch_variable_name, Zone* zone);
void AddInnerScope(Scope* inner_scope) {
if (inner_scope != NULL) {
- inner_scopes_.Add(inner_scope);
+ inner_scopes_.Add(inner_scope, zone_);
inner_scope->outer_scope_ = this;
}
}
@@ -603,6 +610,8 @@ class Scope: public ZoneObject {
void SetDefaults(ScopeType type,
Scope* outer_scope,
Handle<ScopeInfo> scope_info);
+
+ Zone* zone_;
};
} } // namespace v8::internal
diff --git a/deps/v8/src/serialize.cc b/deps/v8/src/serialize.cc
index 01d5f1c7bc..cf8e5e18e6 100644
--- a/deps/v8/src/serialize.cc
+++ b/deps/v8/src/serialize.cc
@@ -244,7 +244,7 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) {
"Isolate::" #hacker_name "_address",
FOR_EACH_ISOLATE_ADDRESS_NAME(BUILD_NAME_LITERAL)
NULL
-#undef C
+#undef BUILD_NAME_LITERAL
};
for (uint16_t i = 0; i < Isolate::kIsolateAddressCount; ++i) {
diff --git a/deps/v8/src/small-pointer-list.h b/deps/v8/src/small-pointer-list.h
index 6c5ce890d2..295a06f26a 100644
--- a/deps/v8/src/small-pointer-list.h
+++ b/deps/v8/src/small-pointer-list.h
@@ -44,22 +44,22 @@ class SmallPointerList {
public:
SmallPointerList() : data_(kEmptyTag) {}
- explicit SmallPointerList(int capacity) : data_(kEmptyTag) {
- Reserve(capacity);
+ SmallPointerList(int capacity, Zone* zone) : data_(kEmptyTag) {
+ Reserve(capacity, zone);
}
- void Reserve(int capacity) {
+ void Reserve(int capacity, Zone* zone) {
if (capacity < 2) return;
if ((data_ & kTagMask) == kListTag) {
if (list()->capacity() >= capacity) return;
int old_length = list()->length();
- list()->AddBlock(NULL, capacity - list()->capacity());
+ list()->AddBlock(NULL, capacity - list()->capacity(), zone);
list()->Rewind(old_length);
return;
}
- PointerList* list = new PointerList(capacity);
+ PointerList* list = new(zone) PointerList(capacity, zone);
if ((data_ & kTagMask) == kSingletonTag) {
- list->Add(single_value());
+ list->Add(single_value(), zone);
}
ASSERT(IsAligned(reinterpret_cast<intptr_t>(list), kPointerAlignment));
data_ = reinterpret_cast<intptr_t>(list) | kListTag;
@@ -69,6 +69,12 @@ class SmallPointerList {
data_ = kEmptyTag;
}
+ void Sort() {
+ if ((data_ & kTagMask) == kListTag) {
+ list()->Sort(compare_value);
+ }
+ }
+
bool is_empty() const { return length() == 0; }
int length() const {
@@ -77,21 +83,21 @@ class SmallPointerList {
return list()->length();
}
- void Add(T* pointer) {
+ void Add(T* pointer, Zone* zone) {
ASSERT(IsAligned(reinterpret_cast<intptr_t>(pointer), kPointerAlignment));
if ((data_ & kTagMask) == kEmptyTag) {
data_ = reinterpret_cast<intptr_t>(pointer) | kSingletonTag;
return;
}
if ((data_ & kTagMask) == kSingletonTag) {
- PointerList* list = new PointerList(2);
- list->Add(single_value());
- list->Add(pointer);
+ PointerList* list = new(zone) PointerList(2, zone);
+ list->Add(single_value(), zone);
+ list->Add(pointer, zone);
ASSERT(IsAligned(reinterpret_cast<intptr_t>(list), kPointerAlignment));
data_ = reinterpret_cast<intptr_t>(list) | kListTag;
return;
}
- list()->Add(pointer);
+ list()->Add(pointer, zone);
}
// Note: returns T* and not T*& (unlike List from list.h).
@@ -159,6 +165,10 @@ class SmallPointerList {
private:
typedef ZoneList<T*> PointerList;
+ static int compare_value(T* const* a, T* const* b) {
+ return Compare<T>(**a, **b);
+ }
+
static const intptr_t kEmptyTag = 1;
static const intptr_t kSingletonTag = 0;
static const intptr_t kListTag = 2;
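
The new Sort() orders the backing list by the pointed-to values via compare_value, which dereferences both pointers before comparing. The same adapter idea in a minimal standalone form, using the standard library instead of SmallPointerList:

#include <algorithm>
#include <cstdio>

// Compare the values the pointers refer to, not the pointer addresses -
// the role compare_value plays for SmallPointerList::Sort().
static bool CompareByValue(const int* a, const int* b) { return *a < *b; }

int main() {
  int x = 3, y = 1, z = 2;
  int* pointers[] = { &x, &y, &z };
  std::sort(pointers, pointers + 3, CompareByValue);
  for (int i = 0; i < 3; i++) std::printf("%d ", *pointers[i]);  // 1 2 3
  std::printf("\n");
  return 0;
}
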
diff --git a/deps/v8/src/spaces-inl.h b/deps/v8/src/spaces-inl.h
index 3709009c9b..ed78fc7a15 100644
--- a/deps/v8/src/spaces-inl.h
+++ b/deps/v8/src/spaces-inl.h
@@ -164,7 +164,7 @@ Page* Page::Initialize(Heap* heap,
Executability executable,
PagedSpace* owner) {
Page* page = reinterpret_cast<Page*>(chunk);
- ASSERT(chunk->size() == static_cast<size_t>(kPageSize));
+ ASSERT(chunk->size() <= static_cast<size_t>(kPageSize));
ASSERT(chunk->owner() == owner);
owner->IncreaseCapacity(page->area_size());
owner->Free(page->area_start(), page->area_size());
@@ -295,11 +295,27 @@ MaybeObject* PagedSpace::AllocateRaw(int size_in_bytes) {
MaybeObject* NewSpace::AllocateRaw(int size_in_bytes) {
Address old_top = allocation_info_.top;
+#ifdef DEBUG
+ // If we are stressing compaction we waste some memory in new space
+ // in order to get more frequent GCs.
+ if (FLAG_stress_compaction && !HEAP->linear_allocation()) {
+ if (allocation_info_.limit - old_top >= size_in_bytes * 4) {
+ int filler_size = size_in_bytes * 4;
+ for (int i = 0; i < filler_size; i += kPointerSize) {
+ *(reinterpret_cast<Object**>(old_top + i)) =
+ HEAP->one_pointer_filler_map();
+ }
+ old_top += filler_size;
+ allocation_info_.top += filler_size;
+ }
+ }
+#endif
+
if (allocation_info_.limit - old_top < size_in_bytes) {
return SlowAllocateRaw(size_in_bytes);
}
- Object* obj = HeapObject::FromAddress(allocation_info_.top);
+ Object* obj = HeapObject::FromAddress(old_top);
allocation_info_.top += size_in_bytes;
ASSERT_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_);
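NewSpace::AllocateRaw above is a bump-pointer allocator: the fast path is one limit check plus a pointer increment, and the new DEBUG block simply burns four times the requested size as filler so --stress-compaction reaches the limit (and triggers a GC) sooner. A minimal sketch of that shape, assuming a flat byte region instead of V8's semispaces (BumpAllocator is an illustrative name):

#include <cstddef>
#include <cstring>

// Bump-pointer allocation over a flat region: the fast path is one compare
// and one add, which is the shape of NewSpace::AllocateRaw above. Returns
// NULL where V8 would fall back to SlowAllocateRaw (i.e. trigger a scavenge).
class BumpAllocator {
 public:
  BumpAllocator(char* base, size_t size) : top_(base), limit_(base + size) {}

  void* Allocate(size_t size_in_bytes) {
    if (static_cast<size_t>(limit_ - top_) < size_in_bytes) {
      return NULL;  // Out of linear space; the caller must collect or grow.
    }
    void* result = top_;
    top_ += size_in_bytes;
    return result;
  }

  // Debug-style stress helper, mirroring the FLAG_stress_compaction branch:
  // waste four times the requested size as filler so the limit is reached
  // (and a collection is forced) much earlier.
  void* AllocateStressed(size_t size_in_bytes) {
    size_t filler_size = size_in_bytes * 4;
    if (static_cast<size_t>(limit_ - top_) >= filler_size) {
      std::memset(top_, 0, filler_size);  // stand-in for one-pointer fillers
      top_ += filler_size;
    }
    return Allocate(size_in_bytes);
  }

 private:
  char* top_;
  char* limit_;
};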
diff --git a/deps/v8/src/spaces.cc b/deps/v8/src/spaces.cc
index 929a45fb21..a0c8f2cba1 100644
--- a/deps/v8/src/spaces.cc
+++ b/deps/v8/src/spaces.cc
@@ -572,11 +572,10 @@ MemoryChunk* MemoryAllocator::AllocateChunk(intptr_t body_size,
}
-Page* MemoryAllocator::AllocatePage(PagedSpace* owner,
+Page* MemoryAllocator::AllocatePage(intptr_t size,
+ PagedSpace* owner,
Executability executable) {
- MemoryChunk* chunk = AllocateChunk(owner->AreaSize(),
- executable,
- owner);
+ MemoryChunk* chunk = AllocateChunk(size, executable, owner);
if (chunk == NULL) return NULL;
@@ -585,8 +584,8 @@ Page* MemoryAllocator::AllocatePage(PagedSpace* owner,
LargePage* MemoryAllocator::AllocateLargePage(intptr_t object_size,
- Executability executable,
- Space* owner) {
+ Space* owner,
+ Executability executable) {
MemoryChunk* chunk = AllocateChunk(object_size, executable, owner);
if (chunk == NULL) return NULL;
return LargePage::Initialize(isolate_->heap(), chunk);
@@ -840,7 +839,6 @@ MaybeObject* PagedSpace::FindObject(Address addr) {
bool PagedSpace::CanExpand() {
ASSERT(max_capacity_ % AreaSize() == 0);
- ASSERT(Capacity() % AreaSize() == 0);
if (Capacity() == max_capacity_) return false;
@@ -855,8 +853,14 @@ bool PagedSpace::CanExpand() {
bool PagedSpace::Expand() {
if (!CanExpand()) return false;
- Page* p = heap()->isolate()->memory_allocator()->
- AllocatePage(this, executable());
+ intptr_t size = AreaSize();
+
+ if (anchor_.next_page() == &anchor_) {
+ size = SizeOfFirstPage();
+ }
+
+ Page* p = heap()->isolate()->memory_allocator()->AllocatePage(
+ size, this, executable());
if (p == NULL) return false;
ASSERT(Capacity() <= max_capacity_);
@@ -867,6 +871,38 @@ bool PagedSpace::Expand() {
}
+intptr_t PagedSpace::SizeOfFirstPage() {
+ int size = 0;
+ switch (identity()) {
+ case OLD_POINTER_SPACE:
+ size = 64 * kPointerSize * KB;
+ break;
+ case OLD_DATA_SPACE:
+ size = 192 * KB;
+ break;
+ case MAP_SPACE:
+ size = 128 * KB;
+ break;
+ case CELL_SPACE:
+ size = 96 * KB;
+ break;
+ case CODE_SPACE:
+ if (kPointerSize == 8) {
+ // On x64 we allocate code pages in a special way (from the reserved
+ // 2Byte area). That part of the code is not yet upgraded to handle
+ // small pages.
+ size = AreaSize();
+ } else {
+ size = 384 * KB;
+ }
+ break;
+ default:
+ UNREACHABLE();
+ }
+ return Min(size, AreaSize());
+}
+
+
int PagedSpace::CountTotalPages() {
PageIterator it(this);
int count = 0;
@@ -910,7 +946,6 @@ void PagedSpace::ReleasePage(Page* page) {
}
ASSERT(Capacity() > 0);
- ASSERT(Capacity() % AreaSize() == 0);
accounting_stats_.ShrinkSpace(AreaSize());
}
@@ -1049,6 +1084,7 @@ bool NewSpace::SetUp(int reserved_semispace_capacity,
if (!to_space_.Commit()) {
return false;
}
+ ASSERT(!from_space_.is_committed()); // No need to use memory yet.
start_ = chunk_base_;
address_mask_ = ~(2 * reserved_semispace_capacity - 1);
@@ -2259,8 +2295,6 @@ bool PagedSpace::AdvanceSweeper(intptr_t bytes_to_sweep) {
first_unswept_page_ = p;
}
- heap()->LowerOldGenLimits(freed_bytes);
-
heap()->FreeQueuedChunks();
return IsSweepingComplete();
@@ -2590,7 +2624,7 @@ MaybeObject* LargeObjectSpace::AllocateRaw(int object_size,
}
LargePage* page = heap()->isolate()->memory_allocator()->
- AllocateLargePage(object_size, executable, this);
+ AllocateLargePage(object_size, this, executable);
if (page == NULL) return Failure::RetryAfterGC(identity());
ASSERT(page->area_size() >= object_size);
diff --git a/deps/v8/src/spaces.h b/deps/v8/src/spaces.h
index 9e74a88a33..b0ecc5d004 100644
--- a/deps/v8/src/spaces.h
+++ b/deps/v8/src/spaces.h
@@ -637,8 +637,10 @@ class MemoryChunk {
friend class MemoryAllocator;
};
+
STATIC_CHECK(sizeof(MemoryChunk) <= MemoryChunk::kHeaderSize);
+
// -----------------------------------------------------------------------------
// A page is a memory chunk of a size 1MB. Large object pages may be larger.
//
@@ -950,11 +952,11 @@ class MemoryAllocator {
void TearDown();
- Page* AllocatePage(PagedSpace* owner, Executability executable);
+ Page* AllocatePage(
+ intptr_t size, PagedSpace* owner, Executability executable);
- LargePage* AllocateLargePage(intptr_t object_size,
- Executability executable,
- Space* owner);
+ LargePage* AllocateLargePage(
+ intptr_t object_size, Space* owner, Executability executable);
void Free(MemoryChunk* chunk);
@@ -1520,6 +1522,10 @@ class PagedSpace : public Space {
return size_in_bytes - wasted;
}
+ void ResetFreeList() {
+ free_list_.Reset();
+ }
+
// Set space allocation info.
void SetTop(Address top, Address limit) {
ASSERT(top == limit ||
@@ -1627,6 +1633,8 @@ class PagedSpace : public Space {
// Maximum capacity of this space.
intptr_t max_capacity_;
+ intptr_t SizeOfFirstPage();
+
// Accounting information for this space.
AllocationStats accounting_stats_;
@@ -2369,11 +2377,6 @@ class FixedSpace : public PagedSpace {
// Prepares for a mark-compact GC.
virtual void PrepareForMarkCompact();
- protected:
- void ResetFreeList() {
- free_list_.Reset();
- }
-
private:
// The size of objects in this space.
int object_size_in_bytes_;
diff --git a/deps/v8/src/splay-tree-inl.h b/deps/v8/src/splay-tree-inl.h
index 4640ed5b08..4eca71d100 100644
--- a/deps/v8/src/splay-tree-inl.h
+++ b/deps/v8/src/splay-tree-inl.h
@@ -42,10 +42,11 @@ SplayTree<Config, Allocator>::~SplayTree() {
template<typename Config, class Allocator>
-bool SplayTree<Config, Allocator>::Insert(const Key& key, Locator* locator) {
+bool SplayTree<Config, Allocator>::Insert(const Key& key,
+ Locator* locator) {
if (is_empty()) {
// If the tree is empty, insert the new node.
- root_ = new Node(key, Config::NoValue());
+ root_ = new(allocator_) Node(key, Config::NoValue());
} else {
// Splay on the key to move the last node on the search path
// for the key to the root of the tree.
@@ -57,7 +58,7 @@ bool SplayTree<Config, Allocator>::Insert(const Key& key, Locator* locator) {
return false;
}
// Insert the new node.
- Node* node = new Node(key, Config::NoValue());
+ Node* node = new(allocator_) Node(key, Config::NoValue());
InsertInternal(cmp, node);
}
locator->bind(root_);
@@ -293,13 +294,13 @@ void SplayTree<Config, Allocator>::ForEach(Callback* callback) {
template <typename Config, class Allocator> template <class Callback>
void SplayTree<Config, Allocator>::ForEachNode(Callback* callback) {
// Pre-allocate some space for tiny trees.
- List<Node*, Allocator> nodes_to_visit(10);
- if (root_ != NULL) nodes_to_visit.Add(root_);
+ List<Node*, Allocator> nodes_to_visit(10, allocator_);
+ if (root_ != NULL) nodes_to_visit.Add(root_, allocator_);
int pos = 0;
while (pos < nodes_to_visit.length()) {
Node* node = nodes_to_visit[pos++];
- if (node->left() != NULL) nodes_to_visit.Add(node->left());
- if (node->right() != NULL) nodes_to_visit.Add(node->right());
+ if (node->left() != NULL) nodes_to_visit.Add(node->left(), allocator_);
+ if (node->right() != NULL) nodes_to_visit.Add(node->right(), allocator_);
callback->Call(node);
}
}
diff --git a/deps/v8/src/splay-tree.h b/deps/v8/src/splay-tree.h
index 72231e4d2a..388f9b5429 100644
--- a/deps/v8/src/splay-tree.h
+++ b/deps/v8/src/splay-tree.h
@@ -50,7 +50,7 @@ namespace internal {
// Forward defined as
// template <typename Config, class Allocator = FreeStoreAllocationPolicy>
// class SplayTree;
-template <typename Config, class Allocator>
+template <typename Config, class AllocationPolicy>
class SplayTree {
public:
typedef typename Config::Key Key;
@@ -58,13 +58,17 @@ class SplayTree {
class Locator;
- SplayTree() : root_(NULL) { }
+ SplayTree(AllocationPolicy allocator = AllocationPolicy())
+ : root_(NULL), allocator_(allocator) { }
~SplayTree();
- INLINE(void* operator new(size_t size)) {
- return Allocator::New(static_cast<int>(size));
+ INLINE(void* operator new(size_t size,
+ AllocationPolicy allocator = AllocationPolicy())) {
+ return allocator.New(static_cast<int>(size));
+ }
+ INLINE(void operator delete(void* p, size_t)) {
+ AllocationPolicy::Delete(p);
}
- INLINE(void operator delete(void* p, size_t)) { return Allocator::Delete(p); }
// Inserts the given key in this tree with the given value. Returns
// true if a node was inserted, otherwise false. If found the locator
@@ -112,11 +116,11 @@ class SplayTree {
left_(NULL),
right_(NULL) { }
- INLINE(void* operator new(size_t size)) {
- return Allocator::New(static_cast<int>(size));
+ INLINE(void* operator new(size_t size, AllocationPolicy allocator)) {
+ return allocator.New(static_cast<int>(size));
}
INLINE(void operator delete(void* p, size_t)) {
- return Allocator::Delete(p);
+ return AllocationPolicy::Delete(p);
}
Key key() { return key_; }
@@ -184,7 +188,7 @@ class SplayTree {
class NodeDeleter BASE_EMBEDDED {
public:
NodeDeleter() { }
- void Call(Node* node) { delete node; }
+ void Call(Node* node) { AllocationPolicy::Delete(node); }
private:
DISALLOW_COPY_AND_ASSIGN(NodeDeleter);
@@ -194,6 +198,7 @@ class SplayTree {
void ForEachNode(Callback* callback);
Node* root_;
+ AllocationPolicy allocator_;
DISALLOW_COPY_AND_ASSIGN(SplayTree);
};
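The splay-tree changes above replace a static Allocator with an AllocationPolicy value that the tree carries around, and node allocation goes through a placement-style operator new(size_t, AllocationPolicy). A minimal sketch of that pattern on a toy container, assuming a malloc-backed policy rather than V8's zones (MallocPolicy and PolicyList are illustrative names):

#include <cstddef>
#include <cstdlib>
#include <new>

// An allocation policy is a small value object with New/Delete. A container
// keeps a copy and routes every node allocation through it via a placement
// operator new, which is the shape SplayTree takes after this patch.
struct MallocPolicy {
  void* New(size_t size) { return std::malloc(size); }
  static void Delete(void* p) { std::free(p); }
};

template <typename T, class AllocationPolicy = MallocPolicy>
class PolicyList {
 public:
  explicit PolicyList(AllocationPolicy allocator = AllocationPolicy())
      : head_(NULL), allocator_(allocator) {}

  ~PolicyList() {
    while (head_ != NULL) {
      Node* next = head_->next;
      head_->~Node();
      AllocationPolicy::Delete(head_);
      head_ = next;
    }
  }

  void Push(const T& value) {
    // new(allocator_) mirrors the `new(zone) Node(...)` calls in the patch.
    head_ = new (allocator_) Node(value, head_);
  }

 private:
  struct Node {
    Node(const T& v, Node* n) : value(v), next(n) {}
    void* operator new(size_t size, AllocationPolicy allocator) {
      return allocator.New(size);
    }
    // Matching placement delete, only used if the constructor throws.
    void operator delete(void* p, AllocationPolicy) {
      AllocationPolicy::Delete(p);
    }
    void operator delete(void* p, size_t) { AllocationPolicy::Delete(p); }
    T value;
    Node* next;
  };

  Node* head_;
  AllocationPolicy allocator_;
};

With a zone-style policy, Delete can be a no-op and all nodes die with the zone, which is the payoff of routing node allocation this way.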
diff --git a/deps/v8/src/string-stream.cc b/deps/v8/src/string-stream.cc
index 35f7be5416..270fe5a40d 100644
--- a/deps/v8/src/string-stream.cc
+++ b/deps/v8/src/string-stream.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -291,7 +291,7 @@ void StringStream::ClearMentionedObjectCache() {
isolate->set_string_stream_current_security_token(NULL);
if (isolate->string_stream_debug_object_cache() == NULL) {
isolate->set_string_stream_debug_object_cache(
- new List<HeapObject*, PreallocatedStorage>(0));
+ new List<HeapObject*, PreallocatedStorageAllocationPolicy>(0));
}
isolate->string_stream_debug_object_cache()->Clear();
}
@@ -427,7 +427,7 @@ void StringStream::PrintMentionedObjectCache() {
PrintUsingMap(JSObject::cast(printee));
if (printee->IsJSArray()) {
JSArray* array = JSArray::cast(printee);
- if (array->HasFastElements()) {
+ if (array->HasFastObjectElements()) {
unsigned int limit = FixedArray::cast(array->elements())->length();
unsigned int length =
static_cast<uint32_t>(JSArray::cast(array)->length()->Number());
diff --git a/deps/v8/src/string.js b/deps/v8/src/string.js
index 84dde3dc27..6115930b6c 100644
--- a/deps/v8/src/string.js
+++ b/deps/v8/src/string.js
@@ -189,7 +189,9 @@ function StringMatch(regexp) {
if (!regexp.global) return RegExpExecNoTests(regexp, subject, 0);
%_Log('regexp', 'regexp-match,%0S,%1r', [subject, regexp]);
// lastMatchInfo is defined in regexp.js.
- return %StringMatch(subject, regexp, lastMatchInfo);
+ var result = %StringMatch(subject, regexp, lastMatchInfo);
+ if (result !== null) lastMatchInfoOverride = null;
+ return result;
}
// Non-regexp argument.
regexp = new $RegExp(regexp);
@@ -235,10 +237,28 @@ function StringReplace(search, replace) {
replace);
}
} else {
- return %StringReplaceRegExpWithString(subject,
- search,
- TO_STRING_INLINE(replace),
- lastMatchInfo);
+ if (lastMatchInfoOverride == null) {
+ return %StringReplaceRegExpWithString(subject,
+ search,
+ TO_STRING_INLINE(replace),
+ lastMatchInfo);
+ } else {
+ // We use this hack to detect whether StringReplaceRegExpWithString
+ // found at least one hit. In that case we need to remove any
+ // override.
+ var saved_subject = lastMatchInfo[LAST_SUBJECT_INDEX];
+ lastMatchInfo[LAST_SUBJECT_INDEX] = 0;
+ var answer = %StringReplaceRegExpWithString(subject,
+ search,
+ TO_STRING_INLINE(replace),
+ lastMatchInfo);
+ if (%_IsSmi(lastMatchInfo[LAST_SUBJECT_INDEX])) {
+ lastMatchInfo[LAST_SUBJECT_INDEX] = saved_subject;
+ } else {
+ lastMatchInfoOverride = null;
+ }
+ return answer;
+ }
}
}
@@ -257,47 +277,34 @@ function StringReplace(search, replace) {
if (start < 0) return subject;
var end = start + search.length;
- var builder = new ReplaceResultBuilder(subject);
- // prefix
- builder.addSpecialSlice(0, start);
+ var result = SubString(subject, 0, start);
// Compute the string to replace with.
if (IS_SPEC_FUNCTION(replace)) {
var receiver = %GetDefaultReceiver(replace);
- builder.add(%_CallFunction(receiver,
- search,
- start,
- subject,
- replace));
+ result += %_CallFunction(receiver, search, start, subject, replace);
} else {
reusableMatchInfo[CAPTURE0] = start;
reusableMatchInfo[CAPTURE1] = end;
replace = TO_STRING_INLINE(replace);
- ExpandReplacement(replace, subject, reusableMatchInfo, builder);
+ result = ExpandReplacement(replace, subject, reusableMatchInfo, result);
}
- // suffix
- builder.addSpecialSlice(end, subject.length);
-
- return builder.generate();
+ return result + SubString(subject, end, subject.length);
}
// Expand the $-expressions in the string and return a new string with
// the result.
-function ExpandReplacement(string, subject, matchInfo, builder) {
+function ExpandReplacement(string, subject, matchInfo, result) {
var length = string.length;
- var builder_elements = builder.elements;
var next = %StringIndexOf(string, '$', 0);
if (next < 0) {
- if (length > 0) builder_elements.push(string);
- return;
+ if (length > 0) result += string;
+ return result;
}
- // Compute the number of captures; see ECMA-262, 15.5.4.11, p. 102.
- var m = NUMBER_OF_CAPTURES(matchInfo) >> 1; // Includes the match.
-
- if (next > 0) builder_elements.push(SubString(string, 0, next));
+ if (next > 0) result += SubString(string, 0, next);
while (true) {
var expansion = '$';
@@ -306,51 +313,21 @@ function ExpandReplacement(string, subject, matchInfo, builder) {
var peek = %_StringCharCodeAt(string, position);
if (peek == 36) { // $$
++position;
- builder_elements.push('$');
+ result += '$';
} else if (peek == 38) { // $& - match
++position;
- builder.addSpecialSlice(matchInfo[CAPTURE0],
- matchInfo[CAPTURE1]);
+ result += SubString(subject, matchInfo[CAPTURE0], matchInfo[CAPTURE1]);
} else if (peek == 96) { // $` - prefix
++position;
- builder.addSpecialSlice(0, matchInfo[CAPTURE0]);
+ result += SubString(subject, 0, matchInfo[CAPTURE0]);
} else if (peek == 39) { // $' - suffix
++position;
- builder.addSpecialSlice(matchInfo[CAPTURE1], subject.length);
- } else if (peek >= 48 && peek <= 57) { // $n, 0 <= n <= 9
- ++position;
- var n = peek - 48;
- if (position < length) {
- peek = %_StringCharCodeAt(string, position);
- // $nn, 01 <= nn <= 99
- if (n != 0 && peek == 48 || peek >= 49 && peek <= 57) {
- var nn = n * 10 + (peek - 48);
- if (nn < m) {
- // If the two digit capture reference is within range of
- // the captures, we use it instead of the single digit
- // one. Otherwise, we fall back to using the single
- // digit reference. This matches the behavior of
- // SpiderMonkey.
- ++position;
- n = nn;
- }
- }
- }
- if (0 < n && n < m) {
- addCaptureString(builder, matchInfo, n);
- } else {
- // Because of the captures range check in the parsing of two
- // digit capture references, we can only enter here when a
- // single digit capture reference is outside the range of
- // captures.
- builder_elements.push('$');
- --position;
- }
+ result += SubString(subject, matchInfo[CAPTURE1], subject.length);
} else {
- builder_elements.push('$');
+ result += '$';
}
} else {
- builder_elements.push('$');
+ result += '$';
}
// Go to the next $ in the string.
@@ -360,16 +337,17 @@ function ExpandReplacement(string, subject, matchInfo, builder) {
// haven't reached the end, we need to append the suffix.
if (next < 0) {
if (position < length) {
- builder_elements.push(SubString(string, position, length));
+ result += SubString(string, position, length);
}
- return;
+ return result;
}
// Append substring between the previous and the next $ character.
if (next > position) {
- builder_elements.push(SubString(string, position, next));
+ result += SubString(string, position, next);
}
}
+ return result;
}
@@ -386,18 +364,6 @@ function CaptureString(string, lastCaptureInfo, index) {
}
-// Add the string of a given regular expression capture to the
-// ReplaceResultBuilder
-function addCaptureString(builder, matchInfo, index) {
- // Scale the index.
- var scaled = index << 1;
- // Compute start and end.
- var start = matchInfo[CAPTURE(scaled)];
- if (start < 0) return;
- var end = matchInfo[CAPTURE(scaled + 1)];
- builder.addSpecialSlice(start, end);
-}
-
// TODO(lrn): This array will survive indefinitely if replace is never
// called again. However, it will be empty, since the contents are cleared
// in the finally block.
@@ -427,14 +393,22 @@ function StringReplaceGlobalRegExpWithFunction(subject, regexp, replace) {
return subject;
}
var len = res.length;
- var i = 0;
if (NUMBER_OF_CAPTURES(lastMatchInfo) == 2) {
+ // If the number of captures is two then there are no explicit captures in
+ // the regexp, just the implicit capture that captures the whole match. In
+ // this case we can simplify quite a bit and end up with something faster.
+ // The builder will consist of some integers that indicate slices of the
+ // input string and some replacements that were returned from the replace
+ // function.
var match_start = 0;
var override = new InternalArray(null, 0, subject);
var receiver = %GetDefaultReceiver(replace);
- while (i < len) {
+ for (var i = 0; i < len; i++) {
var elem = res[i];
if (%_IsSmi(elem)) {
+ // Integers represent slices of the original string. Use these to
+ // get the offsets we need for the override array (so things like
+ // RegExp.leftContext work during the callback function).
if (elem > 0) {
match_start = (elem >> 11) + (elem & 0x7ff);
} else {
@@ -446,23 +420,25 @@ function StringReplaceGlobalRegExpWithFunction(subject, regexp, replace) {
lastMatchInfoOverride = override;
var func_result =
%_CallFunction(receiver, elem, match_start, subject, replace);
+ // Overwrite the i'th element in the results with the string we got
+ // back from the callback function.
res[i] = TO_STRING_INLINE(func_result);
match_start += elem.length;
}
- i++;
}
} else {
var receiver = %GetDefaultReceiver(replace);
- while (i < len) {
+ for (var i = 0; i < len; i++) {
var elem = res[i];
if (!%_IsSmi(elem)) {
// elem must be an Array.
// Use the apply argument as backing for global RegExp properties.
lastMatchInfoOverride = elem;
var func_result = %Apply(replace, receiver, elem, 0, elem.length);
+ // Overwrite the i'th element in the results with the string we got
+ // back from the callback function.
res[i] = TO_STRING_INLINE(func_result);
}
- i++;
}
}
var resultBuilder = new ReplaceResultBuilder(subject, res);
@@ -476,9 +452,8 @@ function StringReplaceGlobalRegExpWithFunction(subject, regexp, replace) {
function StringReplaceNonGlobalRegExpWithFunction(subject, regexp, replace) {
var matchInfo = DoRegExpExec(regexp, subject, 0);
if (IS_NULL(matchInfo)) return subject;
- var result = new ReplaceResultBuilder(subject);
var index = matchInfo[CAPTURE0];
- result.addSpecialSlice(0, index);
+ var result = SubString(subject, 0, index);
var endOfMatch = matchInfo[CAPTURE1];
// Compute the parameter list consisting of the match, captures, index,
// and subject for the replace function invocation.
@@ -490,8 +465,7 @@ function StringReplaceNonGlobalRegExpWithFunction(subject, regexp, replace) {
// No captures, only the match, which is always valid.
var s = SubString(subject, index, endOfMatch);
// Don't call directly to avoid exposing the built-in global object.
- replacement =
- %_CallFunction(receiver, s, index, subject, replace);
+ replacement = %_CallFunction(receiver, s, index, subject, replace);
} else {
var parameters = new InternalArray(m + 2);
for (var j = 0; j < m; j++) {
@@ -503,11 +477,10 @@ function StringReplaceNonGlobalRegExpWithFunction(subject, regexp, replace) {
replacement = %Apply(replace, receiver, parameters, 0, j + 2);
}
- result.add(replacement); // The add method converts to string if necessary.
+ result += replacement; // Concatenation converts the replacement to a string if necessary.
// Can't use matchInfo any more from here, since the function could
// overwrite it.
- result.addSpecialSlice(endOfMatch, subject.length);
- return result.generate();
+ return result + SubString(subject, endOfMatch, subject.length);
}
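The string.js rewrite above drops ReplaceResultBuilder in favor of plain string concatenation: ExpandReplacement now walks the replacement template and splices in $$, $&, $` and $' against the current match, while numbered captures are handled on other paths. A rough C++ rendering of that expansion loop, assuming a single match given as [start, end) (this ExpandReplacement is a standalone approximation, not the engine code):

#include <cstddef>
#include <string>

// Expand "$$", "$&", "$`" and "$'" in a replacement template against one
// match of the pattern in `subject` occupying [start, end). Numbered
// captures are not handled here, matching the stripped-down JS version.
std::string ExpandReplacement(const std::string& tmpl,
                              const std::string& subject,
                              size_t start, size_t end) {
  std::string result;
  for (size_t i = 0; i < tmpl.size(); ++i) {
    if (tmpl[i] != '$' || i + 1 == tmpl.size()) {
      result += tmpl[i];               // ordinary character or trailing '$'
      continue;
    }
    char peek = tmpl[i + 1];
    if (peek == '$') {                 // $$ -> literal '$'
      result += '$';
      ++i;
    } else if (peek == '&') {          // $& -> the match itself
      result += subject.substr(start, end - start);
      ++i;
    } else if (peek == '`') {          // $` -> everything before the match
      result += subject.substr(0, start);
      ++i;
    } else if (peek == '\'') {         // $' -> everything after the match
      result += subject.substr(end);
      ++i;
    } else {
      result += '$';                   // unknown directive: keep the '$'
    }
  }
  return result;
}

For example, ExpandReplacement("<$`|$&|$'>", "abcde", 1, 3) produces "<a|bc|de>".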
diff --git a/deps/v8/src/stub-cache.cc b/deps/v8/src/stub-cache.cc
index 3371b1bf4a..27948919db 100644
--- a/deps/v8/src/stub-cache.cc
+++ b/deps/v8/src/stub-cache.cc
@@ -43,7 +43,8 @@ namespace internal {
// StubCache implementation.
-StubCache::StubCache(Isolate* isolate) : isolate_(isolate) {
+StubCache::StubCache(Isolate* isolate, Zone* zone)
+ : isolate_(isolate), zone_(zone) {
ASSERT(isolate == Isolate::Current());
}
@@ -171,6 +172,25 @@ Handle<Code> StubCache::ComputeLoadCallback(Handle<String> name,
}
+Handle<Code> StubCache::ComputeLoadViaGetter(Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSObject> holder,
+ Handle<JSFunction> getter) {
+ ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP);
+ Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, CALLBACKS);
+ Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
+ if (probe->IsCode()) return Handle<Code>::cast(probe);
+
+ LoadStubCompiler compiler(isolate_);
+ Handle<Code> code =
+ compiler.CompileLoadViaGetter(name, receiver, holder, getter);
+ PROFILE(isolate_, CodeCreateEvent(Logger::LOAD_IC_TAG, *code, *name));
+ GDBJIT(AddCode(GDBJITInterface::LOAD_IC, *name, *code));
+ JSObject::UpdateMapCodeCache(receiver, name, code);
+ return code;
+}
+
+
Handle<Code> StubCache::ComputeLoadConstant(Handle<String> name,
Handle<JSObject> receiver,
Handle<JSObject> holder,
@@ -384,7 +404,7 @@ Handle<Code> StubCache::ComputeStoreField(Handle<String> name,
Handle<Code> StubCache::ComputeKeyedLoadOrStoreElement(
- Handle<JSObject> receiver,
+ Handle<Map> receiver_map,
KeyedIC::StubKind stub_kind,
StrictModeFlag strict_mode) {
KeyedAccessGrowMode grow_mode =
@@ -412,7 +432,6 @@ Handle<Code> StubCache::ComputeKeyedLoadOrStoreElement(
UNREACHABLE();
break;
}
- Handle<Map> receiver_map(receiver->map());
Handle<Object> probe(receiver_map->FindInCodeCache(*name, flags));
if (probe->IsCode()) return Handle<Code>::cast(probe);
@@ -447,7 +466,7 @@ Handle<Code> StubCache::ComputeKeyedLoadOrStoreElement(
} else {
PROFILE(isolate_, CodeCreateEvent(Logger::KEYED_STORE_IC_TAG, *code, 0));
}
- JSObject::UpdateMapCodeCache(receiver, name, code);
+ Map::UpdateCodeCache(receiver_map, name, code);
return code;
}
@@ -496,6 +515,24 @@ Handle<Code> StubCache::ComputeStoreCallback(Handle<String> name,
}
+Handle<Code> StubCache::ComputeStoreViaSetter(Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSFunction> setter,
+ StrictModeFlag strict_mode) {
+ Code::Flags flags = Code::ComputeMonomorphicFlags(
+ Code::STORE_IC, CALLBACKS, strict_mode);
+ Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
+ if (probe->IsCode()) return Handle<Code>::cast(probe);
+
+ StoreStubCompiler compiler(isolate_, strict_mode);
+ Handle<Code> code = compiler.CompileStoreViaSetter(receiver, setter, name);
+ PROFILE(isolate_, CodeCreateEvent(Logger::STORE_IC_TAG, *code, *name));
+ GDBJIT(AddCode(GDBJITInterface::STORE_IC, *name, *code));
+ JSObject::UpdateMapCodeCache(receiver, name, code);
+ return code;
+}
+
+
Handle<Code> StubCache::ComputeStoreInterceptor(Handle<String> name,
Handle<JSObject> receiver,
StrictModeFlag strict_mode) {
@@ -901,7 +938,7 @@ void StubCache::CollectMatchingMaps(SmallMapList* types,
int offset = PrimaryOffset(name, flags, map);
if (entry(primary_, offset) == &primary_[i] &&
!TypeFeedbackOracle::CanRetainOtherContext(map, *global_context)) {
- types->Add(Handle<Map>(map));
+ types->Add(Handle<Map>(map), zone());
}
}
}
@@ -925,7 +962,7 @@ void StubCache::CollectMatchingMaps(SmallMapList* types,
int offset = SecondaryOffset(name, flags, primary_offset);
if (entry(secondary_, offset) == &secondary_[i] &&
!TypeFeedbackOracle::CanRetainOtherContext(map, *global_context)) {
- types->Add(Handle<Map>(map));
+ types->Add(Handle<Map>(map), zone());
}
}
}
@@ -939,10 +976,12 @@ void StubCache::CollectMatchingMaps(SmallMapList* types,
RUNTIME_FUNCTION(MaybeObject*, LoadCallbackProperty) {
ASSERT(args[0]->IsJSObject());
ASSERT(args[1]->IsJSObject());
- AccessorInfo* callback = AccessorInfo::cast(args[3]);
+ ASSERT(args[3]->IsSmi());
+ AccessorInfo* callback = AccessorInfo::cast(args[4]);
Address getter_address = v8::ToCData<Address>(callback->getter());
v8::AccessorGetter fun = FUNCTION_CAST<v8::AccessorGetter>(getter_address);
ASSERT(fun != NULL);
+ ASSERT(callback->IsCompatibleReceiver(args[0]));
v8::AccessorInfo info(&args[0]);
HandleScope scope(isolate);
v8::Handle<v8::Value> result;
@@ -950,7 +989,7 @@ RUNTIME_FUNCTION(MaybeObject*, LoadCallbackProperty) {
// Leaving JavaScript.
VMState state(isolate, EXTERNAL);
ExternalCallbackScope call_scope(isolate, getter_address);
- result = fun(v8::Utils::ToLocal(args.at<String>(4)), info);
+ result = fun(v8::Utils::ToLocal(args.at<String>(5)), info);
}
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
if (result.IsEmpty()) return HEAP->undefined_value();
@@ -964,6 +1003,7 @@ RUNTIME_FUNCTION(MaybeObject*, StoreCallbackProperty) {
Address setter_address = v8::ToCData<Address>(callback->setter());
v8::AccessorSetter fun = FUNCTION_CAST<v8::AccessorSetter>(setter_address);
ASSERT(fun != NULL);
+ ASSERT(callback->IsCompatibleReceiver(recv));
Handle<String> name = args.at<String>(2);
Handle<Object> value = args.at<Object>(3);
HandleScope scope(isolate);
@@ -997,7 +1037,8 @@ RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorOnly) {
ASSERT(kAccessorInfoOffsetInInterceptorArgs == 2);
ASSERT(args[2]->IsJSObject()); // Receiver.
ASSERT(args[3]->IsJSObject()); // Holder.
- ASSERT(args.length() == 5); // Last arg is data object.
+ ASSERT(args[5]->IsSmi()); // Isolate.
+ ASSERT(args.length() == 6);
Address getter_address = v8::ToCData<Address>(interceptor_info->getter());
v8::NamedPropertyGetter getter =
@@ -1050,7 +1091,7 @@ static MaybeObject* LoadWithInterceptor(Arguments* args,
ASSERT(kAccessorInfoOffsetInInterceptorArgs == 2);
Handle<JSObject> receiver_handle = args->at<JSObject>(2);
Handle<JSObject> holder_handle = args->at<JSObject>(3);
- ASSERT(args->length() == 5); // Last arg is data object.
+ ASSERT(args->length() == 6);
Isolate* isolate = receiver_handle->GetIsolate();
diff --git a/deps/v8/src/stub-cache.h b/deps/v8/src/stub-cache.h
index 29bdb61e32..cd0414319e 100644
--- a/deps/v8/src/stub-cache.h
+++ b/deps/v8/src/stub-cache.h
@@ -90,6 +90,11 @@ class StubCache {
Handle<JSObject> holder,
Handle<AccessorInfo> callback);
+ Handle<Code> ComputeLoadViaGetter(Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSObject> holder,
+ Handle<JSFunction> getter);
+
Handle<Code> ComputeLoadConstant(Handle<String> name,
Handle<JSObject> receiver,
Handle<JSObject> holder,
@@ -157,6 +162,11 @@ class StubCache {
Handle<AccessorInfo> callback,
StrictModeFlag strict_mode);
+ Handle<Code> ComputeStoreViaSetter(Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSFunction> setter,
+ StrictModeFlag strict_mode);
+
Handle<Code> ComputeStoreInterceptor(Handle<String> name,
Handle<JSObject> receiver,
StrictModeFlag strict_mode);
@@ -169,7 +179,7 @@ class StubCache {
Handle<Map> transition,
StrictModeFlag strict_mode);
- Handle<Code> ComputeKeyedLoadOrStoreElement(Handle<JSObject> receiver,
+ Handle<Code> ComputeKeyedLoadOrStoreElement(Handle<Map> receiver_map,
KeyedIC::StubKind stub_kind,
StrictModeFlag strict_mode);
@@ -300,9 +310,10 @@ class StubCache {
Isolate* isolate() { return isolate_; }
Heap* heap() { return isolate()->heap(); }
Factory* factory() { return isolate()->factory(); }
+ Zone* zone() const { return zone_; }
private:
- explicit StubCache(Isolate* isolate);
+ StubCache(Isolate* isolate, Zone* zone);
Handle<Code> ComputeCallInitialize(int argc,
RelocInfo::Mode mode,
@@ -375,6 +386,7 @@ class StubCache {
Entry primary_[kPrimaryTableSize];
Entry secondary_[kSecondaryTableSize];
Isolate* isolate_;
+ Zone* zone_;
friend class Isolate;
friend class SCTableReference;
@@ -460,14 +472,16 @@ class StubCompiler BASE_EMBEDDED {
Register scratch2,
Label* miss_label);
- static void GenerateStoreField(MacroAssembler* masm,
- Handle<JSObject> object,
- int index,
- Handle<Map> transition,
- Register receiver_reg,
- Register name_reg,
- Register scratch,
- Label* miss_label);
+ void GenerateStoreField(MacroAssembler* masm,
+ Handle<JSObject> object,
+ int index,
+ Handle<Map> transition,
+ Handle<String> name,
+ Register receiver_reg,
+ Register name_reg,
+ Register scratch1,
+ Register scratch2,
+ Label* miss_label);
static void GenerateLoadMiss(MacroAssembler* masm,
Code::Kind kind);
@@ -511,6 +525,7 @@ class StubCompiler BASE_EMBEDDED {
int save_at_depth,
Label* miss);
+
protected:
Handle<Code> GetCodeWithFlags(Code::Flags flags, const char* name);
Handle<Code> GetCodeWithFlags(Code::Flags flags, Handle<String> name);
@@ -593,6 +608,11 @@ class LoadStubCompiler: public StubCompiler {
Handle<JSObject> holder,
Handle<AccessorInfo> callback);
+ Handle<Code> CompileLoadViaGetter(Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSObject> holder,
+ Handle<JSFunction> getter);
+
Handle<Code> CompileLoadConstant(Handle<JSObject> object,
Handle<JSObject> holder,
Handle<JSFunction> value,
@@ -678,6 +698,10 @@ class StoreStubCompiler: public StubCompiler {
Handle<AccessorInfo> callback,
Handle<String> name);
+ Handle<Code> CompileStoreViaSetter(Handle<JSObject> receiver,
+ Handle<JSFunction> setter,
+ Handle<String> name);
+
Handle<Code> CompileStoreInterceptor(Handle<JSObject> object,
Handle<String> name);
diff --git a/deps/v8/src/type-info.cc b/deps/v8/src/type-info.cc
index 159be6a5a1..f5e9106742 100644
--- a/deps/v8/src/type-info.cc
+++ b/deps/v8/src/type-info.cc
@@ -61,9 +61,11 @@ TypeInfo TypeInfo::TypeFromValue(Handle<Object> value) {
TypeFeedbackOracle::TypeFeedbackOracle(Handle<Code> code,
Handle<Context> global_context,
- Isolate* isolate) {
+ Isolate* isolate,
+ Zone* zone) {
global_context_ = global_context;
isolate_ = isolate;
+ zone_ = zone;
BuildDictionary(code);
ASSERT(reinterpret_cast<Address>(*dictionary_.location()) != kHandleZapValue);
}
@@ -501,10 +503,10 @@ void TypeFeedbackOracle::CollectReceiverTypes(unsigned ast_id,
// we need a generic store (or load) here.
ASSERT(Handle<Code>::cast(object)->ic_state() == MEGAMORPHIC);
} else if (object->IsMap()) {
- types->Add(Handle<Map>::cast(object));
+ types->Add(Handle<Map>::cast(object), zone());
} else if (FLAG_collect_megamorphic_maps_from_stub_cache &&
Handle<Code>::cast(object)->ic_state() == MEGAMORPHIC) {
- types->Reserve(4);
+ types->Reserve(4, zone());
ASSERT(object->IsCode());
isolate_->stub_cache()->CollectMatchingMaps(types,
*name,
@@ -548,11 +550,12 @@ bool TypeFeedbackOracle::CanRetainOtherContext(JSFunction* function,
}
-static void AddMapIfMissing(Handle<Map> map, SmallMapList* list) {
+static void AddMapIfMissing(Handle<Map> map, SmallMapList* list,
+ Zone* zone) {
for (int i = 0; i < list->length(); ++i) {
if (list->at(i).is_identical_to(map)) return;
}
- list->Add(map);
+ list->Add(map, zone);
}
@@ -571,7 +574,7 @@ void TypeFeedbackOracle::CollectKeyedReceiverTypes(unsigned ast_id,
if (object->IsMap()) {
Map* map = Map::cast(object);
if (!CanRetainOtherContext(map, *global_context_)) {
- AddMapIfMissing(Handle<Map>(map), types);
+ AddMapIfMissing(Handle<Map>(map), types, zone());
}
}
}
@@ -591,7 +594,7 @@ byte TypeFeedbackOracle::ToBooleanTypes(unsigned ast_id) {
// infos before we process them.
void TypeFeedbackOracle::BuildDictionary(Handle<Code> code) {
AssertNoAllocation no_allocation;
- ZoneList<RelocInfo> infos(16);
+ ZoneList<RelocInfo> infos(16, zone());
HandleScope scope;
GetRelocInfos(code, &infos);
CreateDictionary(code, &infos);
@@ -606,7 +609,7 @@ void TypeFeedbackOracle::GetRelocInfos(Handle<Code> code,
ZoneList<RelocInfo>* infos) {
int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID);
for (RelocIterator it(*code, mask); !it.done(); it.next()) {
- infos->Add(*it.rinfo());
+ infos->Add(*it.rinfo(), zone());
}
}
diff --git a/deps/v8/src/type-info.h b/deps/v8/src/type-info.h
index d461331bec..74910cd1a7 100644
--- a/deps/v8/src/type-info.h
+++ b/deps/v8/src/type-info.h
@@ -236,7 +236,8 @@ class TypeFeedbackOracle BASE_EMBEDDED {
public:
TypeFeedbackOracle(Handle<Code> code,
Handle<Context> global_context,
- Isolate* isolate);
+ Isolate* isolate,
+ Zone* zone);
bool LoadIsMonomorphicNormal(Property* expr);
bool LoadIsUninitialized(Property* expr);
@@ -293,6 +294,8 @@ class TypeFeedbackOracle BASE_EMBEDDED {
TypeInfo SwitchType(CaseClause* clause);
TypeInfo IncrementType(CountOperation* expr);
+ Zone* zone() const { return zone_; }
+
private:
void CollectReceiverTypes(unsigned ast_id,
Handle<String> name,
@@ -317,6 +320,7 @@ class TypeFeedbackOracle BASE_EMBEDDED {
Handle<Context> global_context_;
Isolate* isolate_;
Handle<UnseededNumberDictionary> dictionary_;
+ Zone* zone_;
DISALLOW_COPY_AND_ASSIGN(TypeFeedbackOracle);
};
diff --git a/deps/v8/src/utils.cc b/deps/v8/src/utils.cc
index 89ef4c6e3e..7e8c088dd4 100644
--- a/deps/v8/src/utils.cc
+++ b/deps/v8/src/utils.cc
@@ -89,4 +89,19 @@ char* SimpleStringBuilder::Finalize() {
return buffer_.start();
}
+
+const DivMagicNumbers DivMagicNumberFor(int32_t divisor) {
+ switch (divisor) {
+ case 3: return DivMagicNumberFor3;
+ case 5: return DivMagicNumberFor5;
+ case 7: return DivMagicNumberFor7;
+ case 9: return DivMagicNumberFor9;
+ case 11: return DivMagicNumberFor11;
+ case 25: return DivMagicNumberFor25;
+ case 125: return DivMagicNumberFor125;
+ case 625: return DivMagicNumberFor625;
+ default: return InvalidDivMagicNumber;
+ }
+}
+
} } // namespace v8::internal
diff --git a/deps/v8/src/utils.h b/deps/v8/src/utils.h
index 1d40c98b9e..f116c14db3 100644
--- a/deps/v8/src/utils.h
+++ b/deps/v8/src/utils.h
@@ -85,6 +85,32 @@ inline int WhichPowerOf2(uint32_t x) {
}
+// Magic numbers for integer division.
+// These are kind of 2's complement reciprocal of the divisors.
+// Details and proofs can be found in:
+// - Hacker's Delight, Henry S. Warren, Jr.
+// - The PowerPC Compiler Writer’s Guide
+// and probably many others.
+// See details in the implementation of the algorithm in
+// lithium-codegen-arm.cc : LCodeGen::TryEmitSignedIntegerDivisionByConstant().
+struct DivMagicNumbers {
+ unsigned M;
+ unsigned s;
+};
+
+const DivMagicNumbers InvalidDivMagicNumber = {0, 0};
+const DivMagicNumbers DivMagicNumberFor3 = {0x55555556, 0};
+const DivMagicNumbers DivMagicNumberFor5 = {0x66666667, 1};
+const DivMagicNumbers DivMagicNumberFor7 = {0x92492493, 2};
+const DivMagicNumbers DivMagicNumberFor9 = {0x38e38e39, 1};
+const DivMagicNumbers DivMagicNumberFor11 = {0x2e8ba2e9, 1};
+const DivMagicNumbers DivMagicNumberFor25 = {0x51eb851f, 3};
+const DivMagicNumbers DivMagicNumberFor125 = {0x10624dd3, 3};
+const DivMagicNumbers DivMagicNumberFor625 = {0x68db8bad, 8};
+
+const DivMagicNumbers DivMagicNumberFor(int32_t divisor);
+
+
// The C++ standard leaves the semantics of '>>' undefined for
// negative signed operands. Most implementations do the right thing,
// though.
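The (M, s) pairs above turn division by a small constant into a high multiply plus shifts. A minimal sketch of how such a pair is consumed, following the Hacker's Delight recipe referenced in the comment; DivideByConstant is an illustrative name, and the code assumes two's complement and arithmetic right shifts of negative values:

#include <cassert>
#include <cstdint>

// Signed 32-bit division by a constant using an (M, s) magic pair: take the
// high 32 bits of the 64-bit signed product, correct for a "negative" magic
// number, shift, then round toward zero for negative dividends.
int32_t DivideByConstant(int32_t n, uint32_t M, unsigned s) {
  int32_t magic = static_cast<int32_t>(M);  // e.g. 0x92492493 is negative
  int32_t q = static_cast<int32_t>((static_cast<int64_t>(magic) * n) >> 32);
  if (magic < 0) q += n;  // divisors like 7 need one extra add of n
  q >>= s;                // arithmetic shift by the s from the table
  q += static_cast<int32_t>(static_cast<uint32_t>(n) >> 31);  // fix n < 0
  return q;
}

int main() {
  assert(DivideByConstant(100, 0x66666667, 1) == 20);  // 100 / 5
  assert(DivideByConstant(-22, 0x92492493, 2) == -3);  // -22 / 7, toward zero
  assert(DivideByConstant(624, 0x68db8bad, 8) == 0);   // 624 / 625
  return 0;
}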
diff --git a/deps/v8/src/v8.cc b/deps/v8/src/v8.cc
index 1157c9d388..2910a0700d 100644
--- a/deps/v8/src/v8.cc
+++ b/deps/v8/src/v8.cc
@@ -27,11 +27,13 @@
#include "v8.h"
+#include "assembler.h"
#include "isolate.h"
#include "elements.h"
#include "bootstrapper.h"
#include "debug.h"
#include "deoptimizer.h"
+#include "frames.h"
#include "heap-profiler.h"
#include "hydrogen.h"
#include "lithium-allocator.h"
@@ -103,13 +105,21 @@ void V8::TearDown() {
ASSERT(isolate->IsDefaultIsolate());
if (!has_been_set_up_ || has_been_disposed_) return;
+
+ ElementsAccessor::TearDown();
+ LOperand::TearDownCaches();
+ RegisteredExtension::UnregisterAll();
+
isolate->TearDown();
+ delete isolate;
is_running_ = false;
has_been_disposed_ = true;
delete call_completed_callbacks_;
call_completed_callbacks_ = NULL;
+
+ OS::TearDown();
}
@@ -240,7 +250,6 @@ Object* V8::FillHeapNumberWithRandom(Object* heap_number,
}
void V8::InitializeOncePerProcessImpl() {
- // Set up the platform OS support.
OS::SetUp();
use_crankshaft_ = FLAG_crankshaft;
@@ -256,7 +265,7 @@ void V8::InitializeOncePerProcessImpl() {
OS::PostSetUp();
- RuntimeProfiler::GlobalSetup();
+ RuntimeProfiler::GlobalSetUp();
ElementsAccessor::InitializeOncePerProcess();
@@ -267,6 +276,9 @@ void V8::InitializeOncePerProcessImpl() {
}
LOperand::SetUpCaches();
+ SetUpJSCallerSavedCodeData();
+ SamplerRegistry::SetUp();
+ ExternalReference::SetUp();
}
void V8::InitializeOncePerProcess() {
diff --git a/deps/v8/src/v8.h b/deps/v8/src/v8.h
index 59ce602555..67716d8107 100644
--- a/deps/v8/src/v8.h
+++ b/deps/v8/src/v8.h
@@ -65,6 +65,7 @@
#include "log-inl.h"
#include "cpu-profiler-inl.h"
#include "handles-inl.h"
+#include "zone-inl.h"
namespace v8 {
namespace internal {
diff --git a/deps/v8/src/v8globals.h b/deps/v8/src/v8globals.h
index bfc5e23390..6a1766a1a5 100644
--- a/deps/v8/src/v8globals.h
+++ b/deps/v8/src/v8globals.h
@@ -48,6 +48,10 @@ const intptr_t kObjectAlignmentMask = kObjectAlignment - 1;
const intptr_t kPointerAlignment = (1 << kPointerSizeLog2);
const intptr_t kPointerAlignmentMask = kPointerAlignment - 1;
+// Desired alignment for double values.
+const intptr_t kDoubleAlignment = 8;
+const intptr_t kDoubleAlignmentMask = kDoubleAlignment - 1;
+
// Desired alignment for maps.
#if V8_HOST_ARCH_64_BIT
const intptr_t kMapAlignmentBits = kObjectAlignmentBits;
diff --git a/deps/v8/src/v8natives.js b/deps/v8/src/v8natives.js
index f1e8084a53..86f07a195a 100644
--- a/deps/v8/src/v8natives.js
+++ b/deps/v8/src/v8natives.js
@@ -337,7 +337,7 @@ function ObjectKeys(obj) {
if (%IsJSProxy(obj)) {
var handler = %GetHandler(obj);
var names = CallTrap0(handler, "keys", DerivedKeysTrap);
- return ToStringArray(names);
+ return ToStringArray(names, "keys");
}
return %LocalKeys(obj);
}
@@ -963,7 +963,7 @@ function ToStringArray(obj, trap) {
var names = {}; // TODO(rossberg): use sets once they are ready.
for (var index = 0; index < n; index++) {
var s = ToString(obj[index]);
- if (s in names) {
+ if (%HasLocalProperty(names, s)) {
throw MakeTypeError("proxy_repeated_prop_name", [obj, trap, s]);
}
array[index] = s;
diff --git a/deps/v8/src/version.cc b/deps/v8/src/version.cc
index 48ed355e66..06912b100b 100644
--- a/deps/v8/src/version.cc
+++ b/deps/v8/src/version.cc
@@ -33,9 +33,9 @@
// NOTE these macros are used by the SCons build script so their names
// cannot be changed without changing the SCons build script.
#define MAJOR_VERSION 3
-#define MINOR_VERSION 9
-#define BUILD_NUMBER 24
-#define PATCH_LEVEL 31
+#define MINOR_VERSION 11
+#define BUILD_NUMBER 10
+#define PATCH_LEVEL 0
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
#define IS_CANDIDATE_VERSION 0
diff --git a/deps/v8/src/x64/assembler-x64.h b/deps/v8/src/x64/assembler-x64.h
index 60b29e6475..9f5f850294 100644
--- a/deps/v8/src/x64/assembler-x64.h
+++ b/deps/v8/src/x64/assembler-x64.h
@@ -629,7 +629,8 @@ class Assembler : public AssemblerBase {
static const byte kJccShortPrefix = 0x70;
static const byte kJncShortOpcode = kJccShortPrefix | not_carry;
static const byte kJcShortOpcode = kJccShortPrefix | carry;
-
+ static const byte kJnzShortOpcode = kJccShortPrefix | not_zero;
+ static const byte kJzShortOpcode = kJccShortPrefix | zero;
// ---------------------------------------------------------------------------
diff --git a/deps/v8/src/x64/builtins-x64.cc b/deps/v8/src/x64/builtins-x64.cc
index 4e037ff465..0af0a43477 100644
--- a/deps/v8/src/x64/builtins-x64.cc
+++ b/deps/v8/src/x64/builtins-x64.cc
@@ -977,7 +977,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
const int initial_capacity = JSArray::kPreallocatedArrayElements;
STATIC_ASSERT(initial_capacity >= 0);
- __ LoadInitialArrayMap(array_function, scratch2, scratch1);
+ __ LoadInitialArrayMap(array_function, scratch2, scratch1, false);
// Allocate the JSArray object together with space for a fixed array with the
// requested elements.
@@ -1076,7 +1076,8 @@ static void AllocateJSArray(MacroAssembler* masm,
Register scratch,
bool fill_with_hole,
Label* gc_required) {
- __ LoadInitialArrayMap(array_function, scratch, elements_array);
+ __ LoadInitialArrayMap(array_function, scratch,
+ elements_array, fill_with_hole);
if (FLAG_debug_code) { // Assert that array size is not zero.
__ testq(array_size, array_size);
@@ -1303,10 +1304,10 @@ static void ArrayNativeCode(MacroAssembler* masm,
__ jmp(call_generic_code);
__ bind(&not_double);
- // Transition FAST_SMI_ONLY_ELEMENTS to FAST_ELEMENTS.
+ // Transition FAST_SMI_ELEMENTS to FAST_ELEMENTS.
// rbx: JSArray
__ movq(r11, FieldOperand(rbx, HeapObject::kMapOffset));
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
FAST_ELEMENTS,
r11,
kScratchRegister,
diff --git a/deps/v8/src/x64/code-stubs-x64.cc b/deps/v8/src/x64/code-stubs-x64.cc
index 2845039771..61d6c87911 100644
--- a/deps/v8/src/x64/code-stubs-x64.cc
+++ b/deps/v8/src/x64/code-stubs-x64.cc
@@ -2864,30 +2864,37 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ IncrementCounter(counters->regexp_entry_native(), 1);
// Isolates: note we add an additional parameter here (isolate pointer).
- static const int kRegExpExecuteArguments = 8;
+ static const int kRegExpExecuteArguments = 9;
int argument_slots_on_stack =
masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
__ EnterApiExitFrame(argument_slots_on_stack);
- // Argument 8: Pass current isolate address.
+ // Argument 9: Pass current isolate address.
// __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize),
// Immediate(ExternalReference::isolate_address()));
__ LoadAddress(kScratchRegister, ExternalReference::isolate_address());
__ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize),
kScratchRegister);
- // Argument 7: Indicate that this is a direct call from JavaScript.
+ // Argument 8: Indicate that this is a direct call from JavaScript.
__ movq(Operand(rsp, (argument_slots_on_stack - 2) * kPointerSize),
Immediate(1));
- // Argument 6: Start (high end) of backtracking stack memory area.
+ // Argument 7: Start (high end) of backtracking stack memory area.
__ movq(kScratchRegister, address_of_regexp_stack_memory_address);
__ movq(r9, Operand(kScratchRegister, 0));
__ movq(kScratchRegister, address_of_regexp_stack_memory_size);
__ addq(r9, Operand(kScratchRegister, 0));
- // Argument 6 passed in r9 on Linux and on the stack on Windows.
-#ifdef _WIN64
__ movq(Operand(rsp, (argument_slots_on_stack - 3) * kPointerSize), r9);
+
+ // Argument 6: Set the number of capture registers to zero to force global
+ // regexps to behave as non-global. This does not affect non-global regexps.
+ // Argument 6 is passed in r9 on Linux and on the stack on Windows.
+#ifdef _WIN64
+ __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kPointerSize),
+ Immediate(0));
+#else
+ __ Set(r9, 0);
#endif
// Argument 5: static offsets vector buffer.
@@ -2895,7 +2902,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
ExternalReference::address_of_static_offsets_vector(isolate));
// Argument 5 passed in r8 on Linux and on the stack on Windows.
#ifdef _WIN64
- __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kPointerSize), r8);
+ __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kPointerSize), r8);
#endif
// First four arguments are passed in registers on both Linux and Windows.
@@ -2960,7 +2967,9 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Check the result.
Label success;
Label exception;
- __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::SUCCESS));
+ __ cmpl(rax, Immediate(1));
+ // We expect exactly one result since we force the called regexp to behave
+ // as non-global.
__ j(equal, &success, Label::kNear);
__ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
__ j(equal, &exception);
@@ -3628,8 +3637,9 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
void CallFunctionStub::Generate(MacroAssembler* masm) {
- // rdi : the function to call
// rbx : cache cell for call target
+ // rdi : the function to call
+ Isolate* isolate = masm->isolate();
Label slow, non_function;
// The receiver might implicitly be the global object. This is
@@ -3644,9 +3654,9 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
__ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
__ j(not_equal, &call, Label::kNear);
// Patch the receiver on the stack with the global receiver object.
- __ movq(rbx, GlobalObjectOperand());
- __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
- __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rbx);
+ __ movq(rcx, GlobalObjectOperand());
+ __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset));
+ __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rcx);
__ bind(&call);
}
@@ -3656,6 +3666,10 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
__ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
__ j(not_equal, &slow);
+ if (RecordCallTarget()) {
+ GenerateRecordCallTarget(masm);
+ }
+
// Fast-case: Just invoke the function.
ParameterCount actual(argc_);
@@ -3678,6 +3692,13 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
// Slow-case: Non-function called.
__ bind(&slow);
+ if (RecordCallTarget()) {
+ // If there is a call target cache, mark it megamorphic in the
+ // non-function case. MegamorphicSentinel is an immortal immovable
+ // object (undefined) so no write barrier is needed.
+ __ Move(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset),
+ TypeFeedbackCells::MegamorphicSentinel(isolate));
+ }
// Check for function proxy.
__ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
__ j(not_equal, &non_function);
@@ -5112,56 +5133,24 @@ void SubStringStub::Generate(MacroAssembler* masm) {
// rax: string
// rbx: instance type
// Calculate length of sub string using the smi values.
- Label result_longer_than_two;
__ movq(rcx, Operand(rsp, kToOffset));
__ movq(rdx, Operand(rsp, kFromOffset));
__ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime);
__ SmiSub(rcx, rcx, rdx); // Overflow doesn't happen.
- __ cmpq(FieldOperand(rax, String::kLengthOffset), rcx);
+ __ cmpq(rcx, FieldOperand(rax, String::kLengthOffset));
Label not_original_string;
- __ j(not_equal, &not_original_string, Label::kNear);
+ // Shorter than original string's length: an actual substring.
+ __ j(below, &not_original_string, Label::kNear);
+ // Longer than original string's length or negative: unsafe arguments.
+ __ j(above, &runtime);
+ // Return original string.
Counters* counters = masm->isolate()->counters();
__ IncrementCounter(counters->sub_string_native(), 1);
__ ret(kArgumentsSize);
__ bind(&not_original_string);
- // Special handling of sub-strings of length 1 and 2. One character strings
- // are handled in the runtime system (looked up in the single character
- // cache). Two character strings are looked for in the symbol cache.
__ SmiToInteger32(rcx, rcx);
- __ cmpl(rcx, Immediate(2));
- __ j(greater, &result_longer_than_two);
- __ j(less, &runtime);
-
- // Sub string of length 2 requested.
- // rax: string
- // rbx: instance type
- // rcx: sub string length (value is 2)
- // rdx: from index (smi)
- __ JumpIfInstanceTypeIsNotSequentialAscii(rbx, rbx, &runtime);
-
- // Get the two characters forming the sub string.
- __ SmiToInteger32(rdx, rdx); // From index is no longer smi.
- __ movzxbq(rbx, FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize));
- __ movzxbq(rdi,
- FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize + 1));
-
- // Try to lookup two character string in symbol table.
- Label make_two_character_string;
- StringHelper::GenerateTwoCharacterSymbolTableProbe(
- masm, rbx, rdi, r9, r11, r14, r15, &make_two_character_string);
- __ IncrementCounter(counters->sub_string_native(), 1);
- __ ret(3 * kPointerSize);
-
- __ bind(&make_two_character_string);
- // Set up registers for allocating the two character string.
- __ movzxwq(rbx, FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize));
- __ AllocateAsciiString(rax, rcx, r11, r14, r15, &runtime);
- __ movw(FieldOperand(rax, SeqAsciiString::kHeaderSize), rbx);
- __ IncrementCounter(counters->sub_string_native(), 1);
- __ ret(3 * kPointerSize);
- __ bind(&result_longer_than_two);
// rax: string
// rbx: instance type
// rcx: sub string length
@@ -6013,12 +6002,12 @@ struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
// KeyedStoreStubCompiler::GenerateStoreFastElement.
{ REG(rdi), REG(rbx), REG(rcx), EMIT_REMEMBERED_SET},
{ REG(rdx), REG(rdi), REG(rbx), EMIT_REMEMBERED_SET},
- // ElementsTransitionGenerator::GenerateSmiOnlyToObject
- // and ElementsTransitionGenerator::GenerateSmiOnlyToObject
+ // ElementsTransitionGenerator::GenerateMapChangeElementTransition
+ // and ElementsTransitionGenerator::GenerateSmiToDouble
// and ElementsTransitionGenerator::GenerateDoubleToObject
{ REG(rdx), REG(rbx), REG(rdi), EMIT_REMEMBERED_SET},
{ REG(rdx), REG(rbx), REG(rdi), OMIT_REMEMBERED_SET},
- // ElementsTransitionGenerator::GenerateSmiOnlyToDouble
+ // ElementsTransitionGenerator::GenerateSmiToDouble
// and ElementsTransitionGenerator::GenerateDoubleToObject
{ REG(rdx), REG(r11), REG(r15), EMIT_REMEMBERED_SET},
// ElementsTransitionGenerator::GenerateDoubleToObject
@@ -6292,9 +6281,9 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
__ CheckFastElements(rdi, &double_elements);
- // FAST_SMI_ONLY_ELEMENTS or FAST_ELEMENTS
+ // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS
__ JumpIfSmi(rax, &smi_element);
- __ CheckFastSmiOnlyElements(rdi, &fast_elements);
+ __ CheckFastSmiElements(rdi, &fast_elements);
// Store into the array literal requires an elements transition. Call into
// the runtime.
@@ -6312,7 +6301,7 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
// place.
__ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
- // Array literal has ElementsKind of FAST_ELEMENTS and value is an object.
+ // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
__ bind(&fast_elements);
__ SmiToInteger32(kScratchRegister, rcx);
__ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
@@ -6326,8 +6315,8 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
OMIT_SMI_CHECK);
__ ret(0);
- // Array literal has ElementsKind of FAST_SMI_ONLY_ELEMENTS or
- // FAST_ELEMENTS, and value is Smi.
+ // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or
+ // FAST_*_ELEMENTS, and value is Smi.
__ bind(&smi_element);
__ SmiToInteger32(kScratchRegister, rcx);
__ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
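CallFunctionStub::Generate now records the call target in the feedback cell (rbx) and, on the non-function slow path, overwrites the cell with the megamorphic sentinel. The cell is effectively a three-state machine: uninitialized, monomorphic on one callee, then megamorphic for good. A hedged sketch of that transition logic in plain C++ (CallTargetCell and the enum are illustrative; V8 stores sentinel heap values in a property cell, not an enum):

#include <cstdint>

// Call-target feedback cell: remembers at most one concrete callee and
// otherwise degrades to "megamorphic", mirroring GenerateRecordCallTarget
// plus the slow-path MegamorphicSentinel store added above.
class CallTargetCell {
 public:
  enum State { kUninitialized, kMonomorphic, kMegamorphic };

  CallTargetCell() : state_(kUninitialized), target_(0) {}

  void RecordCall(uintptr_t callee) {
    switch (state_) {
      case kUninitialized:        // first call: remember the callee
        state_ = kMonomorphic;
        target_ = callee;
        break;
      case kMonomorphic:          // same callee keeps the cell monomorphic
        if (target_ != callee) {
          state_ = kMegamorphic;  // a different callee: give up
          target_ = 0;
        }
        break;
      case kMegamorphic:
        break;                    // nothing more to learn
    }
  }

  // Non-function callee on the slow path: always megamorphic. In V8 no write
  // barrier is needed because the sentinel is immortal and immovable.
  void RecordNonFunctionCall() {
    state_ = kMegamorphic;
    target_ = 0;
  }

  State state() const { return state_; }

 private:
  State state_;
  uintptr_t target_;
};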
diff --git a/deps/v8/src/x64/codegen-x64.cc b/deps/v8/src/x64/codegen-x64.cc
index a8d39b25f6..2924810c1e 100644
--- a/deps/v8/src/x64/codegen-x64.cc
+++ b/deps/v8/src/x64/codegen-x64.cc
@@ -220,7 +220,7 @@ ModuloFunction CreateModuloFunction() {
#define __ ACCESS_MASM(masm)
-void ElementsTransitionGenerator::GenerateSmiOnlyToObject(
+void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- rax : value
@@ -241,7 +241,7 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToObject(
}
-void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
+void ElementsTransitionGenerator::GenerateSmiToDouble(
MacroAssembler* masm, Label* fail) {
// ----------- S t a t e -------------
// -- rax : value
diff --git a/deps/v8/src/x64/debug-x64.cc b/deps/v8/src/x64/debug-x64.cc
index eec83d9d1e..1b29e58d59 100644
--- a/deps/v8/src/x64/debug-x64.cc
+++ b/deps/v8/src/x64/debug-x64.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -91,6 +91,8 @@ void BreakLocationIterator::ClearDebugBreakAtSlot() {
rinfo()->PatchCode(original_rinfo()->pc(), Assembler::kDebugBreakSlotLength);
}
+const bool Debug::FramePaddingLayout::kIsSupported = true;
+
#define __ ACCESS_MASM(masm)
@@ -103,6 +105,12 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
{
FrameScope scope(masm, StackFrame::INTERNAL);
+ // Load padding words on stack.
+ for (int i = 0; i < Debug::FramePaddingLayout::kInitialSize; i++) {
+ __ Push(Smi::FromInt(Debug::FramePaddingLayout::kPaddingValue));
+ }
+ __ Push(Smi::FromInt(Debug::FramePaddingLayout::kInitialSize));
+
// Store the registers containing live values on the expression stack to
// make sure that these are correctly updated during GC. Non object values
// are stored as two smis causing it to be untouched by GC.
@@ -157,6 +165,11 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
}
}
+ // Read current padding counter and skip corresponding number of words.
+ __ pop(kScratchRegister);
+ __ SmiToInteger32(kScratchRegister, kScratchRegister);
+ __ lea(rsp, Operand(rsp, kScratchRegister, times_pointer_size, 0));
+
// Get rid of the internal frame.
}
diff --git a/deps/v8/src/x64/deoptimizer-x64.cc b/deps/v8/src/x64/deoptimizer-x64.cc
index f55ebfc3e8..f3046b9ce3 100644
--- a/deps/v8/src/x64/deoptimizer-x64.cc
+++ b/deps/v8/src/x64/deoptimizer-x64.cc
@@ -111,13 +111,21 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
}
+static const byte kJnsInstruction = 0x79;
+static const byte kJnsOffset = 0x1f;
+static const byte kJaeInstruction = 0x73;
+static const byte kJaeOffset = 0x07;
+static const byte kCallInstruction = 0xe8;
+static const byte kNopByteOne = 0x66;
+static const byte kNopByteTwo = 0x90;
+
void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code,
Address pc_after,
Code* check_code,
Code* replacement_code) {
Address call_target_address = pc_after - kIntSize;
- ASSERT(check_code->entry() ==
- Assembler::target_address_at(call_target_address));
+ ASSERT_EQ(check_code->entry(),
+ Assembler::target_address_at(call_target_address));
// The stack check code matches the pattern:
//
// cmp rsp, <limit>
@@ -135,11 +143,16 @@ void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code,
// test rax, <loop nesting depth>
// ok:
//
- ASSERT(*(call_target_address - 3) == 0x73 && // jae
- *(call_target_address - 2) == 0x07 && // offset
- *(call_target_address - 1) == 0xe8); // call
- *(call_target_address - 3) = 0x66; // 2 byte nop part 1
- *(call_target_address - 2) = 0x90; // 2 byte nop part 2
+ if (FLAG_count_based_interrupts) {
+ ASSERT_EQ(kJnsInstruction, *(call_target_address - 3));
+ ASSERT_EQ(kJnsOffset, *(call_target_address - 2));
+ } else {
+ ASSERT_EQ(kJaeInstruction, *(call_target_address - 3));
+ ASSERT_EQ(kJaeOffset, *(call_target_address - 2));
+ }
+ ASSERT_EQ(kCallInstruction, *(call_target_address - 1));
+ *(call_target_address - 3) = kNopByteOne;
+ *(call_target_address - 2) = kNopByteTwo;
Assembler::set_target_address_at(call_target_address,
replacement_code->entry());
@@ -157,11 +170,16 @@ void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code,
Assembler::target_address_at(call_target_address));
// Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to
// restore the conditional branch.
- ASSERT(*(call_target_address - 3) == 0x66 && // 2 byte nop part 1
- *(call_target_address - 2) == 0x90 && // 2 byte nop part 2
- *(call_target_address - 1) == 0xe8); // call
- *(call_target_address - 3) = 0x73; // jae
- *(call_target_address - 2) = 0x07; // offset
+ ASSERT_EQ(kNopByteOne, *(call_target_address - 3));
+ ASSERT_EQ(kNopByteTwo, *(call_target_address - 2));
+ ASSERT_EQ(kCallInstruction, *(call_target_address - 1));
+ if (FLAG_count_based_interrupts) {
+ *(call_target_address - 3) = kJnsInstruction;
+ *(call_target_address - 2) = kJnsOffset;
+ } else {
+ *(call_target_address - 3) = kJaeInstruction;
+ *(call_target_address - 2) = kJaeOffset;
+ }
Assembler::set_target_address_at(call_target_address,
check_code->entry());
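PatchStackCheckCodeAt/RevertStackCheckCodeAt toggle two bytes in the emitted interrupt check: the short conditional jump (jns when count-based interrupts are enabled, jae for the stack-limit check) is swapped with the two-byte nop 0x66 0x90, making the guarded call unconditional, and the revert puts the jump back. A standalone sketch of the byte patching over a plain buffer, using the jns variant (PatchInterruptCheck/RevertInterruptCheck are illustrative names; the real code also retargets the call's 32-bit operand via Assembler::set_target_address_at, which is omitted here):

#include <cassert>
#include <cstdint>

// Byte constants from the patch above (jns variant; the jae variant works
// the same way with 0x73/0x07).
const uint8_t kJnsInstruction = 0x79;
const uint8_t kJnsOffset = 0x1f;
const uint8_t kCallInstruction = 0xe8;
const uint8_t kNopByteOne = 0x66;
const uint8_t kNopByteTwo = 0x90;

// `call_target` points at the call's 4-byte operand, like call_target_address
// in the patch; the three bytes before it are <jcc> <offset> <call opcode>.
void PatchInterruptCheck(uint8_t* call_target) {
  assert(call_target[-3] == kJnsInstruction);
  assert(call_target[-2] == kJnsOffset);
  assert(call_target[-1] == kCallInstruction);
  call_target[-3] = kNopByteOne;  // 0x66 0x90 is the canonical two-byte nop,
  call_target[-2] = kNopByteTwo;  // so the guarded call becomes unconditional.
}

void RevertInterruptCheck(uint8_t* call_target) {
  assert(call_target[-3] == kNopByteOne);
  assert(call_target[-2] == kNopByteTwo);
  assert(call_target[-1] == kCallInstruction);
  call_target[-3] = kJnsInstruction;  // restore the conditional jump
  call_target[-2] = kJnsOffset;
}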
diff --git a/deps/v8/src/x64/disasm-x64.cc b/deps/v8/src/x64/disasm-x64.cc
index adeda0bb08..0738153588 100644
--- a/deps/v8/src/x64/disasm-x64.cc
+++ b/deps/v8/src/x64/disasm-x64.cc
@@ -315,7 +315,8 @@ class DisassemblerX64 {
rex_(0),
operand_size_(0),
group_1_prefix_(0),
- byte_size_operand_(false) {
+ byte_size_operand_(false),
+ instruction_table_(instruction_table.Pointer()) {
tmp_buffer_[0] = '\0';
}
@@ -344,6 +345,7 @@ class DisassemblerX64 {
byte group_1_prefix_; // 0xF2, 0xF3, or (if no group 1 prefix is present) 0.
// Byte size operand override.
bool byte_size_operand_;
+ const InstructionTable* const instruction_table_;
void setRex(byte rex) {
ASSERT_EQ(0x40, rex & 0xF0);
@@ -1340,7 +1342,7 @@ int DisassemblerX64::InstructionDecode(v8::internal::Vector<char> out_buffer,
data++;
}
- const InstructionDesc& idesc = instruction_table.Get().Get(current);
+ const InstructionDesc& idesc = instruction_table_->Get(current);
byte_size_operand_ = idesc.byte_size_operation;
switch (idesc.type) {
case ZERO_OPERANDS_INSTR:
@@ -1682,7 +1684,7 @@ int DisassemblerX64::InstructionDecode(v8::internal::Vector<char> out_buffer,
default:
UNREACHABLE();
}
- AppendToBuffer("test%c rax,0x%"V8_PTR_PREFIX"x",
+ AppendToBuffer("test%c rax,0x%" V8_PTR_PREFIX "x",
operand_size_code(),
value);
break;
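
The whitespace added around V8_PTR_PREFIX above is most likely a C++11 compatibility fix: a string literal immediately followed by an identifier is parsed as a user-defined literal suffix, so the macro has to be a separate token. A small illustration, with an assumed definition of the macro:

#include <cstdint>
#include <cstdio>

#define V8_PTR_PREFIX "l"  // illustrative only; the real value is platform dependent

int main() {
  uintptr_t value = 0xdeadbeef;
  // Adjacent string literals are concatenated after macro expansion.
  std::printf("test rax,0x%" V8_PTR_PREFIX "x\n", value);
  return 0;
}
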
diff --git a/deps/v8/src/x64/full-codegen-x64.cc b/deps/v8/src/x64/full-codegen-x64.cc
index d0c4f4dd89..a3e42eb505 100644
--- a/deps/v8/src/x64/full-codegen-x64.cc
+++ b/deps/v8/src/x64/full-codegen-x64.cc
@@ -34,6 +34,7 @@
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
+#include "isolate-inl.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"
@@ -100,11 +101,6 @@ class JumpPatchSite BASE_EMBEDDED {
};
-int FullCodeGenerator::self_optimization_header_size() {
- return 20;
-}
-
-
// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them. The actual argument count matches the
@@ -122,32 +118,11 @@ void FullCodeGenerator::Generate() {
CompilationInfo* info = info_;
handler_table_ =
isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
+ profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
+ Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget)));
SetFunctionPosition(function());
Comment cmnt(masm_, "[ function compiled by full code generator");
- // We can optionally optimize based on counters rather than statistical
- // sampling.
- if (info->ShouldSelfOptimize()) {
- if (FLAG_trace_opt_verbose) {
- PrintF("[adding self-optimization header to %s]\n",
- *info->function()->debug_name()->ToCString());
- }
- has_self_optimization_header_ = true;
- MaybeObject* maybe_cell = isolate()->heap()->AllocateJSGlobalPropertyCell(
- Smi::FromInt(Compiler::kCallsUntilPrimitiveOpt));
- JSGlobalPropertyCell* cell;
- if (maybe_cell->To(&cell)) {
- __ movq(rax, Handle<JSGlobalPropertyCell>(cell),
- RelocInfo::EMBEDDED_OBJECT);
- __ SmiAddConstant(FieldOperand(rax, JSGlobalPropertyCell::kValueOffset),
- Smi::FromInt(-1));
- Handle<Code> compile_stub(
- isolate()->builtins()->builtin(Builtins::kLazyRecompile));
- __ j(zero, compile_stub, RelocInfo::CODE_TARGET);
- ASSERT(masm_->pc_offset() == self_optimization_header_size());
- }
- }
-
#ifdef DEBUG
if (strlen(FLAG_stop_at) > 0 &&
info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
@@ -282,11 +257,11 @@ void FullCodeGenerator::Generate() {
// For named function expressions, declare the function name as a
// constant.
if (scope()->is_function_scope() && scope()->function() != NULL) {
- VariableProxy* proxy = scope()->function();
- ASSERT(proxy->var()->mode() == CONST ||
- proxy->var()->mode() == CONST_HARMONY);
- ASSERT(proxy->var()->location() != Variable::UNALLOCATED);
- EmitDeclaration(proxy, proxy->var()->mode(), NULL);
+ VariableDeclaration* function = scope()->function();
+ ASSERT(function->proxy()->var()->mode() == CONST ||
+ function->proxy()->var()->mode() == CONST_HARMONY);
+ ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
+ VisitVariableDeclaration(function);
}
VisitDeclarations(scope()->declarations());
}
@@ -322,14 +297,60 @@ void FullCodeGenerator::ClearAccumulator() {
}
+void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
+ __ movq(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
+ __ SmiAddConstant(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset),
+ Smi::FromInt(-delta));
+}
+
+
+void FullCodeGenerator::EmitProfilingCounterReset() {
+ int reset_value = FLAG_interrupt_budget;
+ if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
+ // Self-optimization is a one-off thing; if it fails, don't try again.
+ reset_value = Smi::kMaxValue;
+ }
+ if (isolate()->IsDebuggerActive()) {
+ // Detect debug break requests as soon as possible.
+ reset_value = 10;
+ }
+ __ movq(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
+ __ movq(kScratchRegister,
+ reinterpret_cast<uint64_t>(Smi::FromInt(reset_value)),
+ RelocInfo::NONE);
+ __ movq(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset),
+ kScratchRegister);
+}
+
+
+static const int kMaxBackEdgeWeight = 127;
+static const int kBackEdgeDistanceDivisor = 162;
+
+
void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
Label* back_edge_target) {
Comment cmnt(masm_, "[ Stack check");
Label ok;
- __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
- __ j(above_equal, &ok, Label::kNear);
- StackCheckStub stub;
- __ CallStub(&stub);
+
+ if (FLAG_count_based_interrupts) {
+ int weight = 1;
+ if (FLAG_weighted_back_edges) {
+ ASSERT(back_edge_target->is_bound());
+ int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
+ weight = Min(kMaxBackEdgeWeight,
+ Max(1, distance / kBackEdgeDistanceDivisor));
+ }
+ EmitProfilingCounterDecrement(weight);
+ __ j(positive, &ok, Label::kNear);
+ InterruptStub stub;
+ __ CallStub(&stub);
+ } else {
+ __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
+ __ j(above_equal, &ok, Label::kNear);
+ StackCheckStub stub;
+ __ CallStub(&stub);
+ }
+
// Record a mapping of this PC offset to the OSR id. This is used to find
// the AST id from the unoptimized code in order to use it as a key into
// the deoptimization input data found in the optimized code.
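
A rough C++ model (not V8 source) of the count-based back-edge check emitted above: each back edge subtracts a weight derived from how much code it jumps over, and the interrupt stub runs once the budget in the profiling counter cell is exhausted.

#include <algorithm>
#include <cstdio>

static const int kMaxBackEdgeWeight = 127;
static const int kBackEdgeDistanceDivisor = 162;

struct ProfilingCounter {
  int value;  // stands in for the smi held in the JSGlobalPropertyCell

  // EmitProfilingCounterDecrement: subtract the back-edge weight.
  bool DecrementAndCheck(int distance_in_bytes) {
    int weight = std::min(kMaxBackEdgeWeight,
                          std::max(1, distance_in_bytes / kBackEdgeDistanceDivisor));
    value -= weight;
    return value > 0;  // emitted code: __ j(positive, &ok)
  }
};

int main() {
  ProfilingCounter counter = {1000};  // placeholder budget, not the real flag default
  int back_edges = 0;
  while (counter.DecrementAndCheck(/*distance_in_bytes=*/500)) ++back_edges;
  std::printf("interrupt stub would run after %d back edges\n", back_edges + 1);
  return 0;
}
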
@@ -342,6 +363,10 @@ void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
ASSERT(loop_depth() > 0);
__ testl(rax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));
+ if (FLAG_count_based_interrupts) {
+ EmitProfilingCounterReset();
+ }
+
__ bind(&ok);
PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
// Record a mapping of the OSR id to this PC. This is used if the OSR
@@ -361,6 +386,31 @@ void FullCodeGenerator::EmitReturnSequence() {
__ push(rax);
__ CallRuntime(Runtime::kTraceExit, 1);
}
+ if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
+ // Pretend that the exit is a backwards jump to the entry.
+ int weight = 1;
+ if (info_->ShouldSelfOptimize()) {
+ weight = FLAG_interrupt_budget / FLAG_self_opt_count;
+ } else if (FLAG_weighted_back_edges) {
+ int distance = masm_->pc_offset();
+ weight = Min(kMaxBackEdgeWeight,
+ Max(1, distance / kBackEdgeDistanceDivisor));
+ }
+ EmitProfilingCounterDecrement(weight);
+ Label ok;
+ __ j(positive, &ok, Label::kNear);
+ __ push(rax);
+ if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
+ __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
+ __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
+ } else {
+ InterruptStub stub;
+ __ CallStub(&stub);
+ }
+ __ pop(rax);
+ EmitProfilingCounterReset();
+ __ bind(&ok);
+ }
#ifdef DEBUG
// Add a label for checking the size of the code used for returning.
Label check_exit_codesize;
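
A small C++ sketch (not V8 source) of the reset policy in EmitProfilingCounterReset, which the return sequence above relies on: the budget is normally FLAG_interrupt_budget, is effectively disabled when a failed self-optimization should not be retried, and is kept tiny while the debugger is active so breaks are detected quickly.

#include <climits>
#include <cstdio>

int ChooseResetValue(int interrupt_budget, bool should_self_optimize,
                     bool retry_self_opt, bool debugger_active) {
  int reset_value = interrupt_budget;
  if (should_self_optimize && !retry_self_opt) {
    reset_value = INT_MAX;  // stands in for Smi::kMaxValue: self-opt is one-shot
  }
  if (debugger_active) {
    reset_value = 10;       // detect debug break requests as soon as possible
  }
  return reset_value;
}

int main() {
  const int interrupt_budget = 5900;  // placeholder value, not the real flag default
  std::printf("plain: %d\n", ChooseResetValue(interrupt_budget, false, true, false));
  std::printf("one-shot self-opt: %d\n", ChooseResetValue(interrupt_budget, true, false, false));
  std::printf("debugger active: %d\n", ChooseResetValue(interrupt_budget, false, true, true));
  return 0;
}
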
@@ -609,7 +659,7 @@ void FullCodeGenerator::DoTest(Expression* condition,
Label* fall_through) {
ToBooleanStub stub(result_register());
__ push(result_register());
- __ CallStub(&stub);
+ __ CallStub(&stub, condition->test_id());
__ testq(result_register(), result_register());
// The stub returns nonzero for true.
Split(not_zero, if_true, if_false, fall_through);
@@ -703,61 +753,52 @@ void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
}
-void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
- VariableMode mode,
- FunctionLiteral* function) {
+void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
+ // The variable in the declaration always resides in the current function
+ // context.
+ ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
+ if (FLAG_debug_code) {
+ // Check that we're not inside a with or catch context.
+ __ movq(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
+ __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
+ __ Check(not_equal, "Declaration in with context.");
+ __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
+ __ Check(not_equal, "Declaration in catch context.");
+ }
+}
+
+
+void FullCodeGenerator::VisitVariableDeclaration(
+ VariableDeclaration* declaration) {
// If it was not possible to allocate the variable at compile time, we
// need to "declare" it at runtime to make sure it actually exists in the
// local context.
+ VariableProxy* proxy = declaration->proxy();
+ VariableMode mode = declaration->mode();
Variable* variable = proxy->var();
- bool binding_needs_init = (function == NULL) &&
- (mode == CONST || mode == CONST_HARMONY || mode == LET);
+ bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
switch (variable->location()) {
case Variable::UNALLOCATED:
- ++global_count_;
+ globals_->Add(variable->name(), zone());
+ globals_->Add(variable->binding_needs_init()
+ ? isolate()->factory()->the_hole_value()
+ : isolate()->factory()->undefined_value(),
+ zone());
break;
case Variable::PARAMETER:
case Variable::LOCAL:
- if (function != NULL) {
- Comment cmnt(masm_, "[ Declaration");
- VisitForAccumulatorValue(function);
- __ movq(StackOperand(variable), result_register());
- } else if (binding_needs_init) {
- Comment cmnt(masm_, "[ Declaration");
+ if (hole_init) {
+ Comment cmnt(masm_, "[ VariableDeclaration");
__ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
__ movq(StackOperand(variable), kScratchRegister);
}
break;
case Variable::CONTEXT:
- // The variable in the decl always resides in the current function
- // context.
- ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
- if (FLAG_debug_code) {
- // Check that we're not inside a with or catch context.
- __ movq(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
- __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
- __ Check(not_equal, "Declaration in with context.");
- __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
- __ Check(not_equal, "Declaration in catch context.");
- }
- if (function != NULL) {
- Comment cmnt(masm_, "[ Declaration");
- VisitForAccumulatorValue(function);
- __ movq(ContextOperand(rsi, variable->index()), result_register());
- int offset = Context::SlotOffset(variable->index());
- // We know that we have written a function, which is not a smi.
- __ RecordWriteContextSlot(rsi,
- offset,
- result_register(),
- rcx,
- kDontSaveFPRegs,
- EMIT_REMEMBERED_SET,
- OMIT_SMI_CHECK);
- PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
- } else if (binding_needs_init) {
- Comment cmnt(masm_, "[ Declaration");
+ if (hole_init) {
+ Comment cmnt(masm_, "[ VariableDeclaration");
+ EmitDebugCheckDeclarationContext(variable);
__ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
__ movq(ContextOperand(rsi, variable->index()), kScratchRegister);
// No write barrier since the hole value is in old space.
@@ -766,14 +807,12 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
break;
case Variable::LOOKUP: {
- Comment cmnt(masm_, "[ Declaration");
+ Comment cmnt(masm_, "[ VariableDeclaration");
__ push(rsi);
__ Push(variable->name());
// Declaration nodes are always introduced in one of four modes.
- ASSERT(mode == VAR ||
- mode == CONST ||
- mode == CONST_HARMONY ||
- mode == LET);
+ ASSERT(mode == VAR || mode == LET ||
+ mode == CONST || mode == CONST_HARMONY);
PropertyAttributes attr =
(mode == CONST || mode == CONST_HARMONY) ? READ_ONLY : NONE;
__ Push(Smi::FromInt(attr));
@@ -781,9 +820,7 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
// Note: For variables we must not push an initial value (such as
// 'undefined') because we may have a (legal) redeclaration and we
// must not destroy the current value.
- if (function != NULL) {
- VisitForStackValue(function);
- } else if (binding_needs_init) {
+ if (hole_init) {
__ PushRoot(Heap::kTheHoleValueRootIndex);
} else {
__ Push(Smi::FromInt(0)); // Indicates no initial value.
@@ -795,6 +832,119 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
}
+void FullCodeGenerator::VisitFunctionDeclaration(
+ FunctionDeclaration* declaration) {
+ VariableProxy* proxy = declaration->proxy();
+ Variable* variable = proxy->var();
+ switch (variable->location()) {
+ case Variable::UNALLOCATED: {
+ globals_->Add(variable->name(), zone());
+ Handle<SharedFunctionInfo> function =
+ Compiler::BuildFunctionInfo(declaration->fun(), script());
+ // Check for stack-overflow exception.
+ if (function.is_null()) return SetStackOverflow();
+ globals_->Add(function, zone());
+ break;
+ }
+
+ case Variable::PARAMETER:
+ case Variable::LOCAL: {
+ Comment cmnt(masm_, "[ FunctionDeclaration");
+ VisitForAccumulatorValue(declaration->fun());
+ __ movq(StackOperand(variable), result_register());
+ break;
+ }
+
+ case Variable::CONTEXT: {
+ Comment cmnt(masm_, "[ FunctionDeclaration");
+ EmitDebugCheckDeclarationContext(variable);
+ VisitForAccumulatorValue(declaration->fun());
+ __ movq(ContextOperand(rsi, variable->index()), result_register());
+ int offset = Context::SlotOffset(variable->index());
+ // We know that we have written a function, which is not a smi.
+ __ RecordWriteContextSlot(rsi,
+ offset,
+ result_register(),
+ rcx,
+ kDontSaveFPRegs,
+ EMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
+ break;
+ }
+
+ case Variable::LOOKUP: {
+ Comment cmnt(masm_, "[ FunctionDeclaration");
+ __ push(rsi);
+ __ Push(variable->name());
+ __ Push(Smi::FromInt(NONE));
+ VisitForStackValue(declaration->fun());
+ __ CallRuntime(Runtime::kDeclareContextSlot, 4);
+ break;
+ }
+ }
+}
+
+
+void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
+ VariableProxy* proxy = declaration->proxy();
+ Variable* variable = proxy->var();
+ Handle<JSModule> instance = declaration->module()->interface()->Instance();
+ ASSERT(!instance.is_null());
+
+ switch (variable->location()) {
+ case Variable::UNALLOCATED: {
+ Comment cmnt(masm_, "[ ModuleDeclaration");
+ globals_->Add(variable->name(), zone());
+ globals_->Add(instance, zone());
+ Visit(declaration->module());
+ break;
+ }
+
+ case Variable::CONTEXT: {
+ Comment cmnt(masm_, "[ ModuleDeclaration");
+ EmitDebugCheckDeclarationContext(variable);
+ __ Move(ContextOperand(rsi, variable->index()), instance);
+ Visit(declaration->module());
+ break;
+ }
+
+ case Variable::PARAMETER:
+ case Variable::LOCAL:
+ case Variable::LOOKUP:
+ UNREACHABLE();
+ }
+}
+
+
+void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
+ VariableProxy* proxy = declaration->proxy();
+ Variable* variable = proxy->var();
+ switch (variable->location()) {
+ case Variable::UNALLOCATED:
+ // TODO(rossberg)
+ break;
+
+ case Variable::CONTEXT: {
+ Comment cmnt(masm_, "[ ImportDeclaration");
+ EmitDebugCheckDeclarationContext(variable);
+ // TODO(rossberg)
+ break;
+ }
+
+ case Variable::PARAMETER:
+ case Variable::LOCAL:
+ case Variable::LOOKUP:
+ UNREACHABLE();
+ }
+}
+
+
+void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
+ // TODO(rossberg)
+}
+
+
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
// Call the runtime to declare the globals.
__ push(rsi); // The context is the first argument.
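
A simplified C++ sketch (not V8 source) of how the UNALLOCATED cases above fill globals_: each declaration appends a (name, initial value) pair to a flat list that DeclareGlobals later hands to the runtime in one call. The types below are stand-ins.

#include <string>
#include <vector>

struct Value { std::string debug_name; };  // stand-in for a Handle<Object>

static const Value kTheHole   = {"the_hole"};
static const Value kUndefined = {"undefined"};

void AddVariableDeclaration(std::vector<Value>* globals,
                            const std::string& name, bool needs_hole_init) {
  globals->push_back(Value{name});
  globals->push_back(needs_hole_init ? kTheHole : kUndefined);
}

void AddFunctionDeclaration(std::vector<Value>* globals,
                            const std::string& name, const Value& shared_info) {
  globals->push_back(Value{name});
  globals->push_back(shared_info);  // compiled SharedFunctionInfo in the real code
}
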
@@ -856,7 +1006,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// Record position before stub call for type feedback.
SetSourcePosition(clause->position());
Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
- __ call(ic, RelocInfo::CODE_TARGET, clause->CompareId());
+ CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
patch_site.EmitPatchInfo();
__ testq(rax, rax);
@@ -1155,7 +1305,7 @@ void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
? RelocInfo::CODE_TARGET
: RelocInfo::CODE_TARGET_CONTEXT;
- __ call(ic, mode);
+ CallIC(ic, mode);
}
@@ -1236,7 +1386,7 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
__ Move(rcx, var->name());
__ movq(rax, GlobalObjectOperand());
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
+ CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
context()->Plug(rax);
break;
}
@@ -1418,7 +1568,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
// Mark all computed expressions that are bound to a key that
// is shadowed by a later occurrence of the same key. For the
// marked expressions, no store code is emitted.
- expr->CalculateEmitStore();
+ expr->CalculateEmitStore(zone());
AccessorTable accessor_table(isolate()->zone());
for (int i = 0; i < expr->properties()->length(); i++) {
@@ -1446,7 +1596,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
- __ call(ic, RelocInfo::CODE_TARGET, key->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, key->id());
PrepareForBailoutForId(key->id(), NO_REGISTERS);
} else {
VisitForEffect(value);
@@ -1510,7 +1660,8 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
ASSERT_EQ(2, constant_elements->length());
ElementsKind constant_elements_kind =
static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
- bool has_constant_fast_elements = constant_elements_kind == FAST_ELEMENTS;
+ bool has_constant_fast_elements =
+ IsFastObjectElementsKind(constant_elements_kind);
Handle<FixedArrayBase> constant_elements_values(
FixedArrayBase::cast(constant_elements->get(1)));
@@ -1521,7 +1672,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
Heap* heap = isolate()->heap();
if (has_constant_fast_elements &&
constant_elements_values->map() == heap->fixed_cow_array_map()) {
- // If the elements are already FAST_ELEMENTS, the boilerplate cannot
+ // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
// change, so it's possible to specialize the stub in advance.
__ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
FastCloneShallowArrayStub stub(
@@ -1533,10 +1684,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
__ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
} else {
- ASSERT(constant_elements_kind == FAST_ELEMENTS ||
- constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
+ ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
FLAG_smi_only_arrays);
- // If the elements are already FAST_ELEMENTS, the boilerplate cannot
+ // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
// change, so it's possible to specialize the stub in advance.
FastCloneShallowArrayStub::Mode mode = has_constant_fast_elements
? FastCloneShallowArrayStub::CLONE_ELEMENTS
@@ -1564,9 +1714,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
}
VisitForAccumulatorValue(subexpr);
- if (constant_elements_kind == FAST_ELEMENTS) {
- // Fast-case array literal with ElementsKind of FAST_ELEMENTS, they cannot
- // transition and don't need to call the runtime stub.
+ if (IsFastObjectElementsKind(constant_elements_kind)) {
+ // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS, they
+ // cannot transition and don't need to call the runtime stub.
int offset = FixedArray::kHeaderSize + (i * kPointerSize);
__ movq(rbx, Operand(rsp, 0)); // Copy of array literal.
__ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
@@ -1716,14 +1866,14 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
Literal* key = prop->key()->AsLiteral();
__ Move(rcx, key->handle());
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET, prop->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
}
void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
SetSourcePosition(prop->position());
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET, prop->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
}
@@ -1745,7 +1895,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
__ bind(&stub_call);
__ movq(rax, rcx);
BinaryOpStub stub(op, mode);
- __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
patch_site.EmitPatchInfo();
__ jmp(&done, Label::kNear);
@@ -1794,7 +1944,7 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
__ pop(rdx);
BinaryOpStub stub(op, mode);
JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
- __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
patch_site.EmitPatchInfo();
context()->Plug(rax);
}
@@ -1835,7 +1985,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
- __ call(ic);
+ CallIC(ic);
break;
}
case KEYED_PROPERTY: {
@@ -1848,7 +1998,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize()
: isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
- __ call(ic);
+ CallIC(ic);
break;
}
}
@@ -1865,7 +2015,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
- __ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
+ CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
} else if (op == Token::INIT_CONST) {
// Const initializers need a write barrier.
ASSERT(!var->IsParameter()); // No const parameters.
@@ -1973,7 +2123,7 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
- __ call(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
@@ -2013,7 +2163,7 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize()
: isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
- __ call(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
@@ -2047,6 +2197,14 @@ void FullCodeGenerator::VisitProperty(Property* expr) {
}
+void FullCodeGenerator::CallIC(Handle<Code> code,
+ RelocInfo::Mode rmode,
+ unsigned ast_id) {
+ ic_total_count_++;
+ __ call(code, rmode, ast_id);
+}
+
+
void FullCodeGenerator::EmitCallWithIC(Call* expr,
Handle<Object> name,
RelocInfo::Mode mode) {
@@ -2064,7 +2222,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
// Call the IC initialization code.
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
- __ call(ic, mode, expr->id());
+ CallIC(ic, mode, expr->id());
RecordJSReturnSite(expr);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -2097,7 +2255,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
Handle<Code> ic =
isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
__ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize)); // Key.
- __ call(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
RecordJSReturnSite(expr);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -2116,9 +2274,21 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
}
// Record source position for debugger.
SetSourcePosition(expr->position());
+
+ // Record call targets in unoptimized code, but not in the snapshot.
+ if (!Serializer::enabled()) {
+ flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
+ Handle<Object> uninitialized =
+ TypeFeedbackCells::UninitializedSentinel(isolate());
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
+ RecordTypeFeedbackCell(expr->id(), cell);
+ __ Move(rbx, cell);
+ }
+
CallFunctionStub stub(arg_count, flags);
__ movq(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
- __ CallStub(&stub);
+ __ CallStub(&stub, expr->id());
RecordJSReturnSite(expr);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -3641,7 +3811,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
- __ call(ic, mode, expr->id());
+ CallIC(ic, mode, expr->id());
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
} else {
@@ -3799,7 +3969,7 @@ void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
// accumulator register rax.
VisitForAccumulatorValue(expr->expression());
SetSourcePosition(expr->position());
- __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
context()->Plug(rax);
}
@@ -3920,7 +4090,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ movq(rdx, rax);
__ Move(rax, Smi::FromInt(1));
}
- __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
patch_site.EmitPatchInfo();
__ bind(&done);
@@ -3954,7 +4124,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->StoreIC_Initialize()
: isolate()->builtins()->StoreIC_Initialize_Strict();
- __ call(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -3971,7 +4141,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize()
: isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
- __ call(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -3998,7 +4168,7 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
// Use a regular load, not a contextual load, to avoid a reference
// error.
- __ call(ic);
+ CallIC(ic);
PrepareForBailout(expr, TOS_REG);
context()->Plug(rax);
} else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
@@ -4178,7 +4348,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
// Record position and call the compare IC.
SetSourcePosition(expr->position());
Handle<Code> ic = CompareIC::GetUninitialized(op);
- __ call(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
patch_site.EmitPatchInfo();
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
@@ -4258,7 +4428,8 @@ void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
Scope* declaration_scope = scope()->DeclarationScope();
- if (declaration_scope->is_global_scope()) {
+ if (declaration_scope->is_global_scope() ||
+ declaration_scope->is_module_scope()) {
// Contexts nested in the global context have a canonical empty function
// as their closure, not the anonymous closure containing the global
// code. Pass a smi sentinel and let the runtime look up the empty
@@ -4289,15 +4460,50 @@ void FullCodeGenerator::EnterFinallyBlock() {
__ subq(rdx, rcx);
__ Integer32ToSmi(rdx, rdx);
__ push(rdx);
+
// Store result register while executing finally block.
__ push(result_register());
+
+ // Store pending message while executing finally block.
+ ExternalReference pending_message_obj =
+ ExternalReference::address_of_pending_message_obj(isolate());
+ __ Load(rdx, pending_message_obj);
+ __ push(rdx);
+
+ ExternalReference has_pending_message =
+ ExternalReference::address_of_has_pending_message(isolate());
+ __ Load(rdx, has_pending_message);
+ __ push(rdx);
+
+ ExternalReference pending_message_script =
+ ExternalReference::address_of_pending_message_script(isolate());
+ __ Load(rdx, pending_message_script);
+ __ push(rdx);
}
void FullCodeGenerator::ExitFinallyBlock() {
ASSERT(!result_register().is(rdx));
ASSERT(!result_register().is(rcx));
+ // Restore pending message from stack.
+ __ pop(rdx);
+ ExternalReference pending_message_script =
+ ExternalReference::address_of_pending_message_script(isolate());
+ __ Store(pending_message_script, rdx);
+
+ __ pop(rdx);
+ ExternalReference has_pending_message =
+ ExternalReference::address_of_has_pending_message(isolate());
+ __ Store(has_pending_message, rdx);
+
+ __ pop(rdx);
+ ExternalReference pending_message_obj =
+ ExternalReference::address_of_pending_message_obj(isolate());
+ __ Store(pending_message_obj, rdx);
+
+ // Restore result register from stack.
__ pop(result_register());
+
// Uncook return address.
__ pop(rdx);
__ SmiToInteger32(rdx, rdx);
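
A minimal C++ sketch (not V8 source) of the finally-block bookkeeping above: the pending message object, flag, and script are pushed on entry and popped in reverse order on exit, so any pending exception message recorded before the finally block is restored after it runs.

#include <cstdint>
#include <stack>

struct PendingMessageState {
  intptr_t message_obj;
  intptr_t has_message;
  intptr_t message_script;
};

void EnterFinally(std::stack<intptr_t>* s, const PendingMessageState& st) {
  s->push(st.message_obj);      // pushed first ...
  s->push(st.has_message);
  s->push(st.message_script);   // ... popped first on exit
}

void ExitFinally(std::stack<intptr_t>* s, PendingMessageState* st) {
  st->message_script = s->top(); s->pop();
  st->has_message    = s->top(); s->pop();
  st->message_obj    = s->top(); s->pop();
}
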
diff --git a/deps/v8/src/x64/ic-x64.cc b/deps/v8/src/x64/ic-x64.cc
index 0632ce439f..82fdb3cece 100644
--- a/deps/v8/src/x64/ic-x64.cc
+++ b/deps/v8/src/x64/ic-x64.cc
@@ -769,25 +769,25 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ CompareRoot(r9, Heap::kHeapNumberMapRootIndex);
__ j(not_equal, &non_double_value);
- // Value is a double. Transition FAST_SMI_ONLY_ELEMENTS ->
+ // Value is a double. Transition FAST_SMI_ELEMENTS ->
// FAST_DOUBLE_ELEMENTS and complete the store.
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
FAST_DOUBLE_ELEMENTS,
rbx,
rdi,
&slow);
- ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &slow);
+ ElementsTransitionGenerator::GenerateSmiToDouble(masm, &slow);
__ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
__ jmp(&fast_double_without_map_check);
__ bind(&non_double_value);
- // Value is not a double, FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
FAST_ELEMENTS,
rbx,
rdi,
&slow);
- ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm);
+ ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm);
__ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
__ jmp(&finish_object_store);
@@ -1642,7 +1642,7 @@ void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
// Must return the modified receiver in eax.
if (!FLAG_trace_elements_transitions) {
Label fail;
- ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &fail);
+ ElementsTransitionGenerator::GenerateSmiToDouble(masm, &fail);
__ movq(rax, rdx);
__ Ret();
__ bind(&fail);
@@ -1741,11 +1741,11 @@ void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
// Activate inlined smi code.
if (previous_state == UNINITIALIZED) {
- PatchInlinedSmiCode(address());
+ PatchInlinedSmiCode(address(), ENABLE_INLINED_SMI_CHECK);
}
}
-void PatchInlinedSmiCode(Address address) {
+void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
// The address of the instruction following the call.
Address test_instruction_address =
address + Assembler::kCallTargetAddressOffset;
@@ -1766,14 +1766,18 @@ void PatchInlinedSmiCode(Address address) {
address, test_instruction_address, delta);
}
- // Patch with a short conditional jump. There must be a
- // short jump-if-carry/not-carry at this position.
+ // Patch with a short conditional jump. Enabling means switching from a short
+ // jump-if-carry/not-carry to jump-if-zero/not-zero, whereas disabling is the
+ // reverse operation of that.
Address jmp_address = test_instruction_address - delta;
- ASSERT(*jmp_address == Assembler::kJncShortOpcode ||
- *jmp_address == Assembler::kJcShortOpcode);
- Condition cc = *jmp_address == Assembler::kJncShortOpcode
- ? not_zero
- : zero;
+ ASSERT((check == ENABLE_INLINED_SMI_CHECK)
+ ? (*jmp_address == Assembler::kJncShortOpcode ||
+ *jmp_address == Assembler::kJcShortOpcode)
+ : (*jmp_address == Assembler::kJnzShortOpcode ||
+ *jmp_address == Assembler::kJzShortOpcode));
+ Condition cc = (check == ENABLE_INLINED_SMI_CHECK)
+ ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero)
+ : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry);
*jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
}
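
A C++ sketch (not V8 source) of the one-byte patch performed by PatchInlinedSmiCode above, using the usual x86 short-Jcc encoding (0x70 | condition code): enabling the inlined smi check turns a carry-based jump into the matching zero-based one, and disabling reverses it.

#include <cstdint>

typedef uint8_t byte;

static const byte kJccShortPrefix = 0x70;
static const byte kCarry = 0x02, kNotCarry = 0x03;  // jc / jnc
static const byte kZero  = 0x04, kNotZero  = 0x05;  // jz / jnz

enum InlinedSmiCheck { ENABLE_INLINED_SMI_CHECK, DISABLE_INLINED_SMI_CHECK };

void PatchJump(byte* jmp_address, InlinedSmiCheck check) {
  byte cc;
  if (check == ENABLE_INLINED_SMI_CHECK) {
    // jnc -> jnz, jc -> jz
    cc = (*jmp_address == (kJccShortPrefix | kNotCarry)) ? kNotZero : kZero;
  } else {
    // jnz -> jnc, jz -> jc
    cc = (*jmp_address == (kJccShortPrefix | kNotZero)) ? kNotCarry : kCarry;
  }
  *jmp_address = static_cast<byte>(kJccShortPrefix | cc);
}
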
diff --git a/deps/v8/src/x64/lithium-codegen-x64.cc b/deps/v8/src/x64/lithium-codegen-x64.cc
index 2ba2c57f40..bc8f84864e 100644
--- a/deps/v8/src/x64/lithium-codegen-x64.cc
+++ b/deps/v8/src/x64/lithium-codegen-x64.cc
@@ -523,14 +523,15 @@ void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
++jsframe_count;
}
}
- Translation translation(&translations_, frame_count, jsframe_count);
+ Translation translation(&translations_, frame_count, jsframe_count,
+ environment->zone());
WriteTranslation(environment, &translation);
int deoptimization_index = deoptimizations_.length();
int pc_offset = masm()->pc_offset();
environment->Register(deoptimization_index,
translation.index(),
(mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
- deoptimizations_.Add(environment);
+ deoptimizations_.Add(environment, environment->zone());
}
}
@@ -552,7 +553,7 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
// jump entry if this is the case.
if (jump_table_.is_empty() ||
jump_table_.last().address != entry) {
- jump_table_.Add(JumpTableEntry(entry));
+ jump_table_.Add(JumpTableEntry(entry), zone());
}
__ j(cc, &jump_table_.last().label);
}
@@ -597,7 +598,7 @@ int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
for (int i = 0; i < deoptimization_literals_.length(); ++i) {
if (deoptimization_literals_[i].is_identical_to(literal)) return i;
}
- deoptimization_literals_.Add(literal);
+ deoptimization_literals_.Add(literal, zone());
return result;
}
@@ -644,14 +645,14 @@ void LCodeGen::RecordSafepoint(
for (int i = 0; i < operands->length(); i++) {
LOperand* pointer = operands->at(i);
if (pointer->IsStackSlot()) {
- safepoint.DefinePointerSlot(pointer->index());
+ safepoint.DefinePointerSlot(pointer->index(), zone());
} else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
- safepoint.DefinePointerRegister(ToRegister(pointer));
+ safepoint.DefinePointerRegister(ToRegister(pointer), zone());
}
}
if (kind & Safepoint::kWithRegisters) {
// Register rsi always contains a pointer to the context.
- safepoint.DefinePointerRegister(rsi);
+ safepoint.DefinePointerRegister(rsi, zone());
}
}
@@ -663,7 +664,7 @@ void LCodeGen::RecordSafepoint(LPointerMap* pointers,
void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) {
- LPointerMap empty_pointers(RelocInfo::kNoPosition);
+ LPointerMap empty_pointers(RelocInfo::kNoPosition, zone());
RecordSafepoint(&empty_pointers, deopt_mode);
}
@@ -1941,7 +1942,7 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
DeferredInstanceOfKnownGlobal* deferred;
- deferred = new DeferredInstanceOfKnownGlobal(this, instr);
+ deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
Label done, false_result;
Register object = ToRegister(instr->InputAt(0));
@@ -2016,8 +2017,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
RECORD_SAFEPOINT_WITH_REGISTERS,
2);
ASSERT(delta == masm_->SizeOfCodeGeneratedSince(map_check));
- ASSERT(instr->HasDeoptimizationEnvironment());
- LEnvironment* env = instr->deoptimization_environment();
+ LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
// Move result to a register that survives the end of the
// PushSafepointRegisterScope.
@@ -2195,12 +2195,12 @@ void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
Register object,
Handle<Map> type,
- Handle<String> name) {
+ Handle<String> name,
+ LEnvironment* env) {
LookupResult lookup(isolate());
type->LookupInDescriptors(NULL, *name, &lookup);
- ASSERT(lookup.IsFound() &&
- (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
- if (lookup.type() == FIELD) {
+ ASSERT(lookup.IsFound() || lookup.IsCacheable());
+ if (lookup.IsFound() && lookup.type() == FIELD) {
int index = lookup.GetLocalFieldIndexFromMap(*type);
int offset = index * kPointerSize;
if (index < 0) {
@@ -2212,53 +2212,87 @@ void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
__ movq(result, FieldOperand(object, JSObject::kPropertiesOffset));
__ movq(result, FieldOperand(result, offset + FixedArray::kHeaderSize));
}
- } else {
+ } else if (lookup.IsFound() && lookup.type() == CONSTANT_FUNCTION) {
Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
__ LoadHeapObject(result, function);
+ } else {
+ // Negative lookup.
+ // Check prototypes.
+ HeapObject* current = HeapObject::cast((*type)->prototype());
+ Heap* heap = type->GetHeap();
+ while (current != heap->null_value()) {
+ Handle<HeapObject> link(current);
+ __ LoadHeapObject(result, link);
+ __ Cmp(FieldOperand(result, HeapObject::kMapOffset),
+ Handle<Map>(JSObject::cast(current)->map()));
+ DeoptimizeIf(not_equal, env);
+ current = HeapObject::cast(current->map()->prototype());
+ }
+ __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
}
}
+// Check for cases where EmitLoadFieldOrConstantFunction needs to walk the
+// prototype chain, which causes unbounded code generation.
+static bool CompactEmit(
+ SmallMapList* list, Handle<String> name, int i, Isolate* isolate) {
+ LookupResult lookup(isolate);
+ Handle<Map> map = list->at(i);
+ map->LookupInDescriptors(NULL, *name, &lookup);
+ return lookup.IsFound() &&
+ (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION);
+}
+
+
void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
Register object = ToRegister(instr->object());
Register result = ToRegister(instr->result());
int map_count = instr->hydrogen()->types()->length();
- Handle<String> name = instr->hydrogen()->name();
+ bool need_generic = instr->hydrogen()->need_generic();
- if (map_count == 0) {
- ASSERT(instr->hydrogen()->need_generic());
- __ Move(rcx, instr->hydrogen()->name());
- Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
- } else {
- Label done;
- for (int i = 0; i < map_count - 1; ++i) {
- Handle<Map> map = instr->hydrogen()->types()->at(i);
- Label next;
- __ Cmp(FieldOperand(object, HeapObject::kMapOffset), map);
- __ j(not_equal, &next, Label::kNear);
- EmitLoadFieldOrConstantFunction(result, object, map, name);
- __ jmp(&done, Label::kNear);
- __ bind(&next);
+ if (map_count == 0 && !need_generic) {
+ DeoptimizeIf(no_condition, instr->environment());
+ return;
+ }
+ Handle<String> name = instr->hydrogen()->name();
+ Label done;
+ bool all_are_compact = true;
+ for (int i = 0; i < map_count; ++i) {
+ if (!CompactEmit(instr->hydrogen()->types(), name, i, isolate())) {
+ all_are_compact = false;
+ break;
}
- Handle<Map> map = instr->hydrogen()->types()->last();
- __ Cmp(FieldOperand(object, HeapObject::kMapOffset), map);
- if (instr->hydrogen()->need_generic()) {
- Label generic;
- __ j(not_equal, &generic, Label::kNear);
- EmitLoadFieldOrConstantFunction(result, object, map, name);
- __ jmp(&done, Label::kNear);
- __ bind(&generic);
- __ Move(rcx, instr->hydrogen()->name());
- Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
- } else {
+ }
+ for (int i = 0; i < map_count; ++i) {
+ bool last = (i == map_count - 1);
+ Handle<Map> map = instr->hydrogen()->types()->at(i);
+ Label check_passed;
+ __ CompareMap(object, map, &check_passed, ALLOW_ELEMENT_TRANSITION_MAPS);
+ if (last && !need_generic) {
DeoptimizeIf(not_equal, instr->environment());
- EmitLoadFieldOrConstantFunction(result, object, map, name);
+ __ bind(&check_passed);
+ EmitLoadFieldOrConstantFunction(
+ result, object, map, name, instr->environment());
+ } else {
+ Label next;
+ bool compact = all_are_compact ? true :
+ CompactEmit(instr->hydrogen()->types(), name, i, isolate());
+ __ j(not_equal, &next, compact ? Label::kNear : Label::kFar);
+ __ bind(&check_passed);
+ EmitLoadFieldOrConstantFunction(
+ result, object, map, name, instr->environment());
+ __ jmp(&done, all_are_compact ? Label::kNear : Label::kFar);
+ __ bind(&next);
}
- __ bind(&done);
}
+ if (need_generic) {
+ __ Move(rcx, name);
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);
+ }
+ __ bind(&done);
}
@@ -2331,8 +2365,10 @@ void LCodeGen::DoLoadElements(LLoadElements* instr) {
__ movzxbq(temp, FieldOperand(temp, Map::kBitField2Offset));
__ and_(temp, Immediate(Map::kElementsKindMask));
__ shr(temp, Immediate(Map::kElementsKindShift));
- __ cmpl(temp, Immediate(FAST_ELEMENTS));
- __ j(equal, &ok, Label::kNear);
+ __ cmpl(temp, Immediate(GetInitialFastElementsKind()));
+ __ j(less, &fail, Label::kNear);
+ __ cmpl(temp, Immediate(TERMINAL_FAST_ELEMENTS_KIND));
+ __ j(less_equal, &ok, Label::kNear);
__ cmpl(temp, Immediate(FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND));
__ j(less, &fail, Label::kNear);
__ cmpl(temp, Immediate(LAST_EXTERNAL_ARRAY_ELEMENTS_KIND));
@@ -2376,16 +2412,30 @@ void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
Register result = ToRegister(instr->result());
+ if (instr->hydrogen()->IsDehoisted() && !instr->key()->IsConstantOperand()) {
+ // Sign extend key because it could be a 32 bit negative value
+ // and the dehoisted address computation happens in 64 bits.
+ Register key_reg = ToRegister(instr->key());
+ __ movsxlq(key_reg, key_reg);
+ }
+
// Load the result.
__ movq(result,
- BuildFastArrayOperand(instr->elements(), instr->key(),
+ BuildFastArrayOperand(instr->elements(),
+ instr->key(),
FAST_ELEMENTS,
- FixedArray::kHeaderSize - kHeapObjectTag));
+ FixedArray::kHeaderSize - kHeapObjectTag,
+ instr->additional_index()));
// Check for the hole value.
if (instr->hydrogen()->RequiresHoleCheck()) {
- __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
- DeoptimizeIf(equal, instr->environment());
+ if (IsFastSmiElementsKind(instr->hydrogen()->elements_kind())) {
+ Condition smi = __ CheckSmi(result);
+ DeoptimizeIf(NegateCondition(smi), instr->environment());
+ } else {
+ __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
+ DeoptimizeIf(equal, instr->environment());
+ }
}
}
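
Why the movsxlq above matters, as a small C++ example (not V8 source): if a negative 32-bit key were zero-extended before the 64-bit dehoisted address computation, the scaled offset would become a huge positive displacement instead of a small negative one.

#include <cstdint>
#include <cstdio>

int main() {
  const int32_t key = -1;                // e.g. a dehoisted key that went negative
  const uint64_t base = 0x100000000ull;  // pretend elements-array base address
  const uint64_t kPointerSize = 8;

  // Zero-extending the 32-bit key before the 64-bit computation:
  uint64_t zero_extended =
      base + static_cast<uint64_t>(static_cast<uint32_t>(key)) * kPointerSize;

  // Sign-extending it first, which is what movsxlq does:
  int64_t scaled = static_cast<int64_t>(key) * 8;
  uint64_t sign_extended = base + static_cast<uint64_t>(scaled);

  std::printf("zero-extended address: %#llx\n",
              static_cast<unsigned long long>(zero_extended));  // far past the array
  std::printf("sign-extended address: %#llx\n",
              static_cast<unsigned long long>(sign_extended));  // base - 8, as intended
  return 0;
}
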
@@ -2394,19 +2444,32 @@ void LCodeGen::DoLoadKeyedFastDoubleElement(
LLoadKeyedFastDoubleElement* instr) {
XMMRegister result(ToDoubleRegister(instr->result()));
- int offset = FixedDoubleArray::kHeaderSize - kHeapObjectTag +
- sizeof(kHoleNanLower32);
- Operand hole_check_operand = BuildFastArrayOperand(
+ if (instr->hydrogen()->IsDehoisted() && !instr->key()->IsConstantOperand()) {
+ // Sign extend key because it could be a 32 bit negative value
+ // and the dehoisted address computation happens in 64 bits
+ Register key_reg = ToRegister(instr->key());
+ __ movsxlq(key_reg, key_reg);
+ }
+
+ if (instr->hydrogen()->RequiresHoleCheck()) {
+ int offset = FixedDoubleArray::kHeaderSize - kHeapObjectTag +
+ sizeof(kHoleNanLower32);
+ Operand hole_check_operand = BuildFastArrayOperand(
+ instr->elements(),
+ instr->key(),
+ FAST_DOUBLE_ELEMENTS,
+ offset,
+ instr->additional_index());
+ __ cmpl(hole_check_operand, Immediate(kHoleNanUpper32));
+ DeoptimizeIf(equal, instr->environment());
+ }
+
+ Operand double_load_operand = BuildFastArrayOperand(
instr->elements(),
instr->key(),
FAST_DOUBLE_ELEMENTS,
- offset);
- __ cmpl(hole_check_operand, Immediate(kHoleNanUpper32));
- DeoptimizeIf(equal, instr->environment());
-
- Operand double_load_operand = BuildFastArrayOperand(
- instr->elements(), instr->key(), FAST_DOUBLE_ELEMENTS,
- FixedDoubleArray::kHeaderSize - kHeapObjectTag);
+ FixedDoubleArray::kHeaderSize - kHeapObjectTag,
+ instr->additional_index());
__ movsd(result, double_load_operand);
}
@@ -2415,7 +2478,8 @@ Operand LCodeGen::BuildFastArrayOperand(
LOperand* elements_pointer,
LOperand* key,
ElementsKind elements_kind,
- uint32_t offset) {
+ uint32_t offset,
+ uint32_t additional_index) {
Register elements_pointer_reg = ToRegister(elements_pointer);
int shift_size = ElementsKindToShiftSize(elements_kind);
if (key->IsConstantOperand()) {
@@ -2424,11 +2488,14 @@ Operand LCodeGen::BuildFastArrayOperand(
Abort("array index constant value too big");
}
return Operand(elements_pointer_reg,
- constant_value * (1 << shift_size) + offset);
+ ((constant_value + additional_index) << shift_size)
+ + offset);
} else {
ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size);
- return Operand(elements_pointer_reg, ToRegister(key),
- scale_factor, offset);
+ return Operand(elements_pointer_reg,
+ ToRegister(key),
+ scale_factor,
+ offset + (additional_index << shift_size));
}
}
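
The displacement arithmetic in BuildFastArrayOperand above, restated as plain C++ (not V8 source): with a constant key the whole index folds into the displacement, while with a register key the scaling is done by the addressing mode and only additional_index is folded into the offset.

#include <cstdint>

// Byte displacement for a constant key:
// ((constant + additional_index) << shift) + offset.
uint32_t ConstantKeyDisplacement(uint32_t constant_value, uint32_t additional_index,
                                 int shift_size, uint32_t offset) {
  return ((constant_value + additional_index) << shift_size) + offset;
}

// For a register key the scaled register comes from the addressing mode,
// so only the additional_index contribution is added to the displacement.
uint32_t RegisterKeyDisplacement(uint32_t additional_index, int shift_size,
                                 uint32_t offset) {
  return offset + (additional_index << shift_size);
}
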
@@ -2437,7 +2504,17 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
LLoadKeyedSpecializedArrayElement* instr) {
ElementsKind elements_kind = instr->elements_kind();
Operand operand(BuildFastArrayOperand(instr->external_pointer(),
- instr->key(), elements_kind, 0));
+ instr->key(),
+ elements_kind,
+ 0,
+ instr->additional_index()));
+ if (instr->hydrogen()->IsDehoisted() && !instr->key()->IsConstantOperand()) {
+ // Sign extend key because it could be a 32 bit negative value
+ // and the dehoisted address computation happens in 64 bits
+ Register key_reg = ToRegister(instr->key());
+ __ movsxlq(key_reg, key_reg);
+ }
+
if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
XMMRegister result(ToDoubleRegister(instr->result()));
__ movss(result, operand);
@@ -2474,8 +2551,11 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -2497,24 +2577,28 @@ void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
Register result = ToRegister(instr->result());
- // Check for arguments adapter frame.
- Label done, adapted;
- __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
- __ Cmp(Operand(result, StandardFrameConstants::kContextOffset),
- Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
- __ j(equal, &adapted, Label::kNear);
+ if (instr->hydrogen()->from_inlined()) {
+ __ lea(result, Operand(rsp, -2 * kPointerSize));
+ } else {
+ // Check for arguments adapter frame.
+ Label done, adapted;
+ __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
+ __ Cmp(Operand(result, StandardFrameConstants::kContextOffset),
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ __ j(equal, &adapted, Label::kNear);
+
+ // No arguments adaptor frame.
+ __ movq(result, rbp);
+ __ jmp(&done, Label::kNear);
- // No arguments adaptor frame.
- __ movq(result, rbp);
- __ jmp(&done, Label::kNear);
+ // Arguments adaptor frame present.
+ __ bind(&adapted);
+ __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
- // Arguments adaptor frame present.
- __ bind(&adapted);
- __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
-
- // Result is the frame pointer for the frame if not adapted and for the real
- // frame below the adaptor frame if adapted.
- __ bind(&done);
+ // Result is the frame pointer for the frame if not adapted and for the real
+ // frame below the adaptor frame if adapted.
+ __ bind(&done);
+ }
}
@@ -2622,7 +2706,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
// Invoke the function.
__ bind(&invoke);
- ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
+ ASSERT(instr->HasPointerMap());
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
SafepointGenerator safepoint_generator(
@@ -2640,6 +2724,11 @@ void LCodeGen::DoPushArgument(LPushArgument* instr) {
}
+void LCodeGen::DoDrop(LDrop* instr) {
+ __ Drop(instr->count());
+}
+
+
void LCodeGen::DoThisFunction(LThisFunction* instr) {
Register result = ToRegister(instr->result());
__ LoadHeapObject(result, instr->hydrogen()->closure());
@@ -2684,7 +2773,8 @@ void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
int arity,
LInstruction* instr,
- CallKind call_kind) {
+ CallKind call_kind,
+ RDIState rdi_state) {
bool can_invoke_directly = !function->NeedsArgumentsAdaption() ||
function->shared()->formal_parameter_count() == arity;
@@ -2692,7 +2782,9 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
RecordPosition(pointers->position());
if (can_invoke_directly) {
- __ LoadHeapObject(rdi, function);
+ if (rdi_state == RDI_UNINITIALIZED) {
+ __ LoadHeapObject(rdi, function);
+ }
// Change context if needed.
bool change_context =
@@ -2737,7 +2829,8 @@ void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
CallKnownFunction(instr->function(),
instr->arity(),
instr,
- CALL_AS_METHOD);
+ CALL_AS_METHOD,
+ RDI_UNINITIALIZED);
}
@@ -2832,7 +2925,7 @@ void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
EmitIntegerMathAbs(instr);
} else { // Tagged case.
DeferredMathAbsTaggedHeapNumber* deferred =
- new DeferredMathAbsTaggedHeapNumber(this, instr);
+ new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr);
Register input_reg = ToRegister(instr->InputAt(0));
// Smi check.
__ JumpIfNotSmi(input_reg, deferred->entry());
@@ -3024,7 +3117,7 @@ void LCodeGen::DoRandom(LRandom* instr) {
LRandom* instr_;
};
- DeferredDoRandom* deferred = new DeferredDoRandom(this, instr);
+ DeferredDoRandom* deferred = new(zone()) DeferredDoRandom(this, instr);
// Having marked this instruction as a call we can use any
// registers.
@@ -3174,13 +3267,21 @@ void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
ASSERT(ToRegister(instr->function()).is(rdi));
ASSERT(instr->HasPointerMap());
- ASSERT(instr->HasDeoptimizationEnvironment());
- LPointerMap* pointers = instr->pointer_map();
- RecordPosition(pointers->position());
- SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
- ParameterCount count(instr->arity());
- __ InvokeFunction(rdi, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
- __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
+
+ if (instr->known_function().is_null()) {
+ LPointerMap* pointers = instr->pointer_map();
+ RecordPosition(pointers->position());
+ SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
+ ParameterCount count(instr->arity());
+ __ InvokeFunction(rdi, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
+ } else {
+ CallKnownFunction(instr->known_function(),
+ instr->arity(),
+ instr,
+ CALL_AS_METHOD,
+ RDI_CONTAINS_TARGET);
+ }
}
@@ -3234,7 +3335,11 @@ void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
ASSERT(ToRegister(instr->result()).is(rax));
- CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION);
+ CallKnownFunction(instr->target(),
+ instr->arity(),
+ instr,
+ CALL_AS_FUNCTION,
+ RDI_UNINITIALIZED);
}
@@ -3259,7 +3364,22 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
int offset = instr->offset();
if (!instr->transition().is_null()) {
- __ Move(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
+ if (!instr->hydrogen()->NeedsWriteBarrierForMap()) {
+ __ Move(FieldOperand(object, HeapObject::kMapOffset),
+ instr->transition());
+ } else {
+ Register temp = ToRegister(instr->TempAt(0));
+ __ Move(kScratchRegister, instr->transition());
+ __ movq(FieldOperand(object, HeapObject::kMapOffset), kScratchRegister);
+ // Update the write barrier for the map field.
+ __ RecordWriteField(object,
+ HeapObject::kMapOffset,
+ kScratchRegister,
+ temp,
+ kSaveFPRegs,
+ OMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ }
}
// Do the store.
@@ -3314,7 +3434,18 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
LStoreKeyedSpecializedArrayElement* instr) {
ElementsKind elements_kind = instr->elements_kind();
Operand operand(BuildFastArrayOperand(instr->external_pointer(),
- instr->key(), elements_kind, 0));
+ instr->key(),
+ elements_kind,
+ 0,
+ instr->additional_index()));
+
+ if (instr->hydrogen()->IsDehoisted() && !instr->key()->IsConstantOperand()) {
+ // Sign extend key because it could be a 32 bit negative value
+ // and the dehoisted address computation happens in 64 bits
+ Register key_reg = ToRegister(instr->key());
+ __ movsxlq(key_reg, key_reg);
+ }
+
if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
XMMRegister value(ToDoubleRegister(instr->value()));
__ cvtsd2ss(value, value);
@@ -3340,8 +3471,11 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -3384,30 +3518,29 @@ void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
Register elements = ToRegister(instr->object());
Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
- // Do the store.
- if (instr->key()->IsConstantOperand()) {
- ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
- LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
- int offset =
- ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
- __ movq(FieldOperand(elements, offset), value);
- } else {
- __ movq(FieldOperand(elements,
- key,
- times_pointer_size,
- FixedArray::kHeaderSize),
- value);
+ Operand operand =
+ BuildFastArrayOperand(instr->object(),
+ instr->key(),
+ FAST_ELEMENTS,
+ FixedArray::kHeaderSize - kHeapObjectTag,
+ instr->additional_index());
+
+ if (instr->hydrogen()->IsDehoisted() && !instr->key()->IsConstantOperand()) {
+ // Sign extend key because it could be a 32 bit negative value
+ // and the dehoisted address computation happens in 64 bits
+ Register key_reg = ToRegister(instr->key());
+ __ movsxlq(key_reg, key_reg);
}
+ __ movq(operand, value);
+
if (instr->hydrogen()->NeedsWriteBarrier()) {
+ ASSERT(!instr->key()->IsConstantOperand());
HType type = instr->hydrogen()->value()->type();
SmiCheck check_needed =
type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
// Compute address of modified element and store it into key register.
- __ lea(key, FieldOperand(elements,
- key,
- times_pointer_size,
- FixedArray::kHeaderSize));
+ __ lea(key, operand);
__ RecordWrite(elements,
key,
value,
@@ -3421,19 +3554,34 @@ void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
void LCodeGen::DoStoreKeyedFastDoubleElement(
LStoreKeyedFastDoubleElement* instr) {
XMMRegister value = ToDoubleRegister(instr->value());
- Label have_value;
- __ ucomisd(value, value);
- __ j(parity_odd, &have_value); // NaN.
+ if (instr->NeedsCanonicalization()) {
+ Label have_value;
+
+ __ ucomisd(value, value);
+ __ j(parity_odd, &have_value); // NaN.
- __ Set(kScratchRegister, BitCast<uint64_t>(
- FixedDoubleArray::canonical_not_the_hole_nan_as_double()));
- __ movq(value, kScratchRegister);
+ __ Set(kScratchRegister, BitCast<uint64_t>(
+ FixedDoubleArray::canonical_not_the_hole_nan_as_double()));
+ __ movq(value, kScratchRegister);
+
+ __ bind(&have_value);
+ }
- __ bind(&have_value);
Operand double_store_operand = BuildFastArrayOperand(
- instr->elements(), instr->key(), FAST_DOUBLE_ELEMENTS,
- FixedDoubleArray::kHeaderSize - kHeapObjectTag);
+ instr->elements(),
+ instr->key(),
+ FAST_DOUBLE_ELEMENTS,
+ FixedDoubleArray::kHeaderSize - kHeapObjectTag,
+ instr->additional_index());
+
+ if (instr->hydrogen()->IsDehoisted() && !instr->key()->IsConstantOperand()) {
+    // Sign-extend the key because it could be a 32-bit negative value and
+    // the dehoisted address computation happens in 64 bits.
+ Register key_reg = ToRegister(instr->key());
+ __ movsxlq(key_reg, key_reg);
+ }
+
__ movsd(double_store_operand, value);
}
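Sketch of the canonicalization step guarded by NeedsCanonicalization() above: a NaN is the only double that compares unordered with itself (the ucomisd/parity test), and any such value is replaced by one canonical NaN so the double array never ends up holding the bit pattern reserved for holes. The canonical constant below is illustrative, not V8's exact value.

#include <cassert>
#include <cmath>
#include <limits>

double CanonicalizeForDoubleArray(double value) {
  if (value != value) {                               // true only for NaN
    return std::numeric_limits<double>::quiet_NaN();  // one canonical NaN
  }
  return value;
}

int main() {
  assert(std::isnan(CanonicalizeForDoubleArray(std::nan("1"))));
  assert(CanonicalizeForDoubleArray(1.5) == 1.5);
  return 0;
}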
@@ -3462,21 +3610,22 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
__ Cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map);
__ j(not_equal, &not_applicable);
__ movq(new_map_reg, to_map, RelocInfo::EMBEDDED_OBJECT);
- if (from_kind == FAST_SMI_ONLY_ELEMENTS && to_kind == FAST_ELEMENTS) {
+ if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
__ movq(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg);
// Write barrier.
ASSERT_NE(instr->temp_reg(), NULL);
__ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg,
ToRegister(instr->temp_reg()), kDontSaveFPRegs);
- } else if (from_kind == FAST_SMI_ONLY_ELEMENTS &&
- to_kind == FAST_DOUBLE_ELEMENTS) {
+ } else if (IsFastSmiElementsKind(from_kind) &&
+ IsFastDoubleElementsKind(to_kind)) {
Register fixed_object_reg = ToRegister(instr->temp_reg());
ASSERT(fixed_object_reg.is(rdx));
ASSERT(new_map_reg.is(rbx));
__ movq(fixed_object_reg, object_reg);
CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(),
RelocInfo::CODE_TARGET, instr);
- } else if (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS) {
+ } else if (IsFastDoubleElementsKind(from_kind) &&
+ IsFastObjectElementsKind(to_kind)) {
Register fixed_object_reg = ToRegister(instr->temp_reg());
ASSERT(fixed_object_reg.is(rdx));
ASSERT(new_map_reg.is(rbx));
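For orientation (illustrative only): a "simple map change" such as smi-to-object just swaps the map pointer, because tagged smis are already valid tagged values, while a smi-to-double transition has to allocate a new backing store and convert every element — which is why that case is routed through a builtin above.

#include <cassert>
#include <cstdint>
#include <vector>

// Toy stand-ins; V8's real stores are FixedArray / FixedDoubleArray.
std::vector<double> TransitionSmiToDouble(const std::vector<int32_t>& smis) {
  std::vector<double> doubles;
  doubles.reserve(smis.size());
  for (int32_t v : smis) doubles.push_back(static_cast<double>(v));  // per-element copy
  return doubles;
}

int main() {
  std::vector<int32_t> smis;
  smis.push_back(1);
  smis.push_back(2);
  smis.push_back(3);
  std::vector<double> doubles = TransitionSmiToDouble(smis);
  assert(doubles[2] == 3.0);
  return 0;
}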
@@ -3510,7 +3659,7 @@ void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
};
DeferredStringCharCodeAt* deferred =
- new DeferredStringCharCodeAt(this, instr);
+ new(zone()) DeferredStringCharCodeAt(this, instr);
StringCharLoadGenerator::Generate(masm(),
ToRegister(instr->string()),
@@ -3564,7 +3713,7 @@ void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
};
DeferredStringCharFromCode* deferred =
- new DeferredStringCharFromCode(this, instr);
+ new(zone()) DeferredStringCharFromCode(this, instr);
ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
Register char_code = ToRegister(instr->char_code());
@@ -3644,7 +3793,7 @@ void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
Register reg = ToRegister(instr->result());
Register tmp = ToRegister(instr->TempAt(0));
- DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
+ DeferredNumberTagD* deferred = new(zone()) DeferredNumberTagD(this, instr);
if (FLAG_inline_new) {
__ AllocateHeapNumber(reg, tmp, deferred->entry());
} else {
@@ -3686,6 +3835,10 @@ void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
if (instr->needs_check()) {
Condition is_smi = __ CheckSmi(input);
DeoptimizeIf(NegateCondition(is_smi), instr->environment());
+ } else {
+ if (FLAG_debug_code) {
+ __ AbortIfNotSmi(input);
+ }
}
__ SmiToInteger32(input, input);
}
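Background sketch for the AbortIfNotSmi/SmiToInteger32 pair above: on x64, V8 keeps a smi's 32-bit payload in the upper half of the word with the low tag bit clear, so the check is a bit test and untagging is a shift. The scheme below is simplified but shows the shape.

#include <cassert>
#include <cstdint>

const int kSmiShift = 32;        // payload lives in the upper 32 bits
const uint64_t kSmiTagMask = 1;  // low bit clear means "smi"

bool IsSmi(uint64_t tagged) { return (tagged & kSmiTagMask) == 0; }

uint64_t SmiTag(int32_t value) {
  return static_cast<uint64_t>(static_cast<uint32_t>(value)) << kSmiShift;
}

int32_t SmiToInteger32(uint64_t tagged) {
  return static_cast<int32_t>(tagged >> kSmiShift);
}

int main() {
  uint64_t tagged = SmiTag(-42);
  assert(IsSmi(tagged));
  assert(SmiToInteger32(tagged) == -42);
  return 0;
}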
@@ -3804,7 +3957,7 @@ void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
ASSERT(input->Equals(instr->result()));
Register input_reg = ToRegister(input);
- DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);
+ DeferredTaggedToI* deferred = new(zone()) DeferredTaggedToI(this, instr);
__ JumpIfNotSmi(input_reg, deferred->entry());
__ SmiToInteger32(input_reg, input_reg);
__ bind(deferred->exit());
@@ -3953,12 +4106,21 @@ void LCodeGen::DoCheckMapCommon(Register reg,
}
-void LCodeGen::DoCheckMap(LCheckMap* instr) {
+void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
LOperand* input = instr->InputAt(0);
ASSERT(input->IsRegister());
Register reg = ToRegister(input);
- Handle<Map> map = instr->hydrogen()->map();
- DoCheckMapCommon(reg, map, instr->hydrogen()->mode(), instr->environment());
+
+ Label success;
+ SmallMapList* map_set = instr->hydrogen()->map_set();
+ for (int i = 0; i < map_set->length() - 1; i++) {
+ Handle<Map> map = map_set->at(i);
+ __ CompareMap(reg, map, &success, REQUIRE_EXACT_MAP);
+ __ j(equal, &success);
+ }
+ Handle<Map> map = map_set->last();
+ DoCheckMapCommon(reg, map, REQUIRE_EXACT_MAP, instr->environment());
+ __ bind(&success);
}
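The CheckMap-to-CheckMaps rename reflects that the hydrogen instruction may now carry several acceptable maps. A hedged sketch of the emitted control flow: every map but the last branches to success on a hit, and only the final comparison deoptimizes on mismatch.

#include <cassert>
#include <vector>

struct Map {};                        // illustrative stand-ins for V8 types
struct JSObject { const Map* map; };

// Returns true on "success", false where the generated code would deopt.
bool CheckMaps(const JSObject& obj, const std::vector<const Map*>& map_set) {
  for (size_t i = 0; i + 1 < map_set.size(); ++i) {
    if (obj.map == map_set[i]) return true;   // j(equal, &success)
  }
  return obj.map == map_set.back();           // DoCheckMapCommon + deopt
}

int main() {
  Map a, b;
  JSObject obj = {&b};
  std::vector<const Map*> both;
  both.push_back(&a);
  both.push_back(&b);
  std::vector<const Map*> only_a(1, &a);
  assert(CheckMaps(obj, both));
  assert(!CheckMaps(obj, only_a));
  return 0;
}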
@@ -4049,7 +4211,8 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
LAllocateObject* instr_;
};
- DeferredAllocateObject* deferred = new DeferredAllocateObject(this, instr);
+ DeferredAllocateObject* deferred =
+ new(zone()) DeferredAllocateObject(this, instr);
Register result = ToRegister(instr->result());
Register scratch = ToRegister(instr->TempAt(0));
@@ -4071,6 +4234,14 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
deferred->entry(),
TAG_OBJECT);
+ __ bind(deferred->exit());
+ if (FLAG_debug_code) {
+ Label is_in_new_space;
+ __ JumpIfInNewSpace(result, scratch, &is_in_new_space);
+ __ Abort("Allocated object is not in new-space");
+ __ bind(&is_in_new_space);
+ }
+
// Load the initial map.
Register map = scratch;
__ LoadHeapObject(scratch, constructor);
@@ -4105,14 +4276,14 @@ void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
__ movq(FieldOperand(result, property_offset), scratch);
}
}
-
- __ bind(deferred->exit());
}
void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
Register result = ToRegister(instr->result());
Handle<JSFunction> constructor = instr->hydrogen()->constructor();
+ Handle<Map> initial_map(constructor->initial_map());
+ int instance_size = initial_map->instance_size();
// TODO(3095996): Get rid of this. For now, we need to make the
// result register contain a valid pointer because it is already
@@ -4120,8 +4291,8 @@ void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
__ Set(result, 0);
PushSafepointRegistersScope scope(this);
- __ PushHeapObject(constructor);
- CallRuntimeFromDeferred(Runtime::kNewObject, 1, instr);
+ __ Push(Smi::FromInt(instance_size));
+ CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr);
__ StoreToSafepointRegisterSlot(result, rax);
}
@@ -4133,8 +4304,9 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
// Deopt if the array literal boilerplate ElementsKind is of a type different
// than the expected one. The check isn't necessary if the boilerplate has
- // already been converted to FAST_ELEMENTS.
- if (boilerplate_elements_kind != FAST_ELEMENTS) {
+ // already been converted to TERMINAL_FAST_ELEMENTS_KIND.
+ if (CanTransitionToMoreGeneralFastElementsKind(
+ boilerplate_elements_kind, true)) {
__ LoadHeapObject(rax, instr->hydrogen()->boilerplate_object());
__ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
// Load the map's "bit field 2".
@@ -4250,9 +4422,10 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
__ movq(FieldOperand(result, total_offset), rcx);
}
} else if (elements->IsFixedArray()) {
+ Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
for (int i = 0; i < elements_length; i++) {
int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i);
- Handle<Object> value = JSObject::GetElement(object, i);
+ Handle<Object> value(fast_elements->get(i));
if (value->IsJSObject()) {
Handle<JSObject> value_object = Handle<JSObject>::cast(value);
__ lea(rcx, Operand(result, *offset));
@@ -4276,6 +4449,24 @@ void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
int size = instr->hydrogen()->total_size();
+ ElementsKind boilerplate_elements_kind =
+ instr->hydrogen()->boilerplate()->GetElementsKind();
+
+ // Deopt if the array literal boilerplate ElementsKind is of a type different
+ // than the expected one. The check isn't necessary if the boilerplate has
+ // already been converted to TERMINAL_FAST_ELEMENTS_KIND.
+ if (CanTransitionToMoreGeneralFastElementsKind(
+ boilerplate_elements_kind, true)) {
+ __ LoadHeapObject(rbx, instr->hydrogen()->boilerplate());
+ __ movq(rcx, FieldOperand(rbx, HeapObject::kMapOffset));
+ // Load the map's "bit field 2".
+ __ movb(rcx, FieldOperand(rcx, Map::kBitField2Offset));
+ // Retrieve elements_kind from bit field 2.
+ __ and_(rcx, Immediate(Map::kElementsKindMask));
+ __ cmpb(rcx, Immediate(boilerplate_elements_kind <<
+ Map::kElementsKindShift));
+ DeoptimizeIf(not_equal, instr->environment());
+ }
// Allocate all objects that are part of the literal in one big
// allocation. This avoids multiple limit checks.
@@ -4574,7 +4765,7 @@ void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
LOperand* key = instr->key();
EmitPushTaggedOperand(obj);
EmitPushTaggedOperand(key);
- ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
+ ASSERT(instr->HasPointerMap());
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
// Create safepoint generator that will also ensure enough space in the
@@ -4592,7 +4783,7 @@ void LCodeGen::DoIn(LIn* instr) {
LOperand* key = instr->key();
EmitPushTaggedOperand(key);
EmitPushTaggedOperand(obj);
- ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
+ ASSERT(instr->HasPointerMap());
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
SafepointGenerator safepoint_generator(
@@ -4643,7 +4834,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
ASSERT(instr->hydrogen()->is_backwards_branch());
// Perform stack overflow check if this goto needs it before jumping.
DeferredStackCheck* deferred_stack_check =
- new DeferredStackCheck(this, instr);
+ new(zone()) DeferredStackCheck(this, instr);
__ CompareRoot(rsp, Heap::kStackLimitRootIndex);
__ j(below, deferred_stack_check->entry());
EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
diff --git a/deps/v8/src/x64/lithium-codegen-x64.h b/deps/v8/src/x64/lithium-codegen-x64.h
index f5045b66a2..99e7ec8249 100644
--- a/deps/v8/src/x64/lithium-codegen-x64.h
+++ b/deps/v8/src/x64/lithium-codegen-x64.h
@@ -45,22 +45,26 @@ class SafepointGenerator;
class LCodeGen BASE_EMBEDDED {
public:
- LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
+ LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info,
+ Zone* zone)
: chunk_(chunk),
masm_(assembler),
info_(info),
current_block_(-1),
current_instruction_(-1),
instructions_(chunk->instructions()),
- deoptimizations_(4),
- jump_table_(4),
- deoptimization_literals_(8),
+ deoptimizations_(4, zone),
+ jump_table_(4, zone),
+ deoptimization_literals_(8, zone),
inlined_function_count_(0),
scope_(info->scope()),
status_(UNUSED),
- deferred_(8),
+ translations_(zone),
+ deferred_(8, zone),
osr_pc_offset_(-1),
last_lazy_deopt_pc_(0),
+ safepoints_(zone),
+ zone_(zone),
resolver_(this),
expected_safepoint_kind_(Safepoint::kSimple) {
PopulateDeoptimizationLiteralsWithInlinedFunctions();
@@ -72,6 +76,7 @@ class LCodeGen BASE_EMBEDDED {
Isolate* isolate() const { return info_->isolate(); }
Factory* factory() const { return isolate()->factory(); }
Heap* heap() const { return isolate()->heap(); }
+ Zone* zone() const { return zone_; }
// Support for converting LOperands to assembler types.
Register ToRegister(LOperand* op) const;
@@ -155,7 +160,7 @@ class LCodeGen BASE_EMBEDDED {
void Abort(const char* format, ...);
void Comment(const char* format, ...);
- void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code); }
+ void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
// Code generation passes. Returns true if code generation should
// continue.
@@ -196,12 +201,18 @@ class LCodeGen BASE_EMBEDDED {
int argc,
LInstruction* instr);
+ enum RDIState {
+ RDI_UNINITIALIZED,
+ RDI_CONTAINS_TARGET
+ };
+
// Generate a direct call to a known function. Expects the function
// to be in rdi.
void CallKnownFunction(Handle<JSFunction> function,
int arity,
LInstruction* instr,
- CallKind call_kind);
+ CallKind call_kind,
+ RDIState rdi_state);
void RecordSafepointWithLazyDeopt(LInstruction* instr,
@@ -225,7 +236,8 @@ class LCodeGen BASE_EMBEDDED {
LOperand* elements_pointer,
LOperand* key,
ElementsKind elements_kind,
- uint32_t offset);
+ uint32_t offset,
+ uint32_t additional_index = 0);
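Sketch of what the new additional_index parameter contributes to the operand built by BuildFastArrayOperand: the constant part of a key such as i + 4 is folded into the displacement, so the effective address becomes header + (constant + key) * element_size. Names and constants below are illustrative.

#include <cassert>
#include <cstdint>

uint64_t FastArrayElementAddress(uint64_t elements_base,  // backing store
                                 int64_t key,             // dynamic index
                                 uint32_t additional_index,
                                 uint32_t element_size,
                                 uint32_t header_offset) {
  // additional_index * element_size becomes a static displacement, just like
  // the header offset already passed in.
  return elements_base + header_offset +
         static_cast<uint64_t>(additional_index) * element_size +
         static_cast<uint64_t>(key) * element_size;
}

int main() {
  // Element 2 with constant offset 3 in an 8-byte array with a 16-byte header.
  assert(FastArrayElementAddress(0x1000, 2, 3, 8, 16) == 0x1000 + 16 + 40);
  return 0;
}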
// Specific math operations - used from DoUnaryMathOperation.
void EmitIntegerMathAbs(LUnaryMathOperation* instr);
@@ -289,7 +301,8 @@ class LCodeGen BASE_EMBEDDED {
void EmitLoadFieldOrConstantFunction(Register result,
Register object,
Handle<Map> type,
- Handle<String> name);
+ Handle<String> name,
+ LEnvironment* env);
// Emits code for pushing either a tagged constant, a (non-double)
// register, or a stack slot operand.
@@ -334,6 +347,8 @@ class LCodeGen BASE_EMBEDDED {
// itself is emitted at the end of the generated code.
SafepointTableBuilder safepoints_;
+ Zone* zone_;
+
// Compiler from a set of parallel moves to a sequential list of moves.
LGapResolver resolver_;
diff --git a/deps/v8/src/x64/lithium-gap-resolver-x64.cc b/deps/v8/src/x64/lithium-gap-resolver-x64.cc
index 877ea8cd3b..22183a2f8d 100644
--- a/deps/v8/src/x64/lithium-gap-resolver-x64.cc
+++ b/deps/v8/src/x64/lithium-gap-resolver-x64.cc
@@ -36,7 +36,7 @@ namespace v8 {
namespace internal {
LGapResolver::LGapResolver(LCodeGen* owner)
- : cgen_(owner), moves_(32) {}
+ : cgen_(owner), moves_(32, owner->zone()) {}
void LGapResolver::Resolve(LParallelMove* parallel_move) {
@@ -74,7 +74,7 @@ void LGapResolver::BuildInitialMoveList(LParallelMove* parallel_move) {
const ZoneList<LMoveOperands>* moves = parallel_move->move_operands();
for (int i = 0; i < moves->length(); ++i) {
LMoveOperands move = moves->at(i);
- if (!move.IsRedundant()) moves_.Add(move);
+ if (!move.IsRedundant()) moves_.Add(move, cgen_->zone());
}
Verify();
}
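All of the new(zone()) spellings and the extra Zone* arguments threaded through ZoneList::Add in this patch follow the arena-allocation pattern sketched below (a toy arena, not V8's Zone): objects are carved out of a zone and released all at once when the zone is destroyed, so individual deletes never happen.

#include <cstddef>
#include <new>
#include <vector>

class Zone {                       // toy arena, not V8's Zone
 public:
  ~Zone() { for (size_t i = 0; i < chunks_.size(); ++i) delete[] chunks_[i]; }
  void* New(size_t size) {
    chunks_.push_back(new char[size]);
    return chunks_.back();
  }
 private:
  std::vector<char*> chunks_;
};

struct ZoneObject {
  void* operator new(size_t size, Zone* zone) { return zone->New(size); }
  void operator delete(void*, Zone*) {}   // matching placement delete
  void operator delete(void*) {}          // zone memory is never freed singly
};

struct DeferredCode : public ZoneObject {
  explicit DeferredCode(int id) : id(id) {}
  int id;
};

int main() {
  Zone zone;
  DeferredCode* code = new (&zone) DeferredCode(7);  // like new(zone()) ...
  return code->id == 7 ? 0 : 1;                      // freed with the zone
}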
diff --git a/deps/v8/src/x64/lithium-x64.cc b/deps/v8/src/x64/lithium-x64.cc
index 593e778d82..d06a6a4063 100644
--- a/deps/v8/src/x64/lithium-x64.cc
+++ b/deps/v8/src/x64/lithium-x64.cc
@@ -110,22 +110,17 @@ void LInstruction::PrintTo(StringStream* stream) {
}
-template<int R, int I, int T>
-void LTemplateInstruction<R, I, T>::PrintDataTo(StringStream* stream) {
+void LInstruction::PrintDataTo(StringStream* stream) {
stream->Add("= ");
- for (int i = 0; i < inputs_.length(); i++) {
+ for (int i = 0; i < InputCount(); i++) {
if (i > 0) stream->Add(" ");
- inputs_[i]->PrintTo(stream);
+ InputAt(i)->PrintTo(stream);
}
}
-template<int R, int I, int T>
-void LTemplateInstruction<R, I, T>::PrintOutputOperandTo(StringStream* stream) {
- for (int i = 0; i < results_.length(); i++) {
- if (i > 0) stream->Add(" ");
- results_[i]->PrintTo(stream);
- }
+void LInstruction::PrintOutputOperandTo(StringStream* stream) {
+ if (HasResult()) result()->PrintTo(stream);
}
@@ -374,9 +369,9 @@ LOperand* LChunk::GetNextSpillSlot(bool is_double) {
// stack slots for int32 values.
int index = GetNextSpillIndex(is_double);
if (is_double) {
- return LDoubleStackSlot::Create(index);
+ return LDoubleStackSlot::Create(index, zone());
} else {
- return LStackSlot::Create(index);
+ return LStackSlot::Create(index, zone());
}
}
@@ -472,23 +467,23 @@ void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
LInstructionGap* gap = new(graph_->zone()) LInstructionGap(block);
int index = -1;
if (instr->IsControl()) {
- instructions_.Add(gap);
+ instructions_.Add(gap, zone());
index = instructions_.length();
- instructions_.Add(instr);
+ instructions_.Add(instr, zone());
} else {
index = instructions_.length();
- instructions_.Add(instr);
- instructions_.Add(gap);
+ instructions_.Add(instr, zone());
+ instructions_.Add(gap, zone());
}
if (instr->HasPointerMap()) {
- pointer_maps_.Add(instr->pointer_map());
+ pointer_maps_.Add(instr->pointer_map(), zone());
instr->pointer_map()->set_lithium_position(index);
}
}
LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
- return LConstantOperand::Create(constant->id());
+ return LConstantOperand::Create(constant->id(), zone());
}
@@ -527,7 +522,8 @@ int LChunk::NearestGapPos(int index) const {
void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
- GetGapAt(index)->GetOrCreateParallelMove(LGap::START)->AddMove(from, to);
+ GetGapAt(index)->GetOrCreateParallelMove(
+ LGap::START, zone())->AddMove(from, to, zone());
}
@@ -727,22 +723,6 @@ LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
}
-LInstruction* LChunkBuilder::SetInstructionPendingDeoptimizationEnvironment(
- LInstruction* instr, int ast_id) {
- ASSERT(instruction_pending_deoptimization_environment_ == NULL);
- ASSERT(pending_deoptimization_ast_id_ == AstNode::kNoNumber);
- instruction_pending_deoptimization_environment_ = instr;
- pending_deoptimization_ast_id_ = ast_id;
- return instr;
-}
-
-
-void LChunkBuilder::ClearInstructionPendingDeoptimizationEnvironment() {
- instruction_pending_deoptimization_environment_ = NULL;
- pending_deoptimization_ast_id_ = AstNode::kNoNumber;
-}
-
-
LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
HInstruction* hinstr,
CanDeoptimize can_deoptimize) {
@@ -755,8 +735,10 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
if (hinstr->HasObservableSideEffects()) {
ASSERT(hinstr->next()->IsSimulate());
HSimulate* sim = HSimulate::cast(hinstr->next());
- instr = SetInstructionPendingDeoptimizationEnvironment(
- instr, sim->ast_id());
+ ASSERT(instruction_pending_deoptimization_environment_ == NULL);
+ ASSERT(pending_deoptimization_ast_id_ == AstNode::kNoNumber);
+ instruction_pending_deoptimization_environment_ = instr;
+ pending_deoptimization_ast_id_ = sim->ast_id();
}
// If instruction does not have side-effects lazy deoptimization
@@ -774,15 +756,9 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
}
-LInstruction* LChunkBuilder::MarkAsSaveDoubles(LInstruction* instr) {
- instr->MarkAsSaveDoubles();
- return instr;
-}
-
-
LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
ASSERT(!instr->HasPointerMap());
- instr->set_pointer_map(new(zone()) LPointerMap(position_));
+ instr->set_pointer_map(new(zone()) LPointerMap(position_, zone()));
return instr;
}
@@ -1005,7 +981,8 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
hydrogen_env->parameter_count(),
argument_count_,
value_count,
- outer);
+ outer,
+ zone());
int argument_index = *argument_index_accumulator;
for (int i = 0; i < value_count; ++i) {
if (hydrogen_env->is_special_index(i)) continue;
@@ -1285,6 +1262,7 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
ASSERT(instr->value()->representation().IsInteger32());
ASSERT(instr->representation().IsInteger32());
+ if (instr->HasNoUses()) return NULL;
LOperand* input = UseRegisterAtStart(instr->value());
LBitNotI* result = new(zone()) LBitNotI(input);
return DefineSameAsFirst(result);
@@ -1309,6 +1287,12 @@ LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
}
+LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
+ UNIMPLEMENTED();
+ return NULL;
+}
+
+
LInstruction* LChunkBuilder::DoMod(HMod* instr) {
if (instr->representation().IsInteger32()) {
ASSERT(instr->left()->representation().IsInteger32());
@@ -1611,7 +1595,7 @@ LInstruction* LChunkBuilder::DoValueOf(HValueOf* instr) {
LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
LOperand* object = UseFixed(instr->value(), rax);
- LDateField* result = new LDateField(object, instr->index());
+ LDateField* result = new(zone()) LDateField(object, instr->index());
return MarkAsCall(DefineFixed(result, rax), instr);
}
@@ -1660,14 +1644,13 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
} else {
ASSERT(to.IsInteger32());
LOperand* value = UseRegister(instr->value());
- bool needs_check = !instr->value()->type().IsSmi();
- if (needs_check) {
+ if (instr->value()->type().IsSmi()) {
+ return DefineSameAsFirst(new(zone()) LSmiUntag(value, false));
+ } else {
bool truncating = instr->CanTruncateToInt32();
LOperand* xmm_temp = truncating ? NULL : FixedTemp(xmm1);
LTaggedToI* res = new(zone()) LTaggedToI(value, xmm_temp);
return AssignEnvironment(DefineSameAsFirst(res));
- } else {
- return DefineSameAsFirst(new(zone()) LSmiUntag(value, needs_check));
}
}
} else if (from.IsDouble()) {
@@ -1737,9 +1720,9 @@ LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
}
-LInstruction* LChunkBuilder::DoCheckMap(HCheckMap* instr) {
+LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- LCheckMap* result = new(zone()) LCheckMap(value);
+ LCheckMaps* result = new(zone()) LCheckMaps(value);
return AssignEnvironment(result);
}
@@ -2030,8 +2013,9 @@ LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
LInstruction* LChunkBuilder::DoTransitionElementsKind(
HTransitionElementsKind* instr) {
- if (instr->original_map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS &&
- instr->transitioned_map()->elements_kind() == FAST_ELEMENTS) {
+ ElementsKind from_kind = instr->original_map()->elements_kind();
+ ElementsKind to_kind = instr->transitioned_map()->elements_kind();
+ if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
LOperand* object = UseRegister(instr->object());
LOperand* new_map_reg = TempRegister();
LOperand* temp_reg = TempRegister();
@@ -2053,10 +2037,19 @@ LInstruction* LChunkBuilder::DoTransitionElementsKind(
LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
bool needs_write_barrier = instr->NeedsWriteBarrier();
-
- LOperand* obj = needs_write_barrier
- ? UseTempRegister(instr->object())
- : UseRegisterAtStart(instr->object());
+ bool needs_write_barrier_for_map = !instr->transition().is_null() &&
+ instr->NeedsWriteBarrierForMap();
+
+ LOperand* obj;
+ if (needs_write_barrier) {
+ obj = instr->is_in_object()
+ ? UseRegister(instr->object())
+ : UseTempRegister(instr->object());
+ } else {
+ obj = needs_write_barrier_for_map
+ ? UseRegister(instr->object())
+ : UseRegisterAtStart(instr->object());
+ }
LOperand* val = needs_write_barrier
? UseTempRegister(instr->value())
@@ -2064,8 +2057,8 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
// We only need a scratch register if we have a write barrier or we
// have a store into the properties array (not in-object-property).
- LOperand* temp = (!instr->is_in_object() || needs_write_barrier)
- ? TempRegister() : NULL;
+ LOperand* temp = (!instr->is_in_object() || needs_write_barrier ||
+ needs_write_barrier_for_map) ? TempRegister() : NULL;
return new(zone()) LStoreNamedField(obj, val, temp);
}
@@ -2110,7 +2103,7 @@ LInstruction* LChunkBuilder::DoStringLength(HStringLength* instr) {
LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
- LAllocateObject* result = new LAllocateObject(TempRegister());
+ LAllocateObject* result = new(zone()) LAllocateObject(TempRegister());
return AssignPointerMap(DefineAsRegister(result));
}
@@ -2241,9 +2234,12 @@ LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
if (pending_deoptimization_ast_id_ == instr->ast_id()) {
LLazyBailout* lazy_bailout = new(zone()) LLazyBailout;
LInstruction* result = AssignEnvironment(lazy_bailout);
+ // Store the lazy deopt environment with the instruction if needed. Right
+ // now it is only used for LInstanceOfKnownGlobal.
instruction_pending_deoptimization_environment_->
- set_deoptimization_environment(result->environment());
- ClearInstructionPendingDeoptimizationEnvironment();
+ SetDeferredLazyDeoptimizationEnvironment(result->environment());
+ instruction_pending_deoptimization_environment_ = NULL;
+ pending_deoptimization_ast_id_ = AstNode::kNoNumber;
return result;
}
@@ -2270,8 +2266,8 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
undefined,
instr->call_kind(),
instr->is_construct());
- if (instr->arguments() != NULL) {
- inner->Bind(instr->arguments(), graph()->GetArgumentsObject());
+ if (instr->arguments_var() != NULL) {
+ inner->Bind(instr->arguments_var(), graph()->GetArgumentsObject());
}
current_block_->UpdateEnvironment(inner);
chunk_->AddInlinedClosure(instr->closure());
@@ -2280,10 +2276,21 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
+ LInstruction* pop = NULL;
+
+ HEnvironment* env = current_block_->last_environment();
+
+ if (instr->arguments_pushed()) {
+ int argument_count = env->arguments_environment()->parameter_count();
+ pop = new(zone()) LDrop(argument_count);
+ argument_count_ -= argument_count;
+ }
+
HEnvironment* outer = current_block_->last_environment()->
DiscardInlined(false);
current_block_->UpdateEnvironment(outer);
- return NULL;
+
+ return pop;
}
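A small sketch of what the new LDrop models when leaving an inlined frame whose arguments were pushed: instead of returning no instruction, the builder emits a drop of the inlined call's parameter count so the simulated operand-stack height stays in sync.

#include <cassert>
#include <vector>

void Drop(std::vector<int>* stack, int count) {   // toy operand stack
  stack->resize(stack->size() - static_cast<size_t>(count));
}

int main() {
  std::vector<int> stack;
  stack.push_back(10);   // outer frame values
  stack.push_back(20);
  stack.push_back(1);    // three pushed arguments of the inlined call
  stack.push_back(2);
  stack.push_back(3);
  Drop(&stack, 3);       // LeaveInlined -> LDrop(3)
  assert(stack.size() == 2);
  return 0;
}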
diff --git a/deps/v8/src/x64/lithium-x64.h b/deps/v8/src/x64/lithium-x64.h
index 2d8fd2ecce..d038dda060 100644
--- a/deps/v8/src/x64/lithium-x64.h
+++ b/deps/v8/src/x64/lithium-x64.h
@@ -71,7 +71,7 @@ class LCodeGen;
V(CallStub) \
V(CheckFunction) \
V(CheckInstanceType) \
- V(CheckMap) \
+ V(CheckMaps) \
V(CheckNonSmi) \
V(CheckPrototypeMaps) \
V(CheckSmi) \
@@ -179,7 +179,8 @@ class LCodeGen;
V(CheckMapValue) \
V(LoadFieldByIndex) \
V(DateField) \
- V(WrapReceiver)
+ V(WrapReceiver) \
+ V(Drop)
#define DECLARE_CONCRETE_INSTRUCTION(type, mnemonic) \
@@ -203,16 +204,15 @@ class LInstruction: public ZoneObject {
LInstruction()
: environment_(NULL),
hydrogen_value_(NULL),
- is_call_(false),
- is_save_doubles_(false) { }
+ is_call_(false) { }
virtual ~LInstruction() { }
virtual void CompileToNative(LCodeGen* generator) = 0;
virtual const char* Mnemonic() const = 0;
virtual void PrintTo(StringStream* stream);
- virtual void PrintDataTo(StringStream* stream) = 0;
- virtual void PrintOutputOperandTo(StringStream* stream) = 0;
+ virtual void PrintDataTo(StringStream* stream);
+ virtual void PrintOutputOperandTo(StringStream* stream);
enum Opcode {
// Declare a unique enum value for each instruction.
@@ -247,22 +247,12 @@ class LInstruction: public ZoneObject {
void set_hydrogen_value(HValue* value) { hydrogen_value_ = value; }
HValue* hydrogen_value() const { return hydrogen_value_; }
- void set_deoptimization_environment(LEnvironment* env) {
- deoptimization_environment_.set(env);
- }
- LEnvironment* deoptimization_environment() const {
- return deoptimization_environment_.get();
- }
- bool HasDeoptimizationEnvironment() const {
- return deoptimization_environment_.is_set();
- }
-
void MarkAsCall() { is_call_ = true; }
- void MarkAsSaveDoubles() { is_save_doubles_ = true; }
+
+ virtual void SetDeferredLazyDeoptimizationEnvironment(LEnvironment* env) { }
// Interface to the register allocator and iterators.
bool IsMarkedAsCall() const { return is_call_; }
- bool IsMarkedAsSaveDoubles() const { return is_save_doubles_; }
virtual bool HasResult() const = 0;
virtual LOperand* result() = 0;
@@ -283,9 +273,7 @@ class LInstruction: public ZoneObject {
LEnvironment* environment_;
SetOncePointer<LPointerMap> pointer_map_;
HValue* hydrogen_value_;
- SetOncePointer<LEnvironment> deoptimization_environment_;
bool is_call_;
- bool is_save_doubles_;
};
@@ -307,9 +295,6 @@ class LTemplateInstruction: public LInstruction {
int TempCount() { return T; }
LOperand* TempAt(int i) { return temps_[i]; }
- virtual void PrintDataTo(StringStream* stream);
- virtual void PrintOutputOperandTo(StringStream* stream);
-
protected:
EmbeddedContainer<LOperand*, R> results_;
EmbeddedContainer<LOperand*, I> inputs_;
@@ -348,8 +333,11 @@ class LGap: public LTemplateInstruction<0, 0, 0> {
LAST_INNER_POSITION = AFTER
};
- LParallelMove* GetOrCreateParallelMove(InnerPosition pos) {
- if (parallel_moves_[pos] == NULL) parallel_moves_[pos] = new LParallelMove;
+ LParallelMove* GetOrCreateParallelMove(InnerPosition pos,
+ Zone* zone) {
+ if (parallel_moves_[pos] == NULL) {
+ parallel_moves_[pos] = new(zone) LParallelMove(zone);
+ }
return parallel_moves_[pos];
}
@@ -535,9 +523,8 @@ class LArgumentsLength: public LTemplateInstruction<1, 1, 0> {
class LArgumentsElements: public LTemplateInstruction<1, 0, 0> {
public:
- LArgumentsElements() { }
-
DECLARE_CONCRETE_INSTRUCTION(ArgumentsElements, "arguments-elements")
+ DECLARE_HYDROGEN_ACCESSOR(ArgumentsElements)
};
@@ -831,6 +818,15 @@ class LInstanceOfKnownGlobal: public LTemplateInstruction<1, 1, 1> {
DECLARE_HYDROGEN_ACCESSOR(InstanceOfKnownGlobal)
Handle<JSFunction> function() const { return hydrogen()->function(); }
+ LEnvironment* GetDeferredLazyDeoptimizationEnvironment() {
+ return lazy_deopt_env_;
+ }
+ virtual void SetDeferredLazyDeoptimizationEnvironment(LEnvironment* env) {
+ lazy_deopt_env_ = env;
+ }
+
+ private:
+ LEnvironment* lazy_deopt_env_;
};
@@ -1206,6 +1202,7 @@ class LLoadKeyedFastElement: public LTemplateInstruction<1, 2, 0> {
LOperand* elements() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
+ uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
@@ -1222,13 +1219,13 @@ class LLoadKeyedFastDoubleElement: public LTemplateInstruction<1, 2, 0> {
LOperand* elements() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
+ uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
class LLoadKeyedSpecializedArrayElement: public LTemplateInstruction<1, 2, 0> {
public:
- LLoadKeyedSpecializedArrayElement(LOperand* external_pointer,
- LOperand* key) {
+ LLoadKeyedSpecializedArrayElement(LOperand* external_pointer, LOperand* key) {
inputs_[0] = external_pointer;
inputs_[1] = key;
}
@@ -1242,6 +1239,7 @@ class LLoadKeyedSpecializedArrayElement: public LTemplateInstruction<1, 2, 0> {
ElementsKind elements_kind() const {
return hydrogen()->elements_kind();
}
+ uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
@@ -1358,6 +1356,19 @@ class LPushArgument: public LTemplateInstruction<0, 1, 0> {
};
+class LDrop: public LTemplateInstruction<0, 0, 0> {
+ public:
+ explicit LDrop(int count) : count_(count) { }
+
+ int count() const { return count_; }
+
+ DECLARE_CONCRETE_INSTRUCTION(Drop, "drop")
+
+ private:
+ int count_;
+};
+
+
class LThisFunction: public LTemplateInstruction<1, 0, 0> {
public:
DECLARE_CONCRETE_INSTRUCTION(ThisFunction, "this-function")
@@ -1434,6 +1445,7 @@ class LInvokeFunction: public LTemplateInstruction<1, 1, 0> {
virtual void PrintDataTo(StringStream* stream);
int arity() const { return hydrogen()->argument_count() - 1; }
+ Handle<JSFunction> known_function() { return hydrogen()->known_function(); }
};
@@ -1685,6 +1697,7 @@ class LStoreKeyedFastElement: public LTemplateInstruction<0, 3, 0> {
LOperand* object() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
LOperand* value() { return inputs_[2]; }
+ uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
@@ -1707,6 +1720,9 @@ class LStoreKeyedFastDoubleElement: public LTemplateInstruction<0, 3, 0> {
LOperand* elements() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
LOperand* value() { return inputs_[2]; }
+
+ bool NeedsCanonicalization() { return hydrogen()->NeedsCanonicalization(); }
+ uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
@@ -1730,6 +1746,7 @@ class LStoreKeyedSpecializedArrayElement: public LTemplateInstruction<0, 3, 0> {
ElementsKind elements_kind() const {
return hydrogen()->elements_kind();
}
+ uint32_t additional_index() const { return hydrogen()->index_offset(); }
};
@@ -1857,14 +1874,14 @@ class LCheckInstanceType: public LTemplateInstruction<0, 1, 0> {
};
-class LCheckMap: public LTemplateInstruction<0, 1, 0> {
+class LCheckMaps: public LTemplateInstruction<0, 1, 0> {
public:
- explicit LCheckMap(LOperand* value) {
+ explicit LCheckMaps(LOperand* value) {
inputs_[0] = value;
}
- DECLARE_CONCRETE_INSTRUCTION(CheckMap, "check-map")
- DECLARE_HYDROGEN_ACCESSOR(CheckMap)
+ DECLARE_CONCRETE_INSTRUCTION(CheckMaps, "check-maps")
+ DECLARE_HYDROGEN_ACCESSOR(CheckMaps)
};
@@ -2147,13 +2164,13 @@ class LLoadFieldByIndex: public LTemplateInstruction<1, 2, 0> {
class LChunkBuilder;
class LChunk: public ZoneObject {
public:
- explicit LChunk(CompilationInfo* info, HGraph* graph)
+ LChunk(CompilationInfo* info, HGraph* graph)
: spill_slot_count_(0),
info_(info),
graph_(graph),
- instructions_(32),
- pointer_maps_(8),
- inlined_closures_(1) { }
+ instructions_(32, graph->zone()),
+ pointer_maps_(8, graph->zone()),
+ inlined_closures_(1, graph->zone()) { }
void AddInstruction(LInstruction* instruction, HBasicBlock* block);
LConstantOperand* DefineConstantOperand(HConstant* constant);
@@ -2198,9 +2215,11 @@ class LChunk: public ZoneObject {
}
void AddInlinedClosure(Handle<JSFunction> closure) {
- inlined_closures_.Add(closure);
+ inlined_closures_.Add(closure, zone());
}
+ Zone* zone() const { return graph_->zone(); }
+
private:
int spill_slot_count_;
CompilationInfo* info_;
@@ -2217,7 +2236,7 @@ class LChunkBuilder BASE_EMBEDDED {
: chunk_(NULL),
info_(info),
graph_(graph),
- zone_(graph->isolate()->zone()),
+ zone_(graph->zone()),
status_(UNUSED),
current_instruction_(NULL),
current_block_(NULL),
@@ -2335,11 +2354,6 @@ class LChunkBuilder BASE_EMBEDDED {
LInstruction* instr,
HInstruction* hinstr,
CanDeoptimize can_deoptimize = CANNOT_DEOPTIMIZE_EAGERLY);
- LInstruction* MarkAsSaveDoubles(LInstruction* instr);
-
- LInstruction* SetInstructionPendingDeoptimizationEnvironment(
- LInstruction* instr, int ast_id);
- void ClearInstructionPendingDeoptimizationEnvironment();
LEnvironment* CreateEnvironment(HEnvironment* hydrogen_env,
int* argument_index_accumulator);
diff --git a/deps/v8/src/x64/macro-assembler-x64.cc b/deps/v8/src/x64/macro-assembler-x64.cc
index f7db250f9e..7d5d6d3d0b 100644
--- a/deps/v8/src/x64/macro-assembler-x64.cc
+++ b/deps/v8/src/x64/macro-assembler-x64.cc
@@ -150,6 +150,20 @@ int MacroAssembler::LoadAddressSize(ExternalReference source) {
}
+void MacroAssembler::PushAddress(ExternalReference source) {
+ int64_t address = reinterpret_cast<int64_t>(source.address());
+ if (is_int32(address) && !Serializer::enabled()) {
+ if (emit_debug_code()) {
+ movq(kScratchRegister, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
+ }
+ push(Immediate(static_cast<int32_t>(address)));
+ return;
+ }
+ LoadAddress(kScratchRegister, source);
+ push(kScratchRegister);
+}
+
+
void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
ASSERT(root_array_available_);
movq(destination, Operand(kRootRegister,
@@ -657,7 +671,7 @@ static int Offset(ExternalReference ref0, ExternalReference ref1) {
void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) {
-#ifdef _WIN64
+#if defined(_WIN64) && !defined(__MINGW64__)
// We need to prepare a slot for result handle on stack and put
// a pointer to it into 1st arg register.
EnterApiExitFrame(arg_stack_space + 1);
@@ -705,7 +719,7 @@ void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
RelocInfo::RUNTIME_ENTRY);
call(rax);
-#ifdef _WIN64
+#if defined(_WIN64) && !defined(__MINGW64__)
// rax keeps a pointer to v8::Handle, unpack it.
movq(rax, Operand(rax, 0));
#endif
@@ -2644,10 +2658,12 @@ void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
void MacroAssembler::CheckFastElements(Register map,
Label* fail,
Label::Distance distance) {
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
- STATIC_ASSERT(FAST_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
+ STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_ELEMENTS == 2);
+ STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
cmpb(FieldOperand(map, Map::kBitField2Offset),
- Immediate(Map::kMaximumBitField2FastElementValue));
+ Immediate(Map::kMaximumBitField2FastHoleyElementValue));
j(above, fail, distance);
}
@@ -2655,23 +2671,26 @@ void MacroAssembler::CheckFastElements(Register map,
void MacroAssembler::CheckFastObjectElements(Register map,
Label* fail,
Label::Distance distance) {
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
- STATIC_ASSERT(FAST_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
+ STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
+ STATIC_ASSERT(FAST_ELEMENTS == 2);
+ STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
cmpb(FieldOperand(map, Map::kBitField2Offset),
- Immediate(Map::kMaximumBitField2FastSmiOnlyElementValue));
+ Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
j(below_equal, fail, distance);
cmpb(FieldOperand(map, Map::kBitField2Offset),
- Immediate(Map::kMaximumBitField2FastElementValue));
+ Immediate(Map::kMaximumBitField2FastHoleyElementValue));
j(above, fail, distance);
}
-void MacroAssembler::CheckFastSmiOnlyElements(Register map,
- Label* fail,
- Label::Distance distance) {
- STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
+void MacroAssembler::CheckFastSmiElements(Register map,
+ Label* fail,
+ Label::Distance distance) {
+ STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
+ STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
cmpb(FieldOperand(map, Map::kBitField2Offset),
- Immediate(Map::kMaximumBitField2FastSmiOnlyElementValue));
+ Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
j(above, fail, distance);
}
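The STATIC_ASSERTs above pin the new enum order so that classifying a map's elements kind (as encoded in its bit field 2) stays a single unsigned compare against a maximum value. A hedged sketch of that classification; the enum values mirror the asserts, the rest of the enum is elided.

#include <cassert>

enum ElementsKind {
  FAST_SMI_ELEMENTS = 0,         // packed smis
  FAST_HOLEY_SMI_ELEMENTS = 1,   // smis, may contain holes
  FAST_ELEMENTS = 2,             // packed tagged values
  FAST_HOLEY_ELEMENTS = 3        // tagged values, may contain holes
  // ... double and dictionary kinds follow in the real enum.
};

bool IsFastSmiOrObjectKind(int kind) {   // one compare, like cmpb + j above
  return kind <= FAST_HOLEY_ELEMENTS;
}

bool IsFastSmiKind(int kind) {
  return kind <= FAST_HOLEY_SMI_ELEMENTS;
}

int main() {
  assert(IsFastSmiOrObjectKind(FAST_HOLEY_ELEMENTS));
  assert(IsFastSmiKind(FAST_HOLEY_SMI_ELEMENTS));
  assert(!IsFastSmiKind(FAST_ELEMENTS));
  return 0;
}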
@@ -2735,24 +2754,18 @@ void MacroAssembler::CompareMap(Register obj,
CompareMapMode mode) {
Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
if (mode == ALLOW_ELEMENT_TRANSITION_MAPS) {
- Map* transitioned_fast_element_map(
- map->LookupElementsTransitionMap(FAST_ELEMENTS, NULL));
- ASSERT(transitioned_fast_element_map == NULL ||
- map->elements_kind() != FAST_ELEMENTS);
- if (transitioned_fast_element_map != NULL) {
- j(equal, early_success, Label::kNear);
- Cmp(FieldOperand(obj, HeapObject::kMapOffset),
- Handle<Map>(transitioned_fast_element_map));
- }
-
- Map* transitioned_double_map(
- map->LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, NULL));
- ASSERT(transitioned_double_map == NULL ||
- map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
- if (transitioned_double_map != NULL) {
- j(equal, early_success, Label::kNear);
- Cmp(FieldOperand(obj, HeapObject::kMapOffset),
- Handle<Map>(transitioned_double_map));
+ ElementsKind kind = map->elements_kind();
+ if (IsFastElementsKind(kind)) {
+ bool packed = IsFastPackedElementsKind(kind);
+ Map* current_map = *map;
+ while (CanTransitionToMoreGeneralFastElementsKind(kind, packed)) {
+ kind = GetNextMoreGeneralFastElementsKind(kind, packed);
+ current_map = current_map->LookupElementsTransitionMap(kind);
+ if (!current_map) break;
+ j(equal, early_success, Label::kNear);
+ Cmp(FieldOperand(obj, HeapObject::kMapOffset),
+ Handle<Map>(current_map));
+ }
}
}
}
@@ -4043,27 +4056,38 @@ void MacroAssembler::LoadTransitionedArrayMapConditional(
movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
// Check that the function's map is the same as the expected cached map.
- int expected_index =
- Context::GetContextMapIndexFromElementsKind(expected_kind);
- cmpq(map_in_out, Operand(scratch, Context::SlotOffset(expected_index)));
+ movq(scratch, Operand(scratch,
+ Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
+
+ int offset = expected_kind * kPointerSize +
+ FixedArrayBase::kHeaderSize;
+ cmpq(map_in_out, FieldOperand(scratch, offset));
j(not_equal, no_map_match);
// Use the transitioned cached map.
- int trans_index =
- Context::GetContextMapIndexFromElementsKind(transitioned_kind);
- movq(map_in_out, Operand(scratch, Context::SlotOffset(trans_index)));
+ offset = transitioned_kind * kPointerSize +
+ FixedArrayBase::kHeaderSize;
+ movq(map_in_out, FieldOperand(scratch, offset));
}
void MacroAssembler::LoadInitialArrayMap(
- Register function_in, Register scratch, Register map_out) {
+ Register function_in, Register scratch,
+ Register map_out, bool can_have_holes) {
ASSERT(!function_in.is(map_out));
Label done;
movq(map_out, FieldOperand(function_in,
JSFunction::kPrototypeOrInitialMapOffset));
if (!FLAG_smi_only_arrays) {
- LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
- FAST_ELEMENTS,
+ ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
+ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+ kind,
+ map_out,
+ scratch,
+ &done);
+ } else if (can_have_holes) {
+ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
+ FAST_HOLEY_SMI_ELEMENTS,
map_out,
scratch,
&done);
@@ -4174,7 +4198,7 @@ bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
CodePatcher::CodePatcher(byte* address, int size)
: address_(address),
size_(size),
- masm_(Isolate::Current(), address, size + Assembler::kGap) {
+ masm_(NULL, address, size + Assembler::kGap) {
// Create a new macro assembler pointing to the address of the code to patch.
// The size is adjusted with kGap in order for the assembler to generate size
// bytes of instructions without failing with buffer size constraints.
diff --git a/deps/v8/src/x64/macro-assembler-x64.h b/deps/v8/src/x64/macro-assembler-x64.h
index 6bb5cfeb42..1c1cd95e94 100644
--- a/deps/v8/src/x64/macro-assembler-x64.h
+++ b/deps/v8/src/x64/macro-assembler-x64.h
@@ -127,6 +127,8 @@ class MacroAssembler: public Assembler {
// Returns the size of the code generated by LoadAddress.
// Used by CallSize(ExternalReference) to find the size of a call.
int LoadAddressSize(ExternalReference source);
+ // Pushes the address of the external reference onto the stack.
+ void PushAddress(ExternalReference source);
// Operations on roots in the root-array.
void LoadRoot(Register destination, Heap::RootListIndex index);
@@ -875,9 +877,9 @@ class MacroAssembler: public Assembler {
// Check if a map for a JSObject indicates that the object has fast smi only
// elements. Jump to the specified label if it does not.
- void CheckFastSmiOnlyElements(Register map,
- Label* fail,
- Label::Distance distance = Label::kFar);
+ void CheckFastSmiElements(Register map,
+ Label* fail,
+ Label::Distance distance = Label::kFar);
// Check to see if maybe_number can be stored as a double in
// FastDoubleElements. If it can, store it at the index specified by index in
@@ -1139,7 +1141,8 @@ class MacroAssembler: public Assembler {
// Load the initial map for new Arrays from a JSFunction.
void LoadInitialArrayMap(Register function_in,
Register scratch,
- Register map_out);
+ Register map_out,
+ bool can_have_holes);
// Load the global function with the given index.
void LoadGlobalFunction(int index, Register function);
diff --git a/deps/v8/src/x64/regexp-macro-assembler-x64.cc b/deps/v8/src/x64/regexp-macro-assembler-x64.cc
index 837c2543c3..a72a0a0d1d 100644
--- a/deps/v8/src/x64/regexp-macro-assembler-x64.cc
+++ b/deps/v8/src/x64/regexp-macro-assembler-x64.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -44,21 +44,23 @@ namespace internal {
/*
* This assembler uses the following register assignment convention
- * - rdx : currently loaded character(s) as ASCII or UC16. Must be loaded using
- * LoadCurrentCharacter before using any of the dispatch methods.
- * - rdi : current position in input, as negative offset from end of string.
+ * - rdx : Currently loaded character(s) as ASCII or UC16. Must be loaded
+ * using LoadCurrentCharacter before using any of the dispatch methods.
+ * Temporarily stores the index of capture start after a matching pass
+ * for a global regexp.
+ * - rdi : Current position in input, as negative offset from end of string.
* Please notice that this is the byte offset, not the character
- * offset! Is always a 32-bit signed (negative) offset, but must be
+ * offset! Is always a 32-bit signed (negative) offset, but must be
* maintained sign-extended to 64 bits, since it is used as index.
- * - rsi : end of input (points to byte after last character in input),
+ * - rsi : End of input (points to byte after last character in input),
* so that rsi+rdi points to the current character.
- * - rbp : frame pointer. Used to access arguments, local variables and
+ * - rbp : Frame pointer. Used to access arguments, local variables and
* RegExp registers.
- * - rsp : points to tip of C stack.
- * - rcx : points to tip of backtrack stack. The backtrack stack contains
- * only 32-bit values. Most are offsets from some base (e.g., character
+ * - rsp : Points to tip of C stack.
+ * - rcx : Points to tip of backtrack stack. The backtrack stack contains
+ * only 32-bit values. Most are offsets from some base (e.g., character
* positions from end of string or code location from Code* pointer).
- * - r8 : code object pointer. Used to convert between absolute and
+ * - r8 : Code object pointer. Used to convert between absolute and
* code-object-relative addresses.
*
* The registers rax, rbx, r9 and r11 are free to use for computations.
@@ -72,20 +74,22 @@ namespace internal {
*
* The stack will have the following content, in some order, indexable from the
* frame pointer (see, e.g., kStackHighEnd):
- * - Isolate* isolate (Address of the current isolate)
+ * - Isolate* isolate (address of the current isolate)
* - direct_call (if 1, direct call from JavaScript code, if 0 call
* through the runtime system)
- * - stack_area_base (High end of the memory area to use as
+ * - stack_area_base (high end of the memory area to use as
* backtracking stack)
+ * - capture array size (may fit multiple sets of matches)
* - int* capture_array (int[num_saved_registers_], for output).
- * - end of input (Address of end of string)
- * - start of input (Address of first character in string)
+ * - end of input (address of end of string)
+ * - start of input (address of first character in string)
* - start index (character index of start)
* - String* input_string (input string)
* - return address
* - backup of callee save registers (rbx, possibly rsi and rdi).
+ * - success counter (only useful for global regexp to count matches)
* - Offset of location before start of input (effectively character
- * position -1). Used to initialize capture registers to a non-position.
+ * position -1). Used to initialize capture registers to a non-position.
* - At start of string (if 1, we are starting at the start of the
* string, otherwise 0)
* - register 0 rbp[-n] (Only positions must be stored in the first
@@ -94,7 +98,7 @@ namespace internal {
*
* The first num_saved_registers_ registers are initialized to point to
* "character -1" in the string (i.e., char_size() bytes before the first
- * character of the string). The remaining registers starts out uninitialized.
+ * character of the string). The remaining registers start out uninitialized.
*
* The first seven values must be provided by the calling code by
* calling the code's entry address cast to a function pointer with the
@@ -113,10 +117,12 @@ namespace internal {
RegExpMacroAssemblerX64::RegExpMacroAssemblerX64(
Mode mode,
- int registers_to_save)
- : masm_(Isolate::Current(), NULL, kRegExpCodeSize),
+ int registers_to_save,
+ Zone* zone)
+ : NativeRegExpMacroAssembler(zone),
+ masm_(Isolate::Current(), NULL, kRegExpCodeSize),
no_root_array_scope_(&masm_),
- code_relative_fixup_positions_(4),
+ code_relative_fixup_positions_(4, zone),
mode_(mode),
num_registers_(registers_to_save),
num_saved_registers_(registers_to_save),
@@ -523,15 +529,6 @@ void RegExpMacroAssemblerX64::CheckNotBackReference(
}
-void RegExpMacroAssemblerX64::CheckNotRegistersEqual(int reg1,
- int reg2,
- Label* on_not_equal) {
- __ movq(rax, register_location(reg1));
- __ cmpq(rax, register_location(reg2));
- BranchOrBacktrack(not_equal, on_not_equal);
-}
-
-
void RegExpMacroAssemblerX64::CheckNotCharacter(uint32_t c,
Label* on_not_equal) {
__ cmpl(current_character(), Immediate(c));
@@ -542,9 +539,13 @@ void RegExpMacroAssemblerX64::CheckNotCharacter(uint32_t c,
void RegExpMacroAssemblerX64::CheckCharacterAfterAnd(uint32_t c,
uint32_t mask,
Label* on_equal) {
- __ movl(rax, current_character());
- __ and_(rax, Immediate(mask));
- __ cmpl(rax, Immediate(c));
+ if (c == 0) {
+ __ testl(current_character(), Immediate(mask));
+ } else {
+ __ movl(rax, Immediate(mask));
+ __ and_(rax, current_character());
+ __ cmpl(rax, Immediate(c));
+ }
BranchOrBacktrack(equal, on_equal);
}
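The c == 0 special case above is a small peephole: when the expected masked value is zero, a single testl sets the same flags as the and/cmp pair and leaves rax untouched. Equivalent logic in C++ (illustrative):

#include <cassert>
#include <cstdint>

bool CharacterMatchesAfterAnd(uint32_t current, uint32_t c, uint32_t mask) {
  if (c == 0) {
    return (current & mask) == 0;   // single testl in the generated code
  }
  return (current & mask) == c;     // movl + and_ + cmpl path
}

int main() {
  // Classic use: the 0x20 bit distinguishes ASCII upper and lower case.
  assert(CharacterMatchesAfterAnd('A', 0, 0x20));
  assert(!CharacterMatchesAfterAnd('a', 0, 0x20));
  assert(CharacterMatchesAfterAnd('a', 0x20, 0x20));
  return 0;
}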
@@ -552,9 +553,13 @@ void RegExpMacroAssemblerX64::CheckCharacterAfterAnd(uint32_t c,
void RegExpMacroAssemblerX64::CheckNotCharacterAfterAnd(uint32_t c,
uint32_t mask,
Label* on_not_equal) {
- __ movl(rax, current_character());
- __ and_(rax, Immediate(mask));
- __ cmpl(rax, Immediate(c));
+ if (c == 0) {
+ __ testl(current_character(), Immediate(mask));
+ } else {
+ __ movl(rax, Immediate(mask));
+ __ and_(rax, current_character());
+ __ cmpl(rax, Immediate(c));
+ }
BranchOrBacktrack(not_equal, on_not_equal);
}
@@ -572,6 +577,42 @@ void RegExpMacroAssemblerX64::CheckNotCharacterAfterMinusAnd(
}
+void RegExpMacroAssemblerX64::CheckCharacterInRange(
+ uc16 from,
+ uc16 to,
+ Label* on_in_range) {
+ __ leal(rax, Operand(current_character(), -from));
+ __ cmpl(rax, Immediate(to - from));
+ BranchOrBacktrack(below_equal, on_in_range);
+}
+
+
+void RegExpMacroAssemblerX64::CheckCharacterNotInRange(
+ uc16 from,
+ uc16 to,
+ Label* on_not_in_range) {
+ __ leal(rax, Operand(current_character(), -from));
+ __ cmpl(rax, Immediate(to - from));
+ BranchOrBacktrack(above, on_not_in_range);
+}
+
+
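The leal/cmpl pair in CheckCharacterInRange above implements the usual unsigned range trick: subtract `from`, then one unsigned comparison against `to - from` covers both bounds. In C++ terms (illustrative):

#include <cassert>
#include <cstdint>

bool InRange(uint16_t c, uint16_t from, uint16_t to) {
  // Underflow wraps to a large value, so characters below `from` fail too.
  return static_cast<uint16_t>(c - from) <= static_cast<uint16_t>(to - from);
}

int main() {
  assert(InRange('m', 'a', 'z'));
  assert(!InRange('A', 'a', 'z'));   // wraps below 'a', compares high
  assert(!InRange('{', 'a', 'z'));
  return 0;
}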
+void RegExpMacroAssemblerX64::CheckBitInTable(
+ Handle<ByteArray> table,
+ Label* on_bit_set) {
+ __ Move(rax, table);
+ Register index = current_character();
+ if (mode_ != ASCII || kTableMask != String::kMaxAsciiCharCode) {
+ __ movq(rbx, current_character());
+ __ and_(rbx, Immediate(kTableMask));
+ index = rbx;
+ }
+ __ cmpb(FieldOperand(rax, index, times_1, ByteArray::kHeaderSize),
+ Immediate(0));
+ BranchOrBacktrack(not_equal, on_bit_set);
+}
+
+
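CheckBitInTable above compiles a character class into a byte table indexed by the masked character; a non-zero entry means the class matched. A hedged sketch of the lookup — the 128-entry table size and mask follow the usual layout but are assumptions here.

#include <array>
#include <cassert>
#include <cstdint>

const int kTableSize = 128;
const int kTableMask = kTableSize - 1;

bool BitInTable(const std::array<uint8_t, kTableSize>& table, uint16_t c) {
  return table[c & kTableMask] != 0;   // cmpb against the ByteArray entry
}

int main() {
  std::array<uint8_t, kTableSize> digit_class = {};
  for (int ch = '0'; ch <= '9'; ++ch) digit_class[ch] = 1;
  assert(BitInTable(digit_class, '7'));
  assert(!BitInTable(digit_class, 'x'));
  return 0;
}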
bool RegExpMacroAssemblerX64::CheckSpecialCharacterClass(uc16 type,
Label* on_no_match) {
// Range checks (c in min..max) are generally implemented by an unsigned
@@ -700,13 +741,16 @@ bool RegExpMacroAssemblerX64::CheckSpecialCharacterClass(uc16 type,
void RegExpMacroAssemblerX64::Fail() {
- ASSERT(FAILURE == 0); // Return value for failure is zero.
- __ Set(rax, 0);
+ STATIC_ASSERT(FAILURE == 0); // Return value for failure is zero.
+ if (!global()) {
+ __ Set(rax, FAILURE);
+ }
__ jmp(&exit_label_);
}
Handle<HeapObject> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
+ Label return_rax;
// Finalize code - write the entry point code now we know how many
// registers we need.
// Entry code:
@@ -740,7 +784,7 @@ Handle<HeapObject> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
ASSERT_EQ(kInputStart, -3 * kPointerSize);
ASSERT_EQ(kInputEnd, -4 * kPointerSize);
ASSERT_EQ(kRegisterOutput, -5 * kPointerSize);
- ASSERT_EQ(kStackHighEnd, -6 * kPointerSize);
+ ASSERT_EQ(kNumOutputRegisters, -6 * kPointerSize);
__ push(rdi);
__ push(rsi);
__ push(rdx);
@@ -751,7 +795,8 @@ Handle<HeapObject> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
__ push(rbx); // Callee-save
#endif
- __ push(Immediate(0)); // Make room for "at start" constant.
+ __ push(Immediate(0)); // Number of successful matches in a global regexp.
+ __ push(Immediate(0)); // Make room for "input start - 1" constant.
// Check if we have space on the stack for registers.
Label stack_limit_hit;
@@ -771,14 +816,14 @@ Handle<HeapObject> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
// Exit with OutOfMemory exception. There is not enough space on the stack
// for our working registers.
__ Set(rax, EXCEPTION);
- __ jmp(&exit_label_);
+ __ jmp(&return_rax);
__ bind(&stack_limit_hit);
__ Move(code_object_pointer(), masm_.CodeObject());
CallCheckStackGuardState(); // Preserves no registers beside rbp and rsp.
__ testq(rax, rax);
// If returned value is non-zero, we exit with the returned value as result.
- __ j(not_zero, &exit_label_);
+ __ j(not_zero, &return_rax);
__ bind(&stack_ok);
@@ -803,19 +848,7 @@ Handle<HeapObject> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
// position registers.
__ movq(Operand(rbp, kInputStartMinusOne), rax);
- if (num_saved_registers_ > 0) {
- // Fill saved registers with initial value = start offset - 1
- // Fill in stack push order, to avoid accessing across an unwritten
- // page (a problem on Windows).
- __ Set(rcx, kRegisterZero);
- Label init_loop;
- __ bind(&init_loop);
- __ movq(Operand(rbp, rcx, times_1, 0), rax);
- __ subq(rcx, Immediate(kPointerSize));
- __ cmpq(rcx,
- Immediate(kRegisterZero - num_saved_registers_ * kPointerSize));
- __ j(greater, &init_loop);
- }
+#ifdef WIN32
// Ensure that we have written to each stack page, in order. Skipping a page
// on Windows can cause segmentation faults. Assuming page size is 4k.
const int kPageSize = 4096;
@@ -825,21 +858,49 @@ Handle<HeapObject> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
i += kRegistersPerPage) {
__ movq(register_location(i), rax); // One write every page.
}
+#endif // WIN32
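The WIN32-only loop above exists because Windows commits stack pages one guard page at a time; writing one word per 4 KB page in push order keeps the guard-page mechanism happy when the register frame spans several pages. A sketch of the same idea outside the assembler (direction over a plain buffer is illustrative):

#include <cstddef>

void TouchPagesInOrder(char* frame, size_t frame_size) {
  const size_t kPageSize = 4096;
  for (size_t offset = 0; offset < frame_size; offset += kPageSize) {
    frame[offset] = 0;   // one write per page, so each guard page is hit in turn
  }
}

int main() {
  char frame[3 * 4096];
  TouchPagesInOrder(frame, sizeof(frame));
  return frame[0];
}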
- // Initialize backtrack stack pointer.
- __ movq(backtrack_stackpointer(), Operand(rbp, kStackHighEnd));
// Initialize code object pointer.
__ Move(code_object_pointer(), masm_.CodeObject());
- // Load previous char as initial value of current-character.
- Label at_start;
- __ cmpb(Operand(rbp, kStartIndex), Immediate(0));
- __ j(equal, &at_start);
- LoadCurrentCharacterUnchecked(-1, 1); // Load previous char.
- __ jmp(&start_label_);
- __ bind(&at_start);
+
+ Label load_char_start_regexp, start_regexp;
+ // Load newline if index is at start, previous character otherwise.
+ __ cmpl(Operand(rbp, kStartIndex), Immediate(0));
+ __ j(not_equal, &load_char_start_regexp, Label::kNear);
__ Set(current_character(), '\n');
- __ jmp(&start_label_);
+ __ jmp(&start_regexp, Label::kNear);
+
+ // Global regexp restarts matching here.
+ __ bind(&load_char_start_regexp);
+ // Load previous char as initial value of current character register.
+ LoadCurrentCharacterUnchecked(-1, 1);
+ __ bind(&start_regexp);
+ // Initialize on-stack registers.
+ if (num_saved_registers_ > 0) {
+ // Fill saved registers with initial value = start offset - 1
+ // Fill in stack push order, to avoid accessing across an unwritten
+ // page (a problem on Windows).
+ if (num_saved_registers_ > 8) {
+ __ Set(rcx, kRegisterZero);
+ Label init_loop;
+ __ bind(&init_loop);
+ __ movq(Operand(rbp, rcx, times_1, 0), rax);
+ __ subq(rcx, Immediate(kPointerSize));
+ __ cmpq(rcx,
+ Immediate(kRegisterZero - num_saved_registers_ * kPointerSize));
+ __ j(greater, &init_loop);
+ } else { // Unroll the loop.
+ for (int i = 0; i < num_saved_registers_; i++) {
+ __ movq(register_location(i), rax);
+ }
+ }
+ }
+
+ // Initialize backtrack stack pointer.
+ __ movq(backtrack_stackpointer(), Operand(rbp, kStackHighEnd));
+
+ __ jmp(&start_label_);
// Exit code:
if (success_label_.is_linked()) {
@@ -858,6 +919,10 @@ Handle<HeapObject> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
}
for (int i = 0; i < num_saved_registers_; i++) {
__ movq(rax, register_location(i));
+ if (i == 0 && global_with_zero_length_check()) {
+ // Keep capture start in rdx for the zero-length check later.
+ __ movq(rdx, rax);
+ }
__ addq(rax, rcx); // Convert to index from start, not end.
if (mode_ == UC16) {
__ sar(rax, Immediate(1)); // Convert byte index to character index.
@@ -865,12 +930,57 @@ Handle<HeapObject> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
__ movl(Operand(rbx, i * kIntSize), rax);
}
}
- __ Set(rax, SUCCESS);
+
+ if (global()) {
+ // Restart matching if the regular expression is flagged as global.
+ // Increment success counter.
+ __ incq(Operand(rbp, kSuccessfulCaptures));
+ // Capture results have been stored, so the number of remaining global
+ // output registers is reduced by the number of stored captures.
+ __ movsxlq(rcx, Operand(rbp, kNumOutputRegisters));
+ __ subq(rcx, Immediate(num_saved_registers_));
+ // Check whether we have enough room for another set of capture results.
+ __ cmpq(rcx, Immediate(num_saved_registers_));
+ __ j(less, &exit_label_);
+
+ __ movq(Operand(rbp, kNumOutputRegisters), rcx);
+ // Advance the location for output.
+ __ addq(Operand(rbp, kRegisterOutput),
+ Immediate(num_saved_registers_ * kIntSize));
+
+ // Prepare rax to initialize registers with its value in the next run.
+ __ movq(rax, Operand(rbp, kInputStartMinusOne));
+
+ if (global_with_zero_length_check()) {
+ // Special case for zero-length matches.
+ // rdx: capture start index
+ __ cmpq(rdi, rdx);
+ // Not a zero-length match, restart.
+ __ j(not_equal, &load_char_start_regexp);
+ // rdi (offset from the end) is zero if we already reached the end.
+ __ testq(rdi, rdi);
+ __ j(zero, &exit_label_, Label::kNear);
+ // Advance current position after a zero-length match.
+ if (mode_ == UC16) {
+ __ addq(rdi, Immediate(2));
+ } else {
+ __ incq(rdi);
+ }
+ }
+
+ __ jmp(&load_char_start_regexp);
+ } else {
+ __ movq(rax, Immediate(SUCCESS));
+ }
}
- // Exit and return rax
__ bind(&exit_label_);
+ if (global()) {
+ // Return the number of successful captures.
+ __ movq(rax, Operand(rbp, kSuccessfulCaptures));
+ }
+ __ bind(&return_rax);
#ifdef _WIN64
// Restore callee save registers.
__ lea(rsp, Operand(rbp, kLastCalleeSaveRegister));
@@ -907,7 +1017,7 @@ Handle<HeapObject> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
__ testq(rax, rax);
// If returning non-zero, we should end execution with the given
// result as return value.
- __ j(not_zero, &exit_label_);
+ __ j(not_zero, &return_rax);
// Restore registers.
__ Move(code_object_pointer(), masm_.CodeObject());
@@ -968,7 +1078,7 @@ Handle<HeapObject> RegExpMacroAssemblerX64::GetCode(Handle<String> source) {
__ bind(&exit_with_exception);
// Exit with Result EXCEPTION(-1) to signal thrown exception.
__ Set(rax, EXCEPTION);
- __ jmp(&exit_label_);
+ __ jmp(&return_rax);
}
FixupCodeRelativePositions();
@@ -1091,8 +1201,9 @@ void RegExpMacroAssemblerX64::SetRegister(int register_index, int to) {
}
-void RegExpMacroAssemblerX64::Succeed() {
+bool RegExpMacroAssemblerX64::Succeed() {
__ jmp(&success_label_);
+ return global();
}
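
For readers following the new global-match loop above, here is an illustrative C++ model of the control flow (a sketch only, not the generated code; MatchOnce and every name below are hypothetical): after each successful match the captures are stored, the remaining output space is checked, and the position is advanced past a zero-length match before restarting at load_char_start_regexp.

    #include <cstddef>
    #include <functional>
    #include <vector>

    // One attempt of a matcher: returns true on a match at or after 'start'
    // and fills *match_start / *match_end. A stand-in for the generated code.
    using MatchOnce = std::function<bool(size_t start, size_t* match_start,
                                         size_t* match_end)>;

    // Illustrative model of the loop GetCode() now emits for global regexps.
    int GlobalMatchLoop(const MatchOnce& match_once, size_t subject_length,
                        std::vector<size_t>* output, size_t output_capacity) {
      int successful_matches = 0;               // Modeled by kSuccessfulCaptures.
      size_t position = 0;
      size_t start = 0, end = 0;
      while (match_once(position, &start, &end)) {
        if (output->size() + 2 > output_capacity) break;  // No room for captures.
        output->push_back(start);               // Stored via kRegisterOutput.
        output->push_back(end);
        ++successful_matches;
        if (end == start) {                     // Zero-length match: advance by
          if (end == subject_length) break;     // hand, unless already at the end.
          position = end + 1;                   // Would be end + 2 for UC16.
        } else {
          position = end;
        }
      }
      return successful_matches;                // Returned instead of SUCCESS.
    }
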
diff --git a/deps/v8/src/x64/regexp-macro-assembler-x64.h b/deps/v8/src/x64/regexp-macro-assembler-x64.h
index 7102225e64..a082cf2df3 100644
--- a/deps/v8/src/x64/regexp-macro-assembler-x64.h
+++ b/deps/v8/src/x64/regexp-macro-assembler-x64.h
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -41,7 +41,7 @@ namespace internal {
class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
public:
- RegExpMacroAssemblerX64(Mode mode, int registers_to_save);
+ RegExpMacroAssemblerX64(Mode mode, int registers_to_save, Zone* zone);
virtual ~RegExpMacroAssemblerX64();
virtual int stack_limit_slack();
virtual void AdvanceCurrentPosition(int by);
@@ -66,7 +66,6 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
virtual void CheckNotBackReference(int start_reg, Label* on_no_match);
virtual void CheckNotBackReferenceIgnoreCase(int start_reg,
Label* on_no_match);
- virtual void CheckNotRegistersEqual(int reg1, int reg2, Label* on_not_equal);
virtual void CheckNotCharacter(uint32_t c, Label* on_not_equal);
virtual void CheckNotCharacterAfterAnd(uint32_t c,
uint32_t mask,
@@ -75,6 +74,14 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
uc16 minus,
uc16 mask,
Label* on_not_equal);
+ virtual void CheckCharacterInRange(uc16 from,
+ uc16 to,
+ Label* on_in_range);
+ virtual void CheckCharacterNotInRange(uc16 from,
+ uc16 to,
+ Label* on_not_in_range);
+ virtual void CheckBitInTable(Handle<ByteArray> table, Label* on_bit_set);
+
// Checks whether the given offset from the current position is before
// the end of the string.
virtual void CheckPosition(int cp_offset, Label* on_outside_input);
@@ -101,7 +108,7 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
virtual void ReadStackPointerFromRegister(int reg);
virtual void SetCurrentPositionFromEnd(int by);
virtual void SetRegister(int register_index, int to);
- virtual void Succeed();
+ virtual bool Succeed();
virtual void WriteCurrentPositionToRegister(int reg, int cp_offset);
virtual void ClearRegisters(int reg_from, int reg_to);
virtual void WriteStackPointerToRegister(int reg);
@@ -146,7 +153,12 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
static const int kInputStart = kStartIndex + kPointerSize;
static const int kInputEnd = kInputStart + kPointerSize;
static const int kRegisterOutput = kInputEnd + kPointerSize;
- static const int kStackHighEnd = kRegisterOutput + kPointerSize;
+ // For a global regular expression, we have room to store at least one set
+ // of capture results. For a non-global regexp, we ignore this value.
+ // NumOutputRegisters is passed as a 32-bit value; the upper 32 bits of
+ // this 64-bit stack slot may contain garbage.
+ static const int kNumOutputRegisters = kRegisterOutput + kPointerSize;
+ static const int kStackHighEnd = kNumOutputRegisters + kPointerSize;
// DirectCall is passed as 32 bit int (values 0 or 1).
static const int kDirectCall = kStackHighEnd + kPointerSize;
static const int kIsolate = kDirectCall + kPointerSize;
@@ -159,8 +171,12 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
static const int kInputStart = kStartIndex - kPointerSize;
static const int kInputEnd = kInputStart - kPointerSize;
static const int kRegisterOutput = kInputEnd - kPointerSize;
- static const int kStackHighEnd = kRegisterOutput - kPointerSize;
- static const int kDirectCall = kFrameAlign;
+ // For a global regular expression, we have room to store at least one
+ // set of capture results. For a non-global regexp, this value is
+ // ignored.
+ static const int kNumOutputRegisters = kRegisterOutput - kPointerSize;
+ static const int kStackHighEnd = kFrameAlign;
+ static const int kDirectCall = kStackHighEnd + kPointerSize;
static const int kIsolate = kDirectCall + kPointerSize;
#endif
@@ -175,14 +191,14 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
// AMD64 Calling Convention has only one callee-save register that
// we use. We push this after the frame pointer (and after the
// parameters).
- static const int kBackup_rbx = kStackHighEnd - kPointerSize;
+ static const int kBackup_rbx = kNumOutputRegisters - kPointerSize;
static const int kLastCalleeSaveRegister = kBackup_rbx;
#endif
+ static const int kSuccessfulCaptures = kLastCalleeSaveRegister - kPointerSize;
// When adding local variables remember to push space for them in
// the frame in GetCode.
- static const int kInputStartMinusOne =
- kLastCalleeSaveRegister - kPointerSize;
+ static const int kInputStartMinusOne = kSuccessfulCaptures - kPointerSize;
// First register address. Following registers are below it on the stack.
static const int kRegisterZero = kInputStartMinusOne - kPointerSize;
@@ -224,7 +240,7 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
void BranchOrBacktrack(Condition condition, Label* to);
void MarkPositionForCodeRelativeFixup() {
- code_relative_fixup_positions_.Add(masm_.pc_offset());
+ code_relative_fixup_positions_.Add(masm_.pc_offset(), zone());
}
void FixupCodeRelativePositions();
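
The revised non-Windows parameter layout above is plain pointer-size arithmetic; the constexpr sketch below restates it with placeholder base values (kStartIndex and kFrameAlign are hypothetical here, not the real constants) to make explicit that inserting kNumOutputRegisters moves kStackHighEnd into the stack-argument area and shifts kDirectCall and kIsolate up by one slot.

    // Placeholder bases purely for illustration; the real kStartIndex and
    // kFrameAlign are defined elsewhere in the class.
    constexpr int kPointerSize = 8;
    constexpr int kStartIndex  = -2 * kPointerSize;   // hypothetical
    constexpr int kFrameAlign  =  2 * kPointerSize;   // hypothetical

    // Register-passed arguments, spilled below rbp (offsets grow downwards).
    constexpr int kInputStart         = kStartIndex - kPointerSize;
    constexpr int kInputEnd           = kInputStart - kPointerSize;
    constexpr int kRegisterOutput     = kInputEnd - kPointerSize;
    constexpr int kNumOutputRegisters = kRegisterOutput - kPointerSize;  // new

    // Stack-passed arguments, above the return address (offsets grow upwards).
    constexpr int kStackHighEnd = kFrameAlign;                   // was a register arg
    constexpr int kDirectCall   = kStackHighEnd + kPointerSize;  // was kFrameAlign
    constexpr int kIsolate      = kDirectCall + kPointerSize;

    // Everything after kStackHighEnd shifts up by exactly one pointer.
    static_assert(kDirectCall == kFrameAlign + kPointerSize, "one-slot shift");
    static_assert(kIsolate == kFrameAlign + 2 * kPointerSize, "one-slot shift");
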
diff --git a/deps/v8/src/x64/simulator-x64.h b/deps/v8/src/x64/simulator-x64.h
index df8423a654..8aba70181f 100644
--- a/deps/v8/src/x64/simulator-x64.h
+++ b/deps/v8/src/x64/simulator-x64.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -40,12 +40,12 @@ namespace internal {
(entry(p0, p1, p2, p3, p4))
typedef int (*regexp_matcher)(String*, int, const byte*,
- const byte*, int*, Address, int, Isolate*);
+ const byte*, int*, int, Address, int, Isolate*);
// Call the generated regexp code directly. The code at the entry address should
// expect nine int/pointer sized arguments and return an int.
-#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \
- (FUNCTION_CAST<regexp_matcher>(entry)(p0, p1, p2, p3, p4, p5, p6, p7))
+#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7, p8) \
+ (FUNCTION_CAST<regexp_matcher>(entry)(p0, p1, p2, p3, p4, p5, p6, p7, p8))
#define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
(reinterpret_cast<TryCatch*>(try_catch_address))
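
The typedef change above threads one extra int (the number of output registers) through the direct-call path, so the generated code is now invoked with nine arguments. A generic sketch of the same pattern, using placeholder types and names rather than V8's FUNCTION_CAST machinery:

    #include <cstdint>

    // Placeholder signature, not V8's; the sixth argument (output_size)
    // corresponds to the one added by this change.
    typedef int (*matcher_fn)(const char* subject, int start_index,
                              const uint8_t* input_start, const uint8_t* input_end,
                              int* output, int output_size, void* stack_base,
                              int direct_call, void* isolate);

    // Equivalent in spirit to CALL_GENERATED_REGEXP_CODE: cast the code entry
    // to the nine-argument signature and call it directly.
    inline int CallMatcher(void* entry, const char* subject, int start_index,
                           const uint8_t* input_start, const uint8_t* input_end,
                           int* output, int output_size, void* stack_base,
                           int direct_call, void* isolate) {
      return reinterpret_cast<matcher_fn>(entry)(subject, start_index, input_start,
                                                 input_end, output, output_size,
                                                 stack_base, direct_call, isolate);
    }
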
diff --git a/deps/v8/src/x64/stub-cache-x64.cc b/deps/v8/src/x64/stub-cache-x64.cc
index 96ff499354..a6acd9791e 100644
--- a/deps/v8/src/x64/stub-cache-x64.cc
+++ b/deps/v8/src/x64/stub-cache-x64.cc
@@ -379,6 +379,7 @@ static void PushInterceptorArguments(MacroAssembler* masm,
__ push(receiver);
__ push(holder);
__ push(FieldOperand(kScratchRegister, InterceptorInfo::kDataOffset));
+ __ PushAddress(ExternalReference::isolate_address());
}
@@ -393,7 +394,7 @@ static void CompileCallLoadPropertyWithInterceptor(
ExternalReference ref =
ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
masm->isolate());
- __ Set(rax, 5);
+ __ Set(rax, 6);
__ LoadAddress(rbx, ref);
CEntryStub stub(1);
@@ -402,7 +403,7 @@ static void CompileCallLoadPropertyWithInterceptor(
// Number of pointers to be reserved on stack for fast API call.
-static const int kFastApiCallArguments = 3;
+static const int kFastApiCallArguments = 4;
// Reserves space for the extra arguments to API function in the
@@ -452,10 +453,11 @@ static void GenerateFastApiCall(MacroAssembler* masm,
// -- rsp[16] : api function
// (first fast api call extra argument)
// -- rsp[24] : api call data
- // -- rsp[32] : last argument
+ // -- rsp[32] : isolate
+ // -- rsp[40] : last argument
// -- ...
- // -- rsp[(argc + 3) * 8] : first argument
- // -- rsp[(argc + 4) * 8] : receiver
+ // -- rsp[(argc + 4) * 8] : first argument
+ // -- rsp[(argc + 5) * 8] : receiver
// -----------------------------------
// Get the function and setup the context.
Handle<JSFunction> function = optimization.constant_function();
@@ -473,11 +475,15 @@ static void GenerateFastApiCall(MacroAssembler* masm,
} else {
__ Move(Operand(rsp, 3 * kPointerSize), call_data);
}
+ __ movq(kScratchRegister, ExternalReference::isolate_address());
+ __ movq(Operand(rsp, 4 * kPointerSize), kScratchRegister);
// Prepare arguments.
- __ lea(rbx, Operand(rsp, 3 * kPointerSize));
+ __ lea(rbx, Operand(rsp, 4 * kPointerSize));
-#ifdef _WIN64
+#if defined(__MINGW64__)
+ Register arguments_arg = rcx;
+#elif defined(_WIN64)
// Win64 uses first register--rcx--for returned value.
Register arguments_arg = rdx;
#else
@@ -663,7 +669,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
__ CallExternalReference(
ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
masm->isolate()),
- 5);
+ 6);
// Restore the name_ register.
__ pop(name_);
@@ -725,10 +731,22 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
Handle<JSObject> object,
int index,
Handle<Map> transition,
+ Handle<String> name,
Register receiver_reg,
Register name_reg,
- Register scratch,
+ Register scratch1,
+ Register scratch2,
Label* miss_label) {
+ LookupResult lookup(masm->isolate());
+ object->Lookup(*name, &lookup);
+ if (lookup.IsFound() && (lookup.IsReadOnly() || !lookup.IsCacheable())) {
+ // In sloppy mode, we could just return the value and be done. However, we
+ // might be in strict mode, where we have to throw. Since we cannot tell,
+ // go into slow case unconditionally.
+ __ jmp(miss_label);
+ return;
+ }
+
// Check that the map of the object hasn't changed.
CompareMapMode mode = transition.is_null() ? ALLOW_ELEMENT_TRANSITION_MAPS
: REQUIRE_EXACT_MAP;
@@ -737,7 +755,32 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
// Perform global security token check if needed.
if (object->IsJSGlobalProxy()) {
- __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
+ __ CheckAccessGlobalProxy(receiver_reg, scratch1, miss_label);
+ }
+
+ // Check that we are allowed to write this.
+ if (!transition.is_null() && object->GetPrototype()->IsJSObject()) {
+ JSObject* holder;
+ if (lookup.IsFound()) {
+ holder = lookup.holder();
+ } else {
+ // Find the top object.
+ holder = *object;
+ do {
+ holder = JSObject::cast(holder->GetPrototype());
+ } while (holder->GetPrototype()->IsJSObject());
+ }
+ // We need an extra register, so push name_reg and reuse it as a scratch.
+ __ push(name_reg);
+ Label miss_pop, done_check;
+ CheckPrototypes(object, receiver_reg, Handle<JSObject>(holder), name_reg,
+ scratch1, scratch2, name, &miss_pop);
+ __ jmp(&done_check);
+ __ bind(&miss_pop);
+ __ pop(name_reg);
+ __ jmp(miss_label);
+ __ bind(&done_check);
+ __ pop(name_reg);
}
// Stub never generated for non-global objects that require access
@@ -748,11 +791,11 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
if (!transition.is_null() && (object->map()->unused_property_fields() == 0)) {
// The properties must be extended before we can store the value.
// We jump to a runtime call that extends the properties array.
- __ pop(scratch); // Return address.
+ __ pop(scratch1); // Return address.
__ push(receiver_reg);
__ Push(transition);
__ push(rax);
- __ push(scratch);
+ __ push(scratch1);
__ TailCallExternalReference(
ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
masm->isolate()),
@@ -762,9 +805,19 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
}
if (!transition.is_null()) {
- // Update the map of the object; no write barrier updating is
- // needed because the map is never in new space.
- __ Move(FieldOperand(receiver_reg, HeapObject::kMapOffset), transition);
+ // Update the map of the object.
+ __ Move(scratch1, transition);
+ __ movq(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);
+
+ // Update the write barrier for the map field and pass the now unused
+ // name_reg as scratch register.
+ __ RecordWriteField(receiver_reg,
+ HeapObject::kMapOffset,
+ scratch1,
+ name_reg,
+ kDontSaveFPRegs,
+ OMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
}
// Adjust for the number of properties stored in the object. Even in the
@@ -781,19 +834,19 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
// Pass the value being stored in the now unused name_reg.
__ movq(name_reg, rax);
__ RecordWriteField(
- receiver_reg, offset, name_reg, scratch, kDontSaveFPRegs);
+ receiver_reg, offset, name_reg, scratch1, kDontSaveFPRegs);
} else {
// Write to the properties array.
int offset = index * kPointerSize + FixedArray::kHeaderSize;
// Get the properties array (optimistically).
- __ movq(scratch, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
- __ movq(FieldOperand(scratch, offset), rax);
+ __ movq(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
+ __ movq(FieldOperand(scratch1, offset), rax);
// Update the write barrier for the array address.
// Pass the value being stored in the now unused name_reg.
__ movq(name_reg, rax);
__ RecordWriteField(
- scratch, offset, name_reg, receiver_reg, kDontSaveFPRegs);
+ scratch1, offset, name_reg, receiver_reg, kDontSaveFPRegs);
}
// Return the value (register rax).
@@ -1005,11 +1058,15 @@ void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
} else {
__ Push(Handle<Object>(callback->data()));
}
+ __ PushAddress(ExternalReference::isolate_address()); // isolate
__ push(name_reg); // name
// Save a pointer to where we pushed the arguments pointer.
// This will be passed as the const AccessorInfo& to the C++ callback.
-#ifdef _WIN64
+#if defined(__MINGW64__)
+ Register accessor_info_arg = rdx;
+ Register name_arg = rcx;
+#elif defined(_WIN64)
// Win64 uses first register--rcx--for returned value.
Register accessor_info_arg = r8;
Register name_arg = rdx;
@@ -1022,14 +1079,14 @@ void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
__ movq(name_arg, rsp);
__ push(scratch2); // Restore return address.
- // 3 elements array for v8::Arguments::values_ and handler for name.
- const int kStackSpace = 4;
+ // 4 elements array for v8::Arguments::values_ and handler for name.
+ const int kStackSpace = 5;
// Allocate v8::AccessorInfo in non-GCed stack space.
const int kArgStackSpace = 1;
__ PrepareCallApiFunction(kArgStackSpace);
- __ lea(rax, Operand(name_arg, 3 * kPointerSize));
+ __ lea(rax, Operand(name_arg, 4 * kPointerSize));
// v8::AccessorInfo::args_.
__ movq(StackSpaceOperand(0), rax);
@@ -1090,8 +1147,9 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
compile_followup_inline = true;
} else if (lookup->type() == CALLBACKS &&
lookup->GetCallbackObject()->IsAccessorInfo()) {
- compile_followup_inline =
- AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL;
+ AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
+ compile_followup_inline = callback->getter() != NULL &&
+ callback->IsCompatibleReceiver(*object);
}
}
@@ -1186,6 +1244,7 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
__ push(holder_reg);
__ Move(holder_reg, callback);
__ push(FieldOperand(holder_reg, AccessorInfo::kDataOffset));
+ __ PushAddress(ExternalReference::isolate_address());
__ push(holder_reg);
__ push(name_reg);
__ push(scratch2); // restore return address
@@ -1193,7 +1252,7 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
ExternalReference ref =
ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
isolate());
- __ TailCallExternalReference(ref, 5, 1);
+ __ TailCallExternalReference(ref, 6, 1);
}
} else { // !compile_followup_inline
// Call the runtime system to load the interceptor.
@@ -1208,7 +1267,7 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
ExternalReference ref = ExternalReference(
IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), isolate());
- __ TailCallExternalReference(ref, 5, 1);
+ __ TailCallExternalReference(ref, 6, 1);
}
}
@@ -1423,17 +1482,32 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ jmp(&fast_object);
// In case of fast smi-only, convert to fast object, otherwise bail out.
__ bind(&not_fast_object);
- __ CheckFastSmiOnlyElements(rbx, &call_builtin);
+ __ CheckFastSmiElements(rbx, &call_builtin);
// rdx: receiver
// rbx: map
- __ movq(r9, rdi); // Backup rdi as it is going to be trashed.
- __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+
+ Label try_holey_map;
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
FAST_ELEMENTS,
rbx,
rdi,
+ &try_holey_map);
+
+ ElementsTransitionGenerator::
+ GenerateMapChangeElementsTransition(masm());
+ // Restore rdi.
+ __ movq(rdi, FieldOperand(rdx, JSArray::kElementsOffset));
+ __ jmp(&fast_object);
+
+ __ bind(&try_holey_map);
+ __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS,
+ FAST_HOLEY_ELEMENTS,
+ rbx,
+ rdi,
&call_builtin);
- ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm());
- __ movq(rdi, r9);
+ ElementsTransitionGenerator::
+ GenerateMapChangeElementsTransition(masm());
+ __ movq(rdi, FieldOperand(rdx, JSArray::kElementsOffset));
__ bind(&fast_object);
} else {
__ CheckFastObjectElements(rbx, &call_builtin);
@@ -2000,7 +2074,7 @@ Handle<Code> CallStubCompiler::CompileFastApiCall(
name, depth, &miss);
// Move the return address on top of the stack.
- __ movq(rax, Operand(rsp, 3 * kPointerSize));
+ __ movq(rax, Operand(rsp, 4 * kPointerSize));
__ movq(Operand(rsp, 0 * kPointerSize), rax);
GenerateFastApiCall(masm(), optimization, argc);
@@ -2285,7 +2359,13 @@ Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object,
Label miss;
// Generate store field code. Preserves receiver and name on jump to miss.
- GenerateStoreField(masm(), object, index, transition, rdx, rcx, rbx, &miss);
+ GenerateStoreField(masm(),
+ object,
+ index,
+ transition,
+ name,
+ rdx, rcx, rbx, rdi,
+ &miss);
// Handle store cache miss.
__ bind(&miss);
@@ -2344,6 +2424,52 @@ Handle<Code> StoreStubCompiler::CompileStoreCallback(
}
+Handle<Code> StoreStubCompiler::CompileStoreViaSetter(
+ Handle<JSObject> receiver,
+ Handle<JSFunction> setter,
+ Handle<String> name) {
+ // ----------- S t a t e -------------
+ // -- rax : value
+ // -- rcx : name
+ // -- rdx : receiver
+ // -- rsp[0] : return address
+ // -----------------------------------
+ Label miss;
+
+ // Check that the map of the object hasn't changed.
+ __ CheckMap(rdx, Handle<Map>(receiver->map()), &miss, DO_SMI_CHECK,
+ ALLOW_ELEMENT_TRANSITION_MAPS);
+
+ {
+ FrameScope scope(masm(), StackFrame::INTERNAL);
+
+ // Save value register, so we can restore it later.
+ __ push(rax);
+
+ // Call the JavaScript setter with the receiver and the value on the stack.
+ __ push(rdx);
+ __ push(rax);
+ ParameterCount actual(1);
+ __ InvokeFunction(setter, actual, CALL_FUNCTION, NullCallWrapper(),
+ CALL_AS_METHOD);
+
+ // We have to return the passed value, not the return value of the setter.
+ __ pop(rax);
+
+ // Restore context register.
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
+ }
+ __ ret(0);
+
+ __ bind(&miss);
+ Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(CALLBACKS, name);
+}
+
+
Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
Handle<JSObject> receiver,
Handle<String> name) {
@@ -2458,7 +2584,13 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object,
__ j(not_equal, &miss);
// Generate store field code. Preserves receiver and name on jump to miss.
- GenerateStoreField(masm(), object, index, transition, rdx, rcx, rbx, &miss);
+ GenerateStoreField(masm(),
+ object,
+ index,
+ transition,
+ name,
+ rdx, rcx, rbx, rdi,
+ &miss);
// Handle store cache miss.
__ bind(&miss);
@@ -2612,6 +2744,44 @@ Handle<Code> LoadStubCompiler::CompileLoadCallback(
}
+Handle<Code> LoadStubCompiler::CompileLoadViaGetter(
+ Handle<String> name,
+ Handle<JSObject> receiver,
+ Handle<JSObject> holder,
+ Handle<JSFunction> getter) {
+ // ----------- S t a t e -------------
+ // -- rax : receiver
+ // -- rcx : name
+ // -- rsp[0] : return address
+ // -----------------------------------
+ Label miss;
+
+ // Check that the maps haven't changed.
+ __ JumpIfSmi(rax, &miss);
+ CheckPrototypes(receiver, rax, holder, rbx, rdx, rdi, name, &miss);
+
+ {
+ FrameScope scope(masm(), StackFrame::INTERNAL);
+
+ // Call the JavaScript getter with the receiver on the stack.
+ __ push(rax);
+ ParameterCount actual(0);
+ __ InvokeFunction(getter, actual, CALL_FUNCTION, NullCallWrapper(),
+ CALL_AS_METHOD);
+
+ // Restore context register.
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
+ }
+ __ ret(0);
+
+ __ bind(&miss);
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(CALLBACKS, name);
+}
+
+
Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object,
Handle<JSObject> holder,
Handle<JSFunction> value,
@@ -3121,6 +3291,32 @@ void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
__ jmp(miss_ic, RelocInfo::CODE_TARGET);
}
+
+static void GenerateSmiKeyCheck(MacroAssembler* masm,
+ Register key,
+ Register scratch,
+ XMMRegister xmm_scratch0,
+ XMMRegister xmm_scratch1,
+ Label* fail) {
+ // Check that key is a smi or a heap number containing a smi and branch
+ // if the check fails.
+ Label key_ok;
+ __ JumpIfSmi(key, &key_ok);
+ __ CheckMap(key,
+ masm->isolate()->factory()->heap_number_map(),
+ fail,
+ DONT_DO_SMI_CHECK);
+ __ movsd(xmm_scratch0, FieldOperand(key, HeapNumber::kValueOffset));
+ __ cvttsd2si(scratch, xmm_scratch0);
+ __ cvtlsi2sd(xmm_scratch1, scratch);
+ __ ucomisd(xmm_scratch1, xmm_scratch0);
+ __ j(not_equal, fail);
+ __ j(parity_even, fail); // NaN.
+ __ Integer32ToSmi(key, scratch);
+ __ bind(&key_ok);
+}
+
+
void KeyedLoadStubCompiler::GenerateLoadExternalArray(
MacroAssembler* masm,
ElementsKind elements_kind) {
@@ -3134,8 +3330,8 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
- // Check that the key is a smi.
- __ JumpIfNotSmi(rax, &miss_force_generic);
+ // Check that the key is a smi or a heap number convertible to a smi.
+ GenerateSmiKeyCheck(masm, rax, rcx, xmm0, xmm1, &miss_force_generic);
// Check that the index is in range.
__ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
@@ -3269,8 +3465,8 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
- // Check that the key is a smi.
- __ JumpIfNotSmi(rcx, &miss_force_generic);
+ // Check that the key is a smi or a heap number convertible to a smi.
+ GenerateSmiKeyCheck(masm, rcx, rbx, xmm0, xmm1, &miss_force_generic);
// Check that the index is in range.
__ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
@@ -3332,8 +3528,11 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
__ movsd(Operand(rbx, rdi, times_8, 0), xmm0);
break;
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -3371,37 +3570,38 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
} else {
// Perform float-to-int conversion with truncation (round-to-zero)
// behavior.
+ // Fast path: use machine instruction to convert to int64. If that
+ // fails (out-of-range), go into the runtime.
+ __ cvttsd2siq(r8, xmm0);
+ __ Set(kScratchRegister, V8_UINT64_C(0x8000000000000000));
+ __ cmpq(r8, kScratchRegister);
+ __ j(equal, &slow);
- // Convert to int32 and store the low byte/word.
- // If the value is NaN or +/-infinity, the result is 0x80000000,
- // which is automatically zero when taken mod 2^n, n < 32.
// rdx: value (converted to an untagged integer)
// rdi: untagged index
// rbx: base pointer of external storage
switch (elements_kind) {
case EXTERNAL_BYTE_ELEMENTS:
case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
- __ cvttsd2si(rdx, xmm0);
- __ movb(Operand(rbx, rdi, times_1, 0), rdx);
+ __ movb(Operand(rbx, rdi, times_1, 0), r8);
break;
case EXTERNAL_SHORT_ELEMENTS:
case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
- __ cvttsd2si(rdx, xmm0);
- __ movw(Operand(rbx, rdi, times_2, 0), rdx);
+ __ movw(Operand(rbx, rdi, times_2, 0), r8);
break;
case EXTERNAL_INT_ELEMENTS:
case EXTERNAL_UNSIGNED_INT_ELEMENTS:
- // Convert to int64, so that NaN and infinities become
- // 0x8000000000000000, which is zero mod 2^32.
- __ cvttsd2siq(rdx, xmm0);
- __ movl(Operand(rbx, rdi, times_4, 0), rdx);
+ __ movl(Operand(rbx, rdi, times_4, 0), r8);
break;
case EXTERNAL_PIXEL_ELEMENTS:
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_ELEMENTS:
- case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_SMI_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
+ case FAST_HOLEY_ELEMENTS:
+ case FAST_HOLEY_SMI_ELEMENTS:
+ case FAST_HOLEY_DOUBLE_ELEMENTS:
case DICTIONARY_ELEMENTS:
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
@@ -3451,8 +3651,8 @@ void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
- // Check that the key is a smi.
- __ JumpIfNotSmi(rax, &miss_force_generic);
+ // Check that the key is a smi or a heap number convertible to a smi.
+ GenerateSmiKeyCheck(masm, rax, rcx, xmm0, xmm1, &miss_force_generic);
// Get the elements array.
__ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
@@ -3493,8 +3693,8 @@ void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
- // Check that the key is a smi.
- __ JumpIfNotSmi(rax, &miss_force_generic);
+ // Check that the key is a smi or a heap number convertible to a smi.
+ GenerateSmiKeyCheck(masm, rax, rcx, xmm0, xmm1, &miss_force_generic);
// Get the elements array.
__ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
@@ -3549,10 +3749,10 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
- // Check that the key is a smi.
- __ JumpIfNotSmi(rcx, &miss_force_generic);
+ // Check that the key is a smi or a heap number convertible to a smi.
+ GenerateSmiKeyCheck(masm, rcx, rbx, xmm0, xmm1, &miss_force_generic);
- if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
+ if (IsFastSmiElementsKind(elements_kind)) {
__ JumpIfNotSmi(rax, &transition_elements_kind);
}
@@ -3576,13 +3776,13 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
__ j(not_equal, &miss_force_generic);
__ bind(&finish_store);
- if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
+ if (IsFastSmiElementsKind(elements_kind)) {
__ SmiToInteger32(rcx, rcx);
__ movq(FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize),
rax);
} else {
// Do the store and update the write barrier.
- ASSERT(elements_kind == FAST_ELEMENTS);
+ ASSERT(IsFastObjectElementsKind(elements_kind));
__ SmiToInteger32(rcx, rcx);
__ lea(rcx,
FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize));
@@ -3691,8 +3891,8 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
- // Check that the key is a smi.
- __ JumpIfNotSmi(rcx, &miss_force_generic);
+ // Check that the key is a smi or a heap number convertible to a smi.
+ GenerateSmiKeyCheck(masm, rcx, rbx, xmm0, xmm1, &miss_force_generic);
// Get the elements array.
__ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
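
The new GenerateSmiKeyCheck accepts heap-number keys whose value is an exact small integer: it truncates to an integer, converts back, and compares, rejecting NaN via the parity flag. A rough C++ equivalent of that check (a sketch under the assumption of int32 indices, not the stub itself):

    #include <cmath>
    #include <cstdint>

    // Returns true and stores the index if 'key' is a double holding an exact
    // int32, mirroring the cvttsd2si / cvtlsi2sd / ucomisd sequence above.
    bool KeyToInt32(double key, int32_t* out) {
      if (std::isnan(key)) return false;           // ucomisd raises PF on NaN.
      if (key < INT32_MIN || key > INT32_MAX) return false;      // guard the cast
      int32_t truncated = static_cast<int32_t>(key);             // cvttsd2si
      if (static_cast<double>(truncated) != key) return false;   // round trip differs
      *out = truncated;                            // Integer32ToSmi in the stub.
      return true;
    }
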
diff --git a/deps/v8/src/zone-inl.h b/deps/v8/src/zone-inl.h
index ee96ec052e..d75e297af1 100644
--- a/deps/v8/src/zone-inl.h
+++ b/deps/v8/src/zone-inl.h
@@ -90,30 +90,17 @@ ZoneSplayTree<Config>::~ZoneSplayTree() {
// Reset the root to avoid unneeded iteration over all tree nodes
// in the destructor. For a zone-allocated tree, nodes will be
// freed by the Zone.
- SplayTree<Config, ZoneListAllocationPolicy>::ResetRoot();
+ SplayTree<Config, ZoneAllocationPolicy>::ResetRoot();
}
-// TODO(isolates): for performance reasons, this should be replaced with a new
-// operator that takes the zone in which the object should be
-// allocated.
-void* ZoneObject::operator new(size_t size) {
- return ZONE->New(static_cast<int>(size));
-}
-
void* ZoneObject::operator new(size_t size, Zone* zone) {
return zone->New(static_cast<int>(size));
}
-
-inline void* ZoneListAllocationPolicy::New(int size) {
- return ZONE->New(size);
-}
-
-
-template <typename T>
-void* ZoneList<T>::operator new(size_t size) {
- return ZONE->New(static_cast<int>(size));
+inline void* ZoneAllocationPolicy::New(size_t size) {
+ ASSERT(zone_);
+ return zone_->New(size);
}
diff --git a/deps/v8/src/zone.h b/deps/v8/src/zone.h
index 864846553a..1bc4984aa2 100644
--- a/deps/v8/src/zone.h
+++ b/deps/v8/src/zone.h
@@ -148,7 +148,6 @@ class Zone {
class ZoneObject {
public:
// Allocate a new ZoneObject of 'size' bytes in the Zone.
- INLINE(void* operator new(size_t size));
INLINE(void* operator new(size_t size, Zone* zone));
// Ideally, the delete operator should be private instead of
@@ -164,16 +163,16 @@ class ZoneObject {
};
-// The ZoneListAllocationPolicy is used to specialize the GenericList
-// implementation to allocate ZoneLists and their elements in the
-// Zone.
-class ZoneListAllocationPolicy {
+// The ZoneAllocationPolicy is used to specialize generic data
+// structures to allocate themselves and their elements in the Zone.
+struct ZoneAllocationPolicy {
public:
- // Allocate 'size' bytes of memory in the zone.
- static void* New(int size);
+ explicit ZoneAllocationPolicy(Zone* zone) : zone_(zone) { }
+ INLINE(void* New(size_t size));
+ INLINE(static void Delete(void *pointer)) { }
- // De-allocation attempts are silently ignored.
- static void Delete(void* p) { }
+ private:
+ Zone* zone_;
};
@@ -182,20 +181,48 @@ class ZoneListAllocationPolicy {
// Zone. ZoneLists cannot be deleted individually; you can delete all
// objects in the Zone by calling Zone::DeleteAll().
template<typename T>
-class ZoneList: public List<T, ZoneListAllocationPolicy> {
+class ZoneList: public List<T, ZoneAllocationPolicy> {
public:
- INLINE(void* operator new(size_t size));
- INLINE(void* operator new(size_t size, Zone* zone));
-
// Construct a new ZoneList with the given capacity; the length is
// always zero. The capacity must be non-negative.
- explicit ZoneList(int capacity)
- : List<T, ZoneListAllocationPolicy>(capacity) { }
+ ZoneList(int capacity, Zone* zone)
+ : List<T, ZoneAllocationPolicy>(capacity, ZoneAllocationPolicy(zone)) { }
+
+ INLINE(void* operator new(size_t size, Zone* zone));
// Construct a new ZoneList by copying the elements of the given ZoneList.
- explicit ZoneList(const ZoneList<T>& other)
- : List<T, ZoneListAllocationPolicy>(other.length()) {
- AddAll(other);
+ ZoneList(const ZoneList<T>& other, Zone* zone)
+ : List<T, ZoneAllocationPolicy>(other.length(),
+ ZoneAllocationPolicy(zone)) {
+ AddAll(other, ZoneAllocationPolicy(zone));
+ }
+
+ // We add some convenience wrappers so that we can pass in a Zone
+ // instead of a (less convenient) ZoneAllocationPolicy.
+ INLINE(void Add(const T& element, Zone* zone)) {
+ List<T, ZoneAllocationPolicy>::Add(element, ZoneAllocationPolicy(zone));
+ }
+ INLINE(void AddAll(const List<T, ZoneAllocationPolicy>& other,
+ Zone* zone)) {
+ List<T, ZoneAllocationPolicy>::AddAll(other, ZoneAllocationPolicy(zone));
+ }
+ INLINE(void AddAll(const Vector<T>& other, Zone* zone)) {
+ List<T, ZoneAllocationPolicy>::AddAll(other, ZoneAllocationPolicy(zone));
+ }
+ INLINE(void InsertAt(int index, const T& element, Zone* zone)) {
+ List<T, ZoneAllocationPolicy>::InsertAt(index, element,
+ ZoneAllocationPolicy(zone));
+ }
+ INLINE(Vector<T> AddBlock(T value, int count, Zone* zone)) {
+ return List<T, ZoneAllocationPolicy>::AddBlock(value, count,
+ ZoneAllocationPolicy(zone));
+ }
+ INLINE(void Allocate(int length, Zone* zone)) {
+ List<T, ZoneAllocationPolicy>::Allocate(length, ZoneAllocationPolicy(zone));
+ }
+ INLINE(void Initialize(int capacity, Zone* zone)) {
+ List<T, ZoneAllocationPolicy>::Initialize(capacity,
+ ZoneAllocationPolicy(zone));
}
void operator delete(void* pointer) { UNREACHABLE(); }
@@ -232,15 +259,15 @@ class ZoneScope BASE_EMBEDDED {
// different configurations of a concrete splay tree (see splay-tree.h).
// The tree itself and all its elements are allocated in the Zone.
template <typename Config>
-class ZoneSplayTree: public SplayTree<Config, ZoneListAllocationPolicy> {
+class ZoneSplayTree: public SplayTree<Config, ZoneAllocationPolicy> {
public:
- ZoneSplayTree()
- : SplayTree<Config, ZoneListAllocationPolicy>() {}
+ explicit ZoneSplayTree(Zone* zone)
+ : SplayTree<Config, ZoneAllocationPolicy>(ZoneAllocationPolicy(zone)) {}
~ZoneSplayTree();
};
-typedef TemplateHashMapImpl<ZoneListAllocationPolicy> ZoneHashMap;
+typedef TemplateHashMapImpl<ZoneAllocationPolicy> ZoneHashMap;
} } // namespace v8::internal
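
The zone changes replace the implicit, isolate-global allocation policy with a policy object that carries its Zone explicitly and is handed to every growing operation. The pattern is easy to show in isolation; the sketch below uses a toy arena and list (hypothetical names, not V8's classes) to mirror the new ZoneList::Add(element, zone) style.

    #include <cstddef>
    #include <cstdlib>
    #include <new>
    #include <vector>

    // A toy bump allocator standing in for v8::internal::Zone.
    class Arena {
     public:
      explicit Arena(size_t capacity)
          : buffer_(static_cast<char*>(std::malloc(capacity))),
            top_(buffer_), end_(buffer_ + capacity) {}
      ~Arena() { std::free(buffer_); }
      void* New(size_t size) {
        if (top_ + size > end_) return nullptr;  // Real zones grow a new segment.
        void* result = top_;
        top_ += size;
        return result;
      }
     private:
      char* buffer_;
      char* top_;
      char* end_;
    };

    // The policy carries its arena explicitly, like ZoneAllocationPolicy.
    struct ArenaAllocationPolicy {
      explicit ArenaAllocationPolicy(Arena* arena) : arena_(arena) {}
      void* New(size_t size) { return arena_->New(size); }
      static void Delete(void*) {}  // De-allocation is a no-op; the arena owns it.
     private:
      Arena* arena_;
    };

    // A container that allocates through whatever policy it is handed,
    // mirroring List<T, ZoneAllocationPolicy>::Add(element, policy).
    template <typename T>
    class PolicyList {
     public:
      void Add(const T& value, ArenaAllocationPolicy policy) {
        void* slot = policy.New(sizeof(T));
        if (slot != nullptr) elements_.push_back(new (slot) T(value));
      }
      size_t length() const { return elements_.size(); }
     private:
      std::vector<T*> elements_;  // Only bookkeeping lives outside the arena.
    };

    int main() {
      Arena arena(1024);
      PolicyList<int> list;
      list.Add(1, ArenaAllocationPolicy(&arena));  // Zone passed at each call site,
      list.Add(2, ArenaAllocationPolicy(&arena));  // as in ZoneList::Add(element, zone).
      return list.length() == 2 ? 0 : 1;
    }
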
diff --git a/deps/v8/test/cctest/cctest.status b/deps/v8/test/cctest/cctest.status
index af28be19d8..fc111ab94b 100644
--- a/deps/v8/test/cctest/cctest.status
+++ b/deps/v8/test/cctest/cctest.status
@@ -27,6 +27,7 @@
prefix cctest
+# All tests prefixed with 'Bug' are expected to fail.
test-api/Bug*: FAIL
##############################################################################
@@ -75,17 +76,3 @@ test-serialize/DeserializeFromSecondSerializationAndRunScript2: SKIP
test-serialize/DeserializeAndRunScript2: SKIP
test-serialize/DeserializeFromSecondSerialization: SKIP
-##############################################################################
-[ $arch == arm && $crankshaft ]
-
-# Tests that time out with crankshaft.
-test-debug/ThreadedDebugging: SKIP
-test-debug/DebugBreakLoop: SKIP
-
-
-##############################################################################
-[ $arch == mips && $crankshaft ]
-
-# Tests that time out with crankshaft.
-test-debug/ThreadedDebugging: SKIP
-test-debug/DebugBreakLoop: SKIP
diff --git a/deps/v8/test/cctest/test-accessors.cc b/deps/v8/test/cctest/test-accessors.cc
index b1900f9ed3..0b342ff3d9 100644
--- a/deps/v8/test/cctest/test-accessors.cc
+++ b/deps/v8/test/cctest/test-accessors.cc
@@ -1,4 +1,4 @@
-// Copyright 2009 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -116,6 +116,8 @@ static v8::Handle<v8::Object> x_holder;
static v8::Handle<Value> XGetter(Local<String> name, const AccessorInfo& info) {
ApiTestFuzzer::Fuzz();
+ v8::Isolate* isolate = v8::Isolate::GetCurrent();
+ CHECK_EQ(isolate, info.GetIsolate());
CHECK_EQ(x_receiver, info.This());
CHECK_EQ(x_holder, info.Holder());
return v8_num(x_register);
@@ -125,6 +127,8 @@ static v8::Handle<Value> XGetter(Local<String> name, const AccessorInfo& info) {
static void XSetter(Local<String> name,
Local<Value> value,
const AccessorInfo& info) {
+ v8::Isolate* isolate = v8::Isolate::GetCurrent();
+ CHECK_EQ(isolate, info.GetIsolate());
CHECK_EQ(x_holder, info.This());
CHECK_EQ(x_holder, info.Holder());
x_register = value->Int32Value();
@@ -236,12 +240,15 @@ THREADED_TEST(HandleScopePop) {
static v8::Handle<Value> CheckAccessorArgsCorrect(Local<String> name,
const AccessorInfo& info) {
+ CHECK(info.GetIsolate() == v8::Isolate::GetCurrent());
CHECK(info.This() == info.Holder());
CHECK(info.Data()->Equals(v8::String::New("data")));
ApiTestFuzzer::Fuzz();
+ CHECK(info.GetIsolate() == v8::Isolate::GetCurrent());
CHECK(info.This() == info.Holder());
CHECK(info.Data()->Equals(v8::String::New("data")));
HEAP->CollectAllGarbage(i::Heap::kNoGCFlags);
+ CHECK(info.GetIsolate() == v8::Isolate::GetCurrent());
CHECK(info.This() == info.Holder());
CHECK(info.Data()->Equals(v8::String::New("data")));
return v8::Integer::New(17);
diff --git a/deps/v8/test/cctest/test-alloc.cc b/deps/v8/test/cctest/test-alloc.cc
index 769fe7be29..e195d14923 100644
--- a/deps/v8/test/cctest/test-alloc.cc
+++ b/deps/v8/test/cctest/test-alloc.cc
@@ -34,6 +34,15 @@
using namespace v8::internal;
+static inline void SimulateFullSpace(PagedSpace* space) {
+ int old_linear_size = static_cast<int>(space->limit() - space->top());
+ space->Free(space->top(), old_linear_size);
+ space->SetTop(space->limit(), space->limit());
+ space->ResetFreeList();
+ space->ClearStats();
+}
+
+
static MaybeObject* AllocateAfterFailures() {
static int attempts = 0;
if (++attempts < 3) return Failure::RetryAfterGC();
@@ -65,24 +74,12 @@ static MaybeObject* AllocateAfterFailures() {
CHECK(!heap->CopyJSObject(JSObject::cast(object))->IsFailure());
// Old data space.
- OldSpace* old_data_space = heap->old_data_space();
- static const int kOldDataSpaceFillerSize = ByteArray::SizeFor(0);
- while (old_data_space->Available() > kOldDataSpaceFillerSize) {
- CHECK(!heap->AllocateByteArray(0, TENURED)->IsFailure());
- }
+ SimulateFullSpace(heap->old_data_space());
CHECK(!heap->AllocateRawAsciiString(100, TENURED)->IsFailure());
// Old pointer space.
- OldSpace* old_pointer_space = heap->old_pointer_space();
- static const int kOldPointerSpaceFillerLength = 10000;
- static const int kOldPointerSpaceFillerSize = FixedArray::SizeFor(
- kOldPointerSpaceFillerLength);
- while (old_pointer_space->Available() > kOldPointerSpaceFillerSize) {
- CHECK(!heap->AllocateFixedArray(kOldPointerSpaceFillerLength, TENURED)->
- IsFailure());
- }
- CHECK(!heap->AllocateFixedArray(kOldPointerSpaceFillerLength, TENURED)->
- IsFailure());
+ SimulateFullSpace(heap->old_pointer_space());
+ CHECK(!heap->AllocateFixedArray(10000, TENURED)->IsFailure());
// Large object space.
static const int kLargeObjectSpaceFillerLength = 300000;
@@ -97,14 +94,9 @@ static MaybeObject* AllocateAfterFailures() {
IsFailure());
// Map space.
- MapSpace* map_space = heap->map_space();
- static const int kMapSpaceFillerSize = Map::kSize;
- InstanceType instance_type = JS_OBJECT_TYPE;
+ SimulateFullSpace(heap->map_space());
int instance_size = JSObject::kHeaderSize;
- while (map_space->Available() > kMapSpaceFillerSize) {
- CHECK(!heap->AllocateMap(instance_type, instance_size)->IsFailure());
- }
- CHECK(!heap->AllocateMap(instance_type, instance_size)->IsFailure());
+ CHECK(!heap->AllocateMap(JS_OBJECT_TYPE, instance_size)->IsFailure());
// Test that we can allocate in old pointer space and code space.
CHECK(!heap->AllocateFixedArray(100, TENURED)->IsFailure());
diff --git a/deps/v8/test/cctest/test-api.cc b/deps/v8/test/cctest/test-api.cc
index f4ab9ad0df..ed31f6ffae 100644
--- a/deps/v8/test/cctest/test-api.cc
+++ b/deps/v8/test/cctest/test-api.cc
@@ -7662,7 +7662,7 @@ THREADED_TEST(ShadowObject) {
value = Script::Compile(v8_str("f()"))->Run();
CHECK_EQ(42, value->Int32Value());
- Script::Compile(v8_str("y = 42"))->Run();
+ Script::Compile(v8_str("y = 43"))->Run();
CHECK_EQ(1, shadow_y_setter_call_count);
value = Script::Compile(v8_str("y"))->Run();
CHECK_EQ(1, shadow_y_getter_call_count);
@@ -8608,6 +8608,8 @@ static void CheckInterceptorLoadIC(NamedPropertyGetter getter,
static v8::Handle<Value> InterceptorLoadICGetter(Local<String> name,
const AccessorInfo& info) {
ApiTestFuzzer::Fuzz();
+ v8::Isolate* isolate = v8::Isolate::GetCurrent();
+ CHECK_EQ(isolate, info.GetIsolate());
CHECK_EQ(v8_str("data"), info.Data());
CHECK_EQ(v8_str("x"), name);
return v8::Integer::New(42);
@@ -9334,6 +9336,8 @@ static v8::Handle<Value> InterceptorCallICFastApi(Local<String> name,
static v8::Handle<Value> FastApiCallback_TrivialSignature(
const v8::Arguments& args) {
ApiTestFuzzer::Fuzz();
+ v8::Isolate* isolate = v8::Isolate::GetCurrent();
+ CHECK_EQ(isolate, args.GetIsolate());
CHECK_EQ(args.This(), args.Holder());
CHECK(args.Data()->Equals(v8_str("method_data")));
return v8::Integer::New(args[0]->Int32Value() + 1);
@@ -9342,6 +9346,8 @@ static v8::Handle<Value> FastApiCallback_TrivialSignature(
static v8::Handle<Value> FastApiCallback_SimpleSignature(
const v8::Arguments& args) {
ApiTestFuzzer::Fuzz();
+ v8::Isolate* isolate = v8::Isolate::GetCurrent();
+ CHECK_EQ(isolate, args.GetIsolate());
CHECK_EQ(args.This()->GetPrototype(), args.Holder());
CHECK(args.Data()->Equals(v8_str("method_data")));
// Note, we're using HasRealNamedProperty instead of Has to avoid
@@ -10254,6 +10260,7 @@ static v8::Handle<Value> ChildGetter(Local<String> name,
THREADED_TEST(Overriding) {
+ i::FLAG_es5_readonly = true;
v8::HandleScope scope;
LocalContext context;
@@ -10300,11 +10307,11 @@ THREADED_TEST(Overriding) {
value = v8_compile("o.g")->Run();
CHECK_EQ(42, value->Int32Value());
- // Check 'h' can be shadowed.
+ // Check that 'h' cannot be shadowed.
value = v8_compile("o.h = 3; o.h")->Run();
- CHECK_EQ(3, value->Int32Value());
+ CHECK_EQ(1, value->Int32Value());
- // Check 'i' is cannot be shadowed or changed.
+ // Check that 'i' cannot be shadowed or changed.
value = v8_compile("o.i = 3; o.i")->Run();
CHECK_EQ(42, value->Int32Value());
}
@@ -10865,13 +10872,18 @@ THREADED_TEST(NestedHandleScopeAndContexts) {
}
+static int64_t cast(intptr_t x) { return static_cast<int64_t>(x); }
+
+
THREADED_TEST(ExternalAllocatedMemory) {
v8::HandleScope outer;
v8::Persistent<Context> env(Context::New());
CHECK(!env.IsEmpty());
- const int kSize = 1024*1024;
- CHECK_EQ(v8::V8::AdjustAmountOfExternalAllocatedMemory(kSize), kSize);
- CHECK_EQ(v8::V8::AdjustAmountOfExternalAllocatedMemory(-kSize), 0);
+ const intptr_t kSize = 1024*1024;
+ CHECK_EQ(cast(v8::V8::AdjustAmountOfExternalAllocatedMemory(kSize)),
+ cast(kSize));
+ CHECK_EQ(cast(v8::V8::AdjustAmountOfExternalAllocatedMemory(-kSize)),
+ cast(0));
}
@@ -12137,9 +12149,10 @@ TEST(RegExpStringModification) {
}
-// Test that we can set a property on the global object even if there
+// Test that we cannot set a property on the global object if there
// is a read-only property in the prototype chain.
TEST(ReadOnlyPropertyInGlobalProto) {
+ i::FLAG_es5_readonly = true;
v8::HandleScope scope;
v8::Handle<v8::ObjectTemplate> templ = v8::ObjectTemplate::New();
LocalContext context(0, templ);
@@ -12151,12 +12164,13 @@ TEST(ReadOnlyPropertyInGlobalProto) {
// Check without 'eval' or 'with'.
v8::Handle<v8::Value> res =
CompileRun("function f() { x = 42; return x; }; f()");
+ CHECK_EQ(v8::Integer::New(0), res);
// Check with 'eval'.
- res = CompileRun("function f() { eval('1'); y = 42; return y; }; f()");
- CHECK_EQ(v8::Integer::New(42), res);
+ res = CompileRun("function f() { eval('1'); y = 43; return y; }; f()");
+ CHECK_EQ(v8::Integer::New(0), res);
// Check with 'with'.
- res = CompileRun("function f() { with (this) { y = 42 }; return y; }; f()");
- CHECK_EQ(v8::Integer::New(42), res);
+ res = CompileRun("function f() { with (this) { y = 44 }; return y; }; f()");
+ CHECK_EQ(v8::Integer::New(0), res);
}
static int force_set_set_count = 0;
@@ -12365,6 +12379,46 @@ THREADED_TEST(ForceDeleteIC) {
}
+TEST(InlinedFunctionAcrossContexts) {
+ i::FLAG_allow_natives_syntax = true;
+ v8::HandleScope outer_scope;
+ v8::Persistent<v8::Context> ctx1 = v8::Context::New();
+ v8::Persistent<v8::Context> ctx2 = v8::Context::New();
+ ctx1->Enter();
+
+ {
+ v8::HandleScope inner_scope;
+ CompileRun("var G = 42; function foo() { return G; }");
+ v8::Local<v8::Value> foo = ctx1->Global()->Get(v8_str("foo"));
+ ctx2->Enter();
+ ctx2->Global()->Set(v8_str("o"), foo);
+ v8::Local<v8::Value> res = CompileRun(
+ "function f() { return o(); }"
+ "for (var i = 0; i < 10; ++i) f();"
+ "%OptimizeFunctionOnNextCall(f);"
+ "f();");
+ CHECK_EQ(42, res->Int32Value());
+ ctx2->Exit();
+ v8::Handle<v8::String> G_property = v8::String::New("G");
+ CHECK(ctx1->Global()->ForceDelete(G_property));
+ ctx2->Enter();
+ ExpectString(
+ "(function() {"
+ " try {"
+ " return f();"
+ " } catch(e) {"
+ " return e.toString();"
+ " }"
+ " })()",
+ "ReferenceError: G is not defined");
+ ctx2->Exit();
+ ctx1->Exit();
+ ctx1.Dispose();
+ }
+ ctx2.Dispose();
+}
+
+
v8::Persistent<Context> calling_context0;
v8::Persistent<Context> calling_context1;
v8::Persistent<Context> calling_context2;
@@ -12430,19 +12484,16 @@ THREADED_TEST(GetCallingContext) {
// Check that a variable declaration with no explicit initialization
-// value does not shadow an existing property in the prototype chain.
-//
-// This is consistent with Firefox and Safari.
-//
-// See http://crbug.com/12548.
+// value does shadow an existing property in the prototype chain.
THREADED_TEST(InitGlobalVarInProtoChain) {
+ i::FLAG_es52_globals = true;
v8::HandleScope scope;
LocalContext context;
// Introduce a variable in the prototype chain.
CompileRun("__proto__.x = 42");
- v8::Handle<v8::Value> result = CompileRun("var x; x");
+ v8::Handle<v8::Value> result = CompileRun("var x = 43; x");
CHECK(!result->IsUndefined());
- CHECK_EQ(42, result->Int32Value());
+ CHECK_EQ(43, result->Int32Value());
}
@@ -13947,75 +13998,104 @@ TEST(SourceURLInStackTrace) {
}
+static void CreateGarbageInOldSpace() {
+ v8::HandleScope scope;
+ i::AlwaysAllocateScope always_allocate;
+ for (int i = 0; i < 1000; i++) {
+ FACTORY->NewFixedArray(1000, i::TENURED);
+ }
+}
+
// Test that idle notification can be handled and eventually returns true.
-// This just checks the contract of the IdleNotification() function,
-// and does not verify that it does reasonable work.
-THREADED_TEST(IdleNotification) {
+TEST(IdleNotification) {
+ const intptr_t MB = 1024 * 1024;
v8::HandleScope scope;
LocalContext env;
- {
- // Create garbage in old-space to generate work for idle notification.
- i::AlwaysAllocateScope always_allocate;
- for (int i = 0; i < 100; i++) {
- FACTORY->NewFixedArray(1000, i::TENURED);
- }
+ intptr_t initial_size = HEAP->SizeOfObjects();
+ CreateGarbageInOldSpace();
+ intptr_t size_with_garbage = HEAP->SizeOfObjects();
+ CHECK_GT(size_with_garbage, initial_size + MB);
+ bool finished = false;
+ for (int i = 0; i < 200 && !finished; i++) {
+ finished = v8::V8::IdleNotification();
}
- bool finshed_idle_work = false;
- for (int i = 0; i < 100 && !finshed_idle_work; i++) {
- finshed_idle_work = v8::V8::IdleNotification();
- }
- CHECK(finshed_idle_work);
+ intptr_t final_size = HEAP->SizeOfObjects();
+ CHECK(finished);
+ CHECK_LT(final_size, initial_size + 1);
}
-// Test that idle notification can be handled and eventually returns true.
-// This just checks the contract of the IdleNotification() function,
-// and does not verify that it does reasonable work.
+
+// Test that idle notification can be handled and eventually collects garbage.
TEST(IdleNotificationWithSmallHint) {
+ const intptr_t MB = 1024 * 1024;
+ const int IdlePauseInMs = 900;
v8::HandleScope scope;
LocalContext env;
- {
- // Create garbage in old-space to generate work for idle notification.
- i::AlwaysAllocateScope always_allocate;
- for (int i = 0; i < 100; i++) {
- FACTORY->NewFixedArray(1000, i::TENURED);
- }
+ intptr_t initial_size = HEAP->SizeOfObjects();
+ CreateGarbageInOldSpace();
+ intptr_t size_with_garbage = HEAP->SizeOfObjects();
+ CHECK_GT(size_with_garbage, initial_size + MB);
+ bool finished = false;
+ for (int i = 0; i < 200 && !finished; i++) {
+ finished = v8::V8::IdleNotification(IdlePauseInMs);
}
- intptr_t old_size = HEAP->SizeOfObjects();
- bool finshed_idle_work = false;
- bool no_idle_work = v8::V8::IdleNotification(10);
- for (int i = 0; i < 200 && !finshed_idle_work; i++) {
- finshed_idle_work = v8::V8::IdleNotification(10);
- }
- intptr_t new_size = HEAP->SizeOfObjects();
- CHECK(finshed_idle_work);
- CHECK(no_idle_work || new_size < old_size);
+ intptr_t final_size = HEAP->SizeOfObjects();
+ CHECK(finished);
+ CHECK_LT(final_size, initial_size + 1);
}
-// This just checks the contract of the IdleNotification() function,
-// and does not verify that it does reasonable work.
+// Test that idle notification can be handled and eventually collects garbage.
TEST(IdleNotificationWithLargeHint) {
+ const intptr_t MB = 1024 * 1024;
+ const int IdlePauseInMs = 900;
v8::HandleScope scope;
LocalContext env;
- {
- // Create garbage in old-space to generate work for idle notification.
- i::AlwaysAllocateScope always_allocate;
- for (int i = 0; i < 100; i++) {
- FACTORY->NewFixedArray(1000, i::TENURED);
- }
- }
- intptr_t old_size = HEAP->SizeOfObjects();
- bool finshed_idle_work = false;
- bool no_idle_work = v8::V8::IdleNotification(900);
- for (int i = 0; i < 200 && !finshed_idle_work; i++) {
- finshed_idle_work = v8::V8::IdleNotification(900);
+ intptr_t initial_size = HEAP->SizeOfObjects();
+ CreateGarbageInOldSpace();
+ intptr_t size_with_garbage = HEAP->SizeOfObjects();
+ CHECK_GT(size_with_garbage, initial_size + MB);
+ bool finished = false;
+ for (int i = 0; i < 200 && !finished; i++) {
+ finished = v8::V8::IdleNotification(IdlePauseInMs);
}
- intptr_t new_size = HEAP->SizeOfObjects();
- CHECK(finshed_idle_work);
- CHECK(no_idle_work || new_size < old_size);
+ intptr_t final_size = HEAP->SizeOfObjects();
+ CHECK(finished);
+ CHECK_LT(final_size, initial_size + 1);
}
+TEST(Regress2107) {
+ const intptr_t MB = 1024 * 1024;
+ const int kShortIdlePauseInMs = 100;
+ const int kLongIdlePauseInMs = 1000;
+ v8::HandleScope scope;
+ LocalContext env;
+ intptr_t initial_size = HEAP->SizeOfObjects();
+ // Send idle notification to start a round of incremental GCs.
+ v8::V8::IdleNotification(kShortIdlePauseInMs);
+ // Emulate 7 page reloads.
+ for (int i = 0; i < 7; i++) {
+ v8::Persistent<v8::Context> ctx = v8::Context::New();
+ ctx->Enter();
+ CreateGarbageInOldSpace();
+ ctx->Exit();
+ ctx.Dispose();
+ v8::V8::ContextDisposedNotification();
+ v8::V8::IdleNotification(kLongIdlePauseInMs);
+ }
+ // Create garbage and check that idle notification still collects it.
+ CreateGarbageInOldSpace();
+ intptr_t size_with_garbage = HEAP->SizeOfObjects();
+ CHECK_GT(size_with_garbage, initial_size + MB);
+ bool finished = false;
+ for (int i = 0; i < 200 && !finished; i++) {
+ finished = v8::V8::IdleNotification(kShortIdlePauseInMs);
+ }
+ intptr_t final_size = HEAP->SizeOfObjects();
+ CHECK_LT(final_size, initial_size + 1);
+}
+
static uint32_t* stack_limit;
static v8::Handle<Value> GetStackLimitCallback(const v8::Arguments& args) {
@@ -16425,3 +16505,309 @@ TEST(PrimaryStubCache) {
StubCacheHelper(false);
}
+
+static int fatal_error_callback_counter = 0;
+static void CountingErrorCallback(const char* location, const char* message) {
+ printf("CountingErrorCallback(\"%s\", \"%s\")\n", location, message);
+ fatal_error_callback_counter++;
+}
+
+
+TEST(StaticGetters) {
+ v8::HandleScope scope;
+ LocalContext context;
+ v8::Isolate* isolate = v8::Isolate::GetCurrent();
+ i::Handle<i::Object> undefined_value = FACTORY->undefined_value();
+ CHECK(*v8::Utils::OpenHandle(*v8::Undefined()) == *undefined_value);
+ CHECK(*v8::Utils::OpenHandle(*v8::Undefined(isolate)) == *undefined_value);
+ i::Handle<i::Object> null_value = FACTORY->null_value();
+ CHECK(*v8::Utils::OpenHandle(*v8::Null()) == *null_value);
+ CHECK(*v8::Utils::OpenHandle(*v8::Null(isolate)) == *null_value);
+ i::Handle<i::Object> true_value = FACTORY->true_value();
+ CHECK(*v8::Utils::OpenHandle(*v8::True()) == *true_value);
+ CHECK(*v8::Utils::OpenHandle(*v8::True(isolate)) == *true_value);
+ i::Handle<i::Object> false_value = FACTORY->false_value();
+ CHECK(*v8::Utils::OpenHandle(*v8::False()) == *false_value);
+ CHECK(*v8::Utils::OpenHandle(*v8::False(isolate)) == *false_value);
+
+ // Test after-death behavior.
+ CHECK(i::Internals::IsInitialized(isolate));
+ CHECK_EQ(0, fatal_error_callback_counter);
+ v8::V8::SetFatalErrorHandler(CountingErrorCallback);
+ v8::Utils::ReportApiFailure("StaticGetters()", "Kill V8");
+ i::Isolate::Current()->TearDown();
+ CHECK(!i::Internals::IsInitialized(isolate));
+ CHECK_EQ(1, fatal_error_callback_counter);
+ CHECK(v8::Undefined().IsEmpty());
+ CHECK_EQ(2, fatal_error_callback_counter);
+ CHECK(v8::Undefined(isolate).IsEmpty());
+ CHECK_EQ(3, fatal_error_callback_counter);
+ CHECK(v8::Null().IsEmpty());
+ CHECK_EQ(4, fatal_error_callback_counter);
+ CHECK(v8::Null(isolate).IsEmpty());
+ CHECK_EQ(5, fatal_error_callback_counter);
+ CHECK(v8::True().IsEmpty());
+ CHECK_EQ(6, fatal_error_callback_counter);
+ CHECK(v8::True(isolate).IsEmpty());
+ CHECK_EQ(7, fatal_error_callback_counter);
+ CHECK(v8::False().IsEmpty());
+ CHECK_EQ(8, fatal_error_callback_counter);
+ CHECK(v8::False(isolate).IsEmpty());
+ CHECK_EQ(9, fatal_error_callback_counter);
+}
+
+
+TEST(IsolateEmbedderData) {
+ v8::Isolate* isolate = v8::Isolate::GetCurrent();
+ CHECK_EQ(NULL, isolate->GetData());
+ CHECK_EQ(NULL, ISOLATE->GetData());
+ static void* data1 = reinterpret_cast<void*>(0xacce55ed);
+ isolate->SetData(data1);
+ CHECK_EQ(data1, isolate->GetData());
+ CHECK_EQ(data1, ISOLATE->GetData());
+ static void* data2 = reinterpret_cast<void*>(0xdecea5ed);
+ ISOLATE->SetData(data2);
+ CHECK_EQ(data2, isolate->GetData());
+ CHECK_EQ(data2, ISOLATE->GetData());
+ ISOLATE->TearDown();
+ CHECK_EQ(data2, isolate->GetData());
+ CHECK_EQ(data2, ISOLATE->GetData());
+}
+
+
+TEST(StringEmpty) {
+ v8::HandleScope scope;
+ LocalContext context;
+ v8::Isolate* isolate = v8::Isolate::GetCurrent();
+ i::Handle<i::Object> empty_string = FACTORY->empty_symbol();
+ CHECK(*v8::Utils::OpenHandle(*v8::String::Empty()) == *empty_string);
+ CHECK(*v8::Utils::OpenHandle(*v8::String::Empty(isolate)) == *empty_string);
+
+ // Test after-death behavior.
+ CHECK(i::Internals::IsInitialized(isolate));
+ CHECK_EQ(0, fatal_error_callback_counter);
+ v8::V8::SetFatalErrorHandler(CountingErrorCallback);
+ v8::Utils::ReportApiFailure("StringEmpty()", "Kill V8");
+ i::Isolate::Current()->TearDown();
+ CHECK(!i::Internals::IsInitialized(isolate));
+ CHECK_EQ(1, fatal_error_callback_counter);
+ CHECK(v8::String::Empty().IsEmpty());
+ CHECK_EQ(2, fatal_error_callback_counter);
+ CHECK(v8::String::Empty(isolate).IsEmpty());
+ CHECK_EQ(3, fatal_error_callback_counter);
+}
+
+
+static int instance_checked_getter_count = 0;
+static Handle<Value> InstanceCheckedGetter(Local<String> name,
+ const AccessorInfo& info) {
+ CHECK_EQ(name, v8_str("foo"));
+ instance_checked_getter_count++;
+ return v8_num(11);
+}
+
+
+static int instance_checked_setter_count = 0;
+static void InstanceCheckedSetter(Local<String> name,
+ Local<Value> value,
+ const AccessorInfo& info) {
+ CHECK_EQ(name, v8_str("foo"));
+ CHECK_EQ(value, v8_num(23));
+ instance_checked_setter_count++;
+}
+
+
+static void CheckInstanceCheckedResult(int getters,
+ int setters,
+ bool expects_callbacks,
+ TryCatch* try_catch) {
+ if (expects_callbacks) {
+ CHECK(!try_catch->HasCaught());
+ CHECK_EQ(getters, instance_checked_getter_count);
+ CHECK_EQ(setters, instance_checked_setter_count);
+ } else {
+ CHECK(try_catch->HasCaught());
+ CHECK_EQ(0, instance_checked_getter_count);
+ CHECK_EQ(0, instance_checked_setter_count);
+ }
+ try_catch->Reset();
+}
+
+
+static void CheckInstanceCheckedAccessors(bool expects_callbacks) {
+ instance_checked_getter_count = 0;
+ instance_checked_setter_count = 0;
+ TryCatch try_catch;
+
+ // Test path through generic runtime code.
+ CompileRun("obj.foo");
+ CheckInstanceCheckedResult(1, 0, expects_callbacks, &try_catch);
+ CompileRun("obj.foo = 23");
+ CheckInstanceCheckedResult(1, 1, expects_callbacks, &try_catch);
+
+  // Test path through generated LoadIC and StoreIC.
+ CompileRun("function test_get(o) { o.foo; }"
+ "test_get(obj);");
+ CheckInstanceCheckedResult(2, 1, expects_callbacks, &try_catch);
+ CompileRun("test_get(obj);");
+ CheckInstanceCheckedResult(3, 1, expects_callbacks, &try_catch);
+ CompileRun("test_get(obj);");
+ CheckInstanceCheckedResult(4, 1, expects_callbacks, &try_catch);
+ CompileRun("function test_set(o) { o.foo = 23; }"
+ "test_set(obj);");
+ CheckInstanceCheckedResult(4, 2, expects_callbacks, &try_catch);
+ CompileRun("test_set(obj);");
+ CheckInstanceCheckedResult(4, 3, expects_callbacks, &try_catch);
+ CompileRun("test_set(obj);");
+ CheckInstanceCheckedResult(4, 4, expects_callbacks, &try_catch);
+
+ // Test path through optimized code.
+ CompileRun("%OptimizeFunctionOnNextCall(test_get);"
+ "test_get(obj);");
+ CheckInstanceCheckedResult(5, 4, expects_callbacks, &try_catch);
+ CompileRun("%OptimizeFunctionOnNextCall(test_set);"
+ "test_set(obj);");
+ CheckInstanceCheckedResult(5, 5, expects_callbacks, &try_catch);
+
+  // Clean up so that closures start out fresh in the next check.
+ CompileRun("%DeoptimizeFunction(test_get);"
+ "%ClearFunctionTypeFeedback(test_get);"
+ "%DeoptimizeFunction(test_set);"
+ "%ClearFunctionTypeFeedback(test_set);");
+}
+
+
+THREADED_TEST(InstanceCheckOnInstanceAccessor) {
+ v8::internal::FLAG_allow_natives_syntax = true;
+ v8::HandleScope scope;
+ LocalContext context;
+
+ Local<FunctionTemplate> templ = FunctionTemplate::New();
+ Local<ObjectTemplate> inst = templ->InstanceTemplate();
+ inst->SetAccessor(v8_str("foo"),
+ InstanceCheckedGetter, InstanceCheckedSetter,
+ Handle<Value>(),
+ v8::DEFAULT,
+ v8::None,
+ v8::AccessorSignature::New(templ));
+ context->Global()->Set(v8_str("f"), templ->GetFunction());
+
+ printf("Testing positive ...\n");
+ CompileRun("var obj = new f();");
+ CHECK(templ->HasInstance(context->Global()->Get(v8_str("obj"))));
+ CheckInstanceCheckedAccessors(true);
+
+ printf("Testing negative ...\n");
+ CompileRun("var obj = {};"
+ "obj.__proto__ = new f();");
+ CHECK(!templ->HasInstance(context->Global()->Get(v8_str("obj"))));
+ CheckInstanceCheckedAccessors(false);
+}
+
+
+THREADED_TEST(InstanceCheckOnInstanceAccessorWithInterceptor) {
+ v8::internal::FLAG_allow_natives_syntax = true;
+ v8::HandleScope scope;
+ LocalContext context;
+
+ Local<FunctionTemplate> templ = FunctionTemplate::New();
+ Local<ObjectTemplate> inst = templ->InstanceTemplate();
+ AddInterceptor(templ, EmptyInterceptorGetter, EmptyInterceptorSetter);
+ inst->SetAccessor(v8_str("foo"),
+ InstanceCheckedGetter, InstanceCheckedSetter,
+ Handle<Value>(),
+ v8::DEFAULT,
+ v8::None,
+ v8::AccessorSignature::New(templ));
+ context->Global()->Set(v8_str("f"), templ->GetFunction());
+
+ printf("Testing positive ...\n");
+ CompileRun("var obj = new f();");
+ CHECK(templ->HasInstance(context->Global()->Get(v8_str("obj"))));
+ CheckInstanceCheckedAccessors(true);
+
+ printf("Testing negative ...\n");
+ CompileRun("var obj = {};"
+ "obj.__proto__ = new f();");
+ CHECK(!templ->HasInstance(context->Global()->Get(v8_str("obj"))));
+ CheckInstanceCheckedAccessors(false);
+}
+
+
+THREADED_TEST(InstanceCheckOnPrototypeAccessor) {
+ v8::internal::FLAG_allow_natives_syntax = true;
+ v8::HandleScope scope;
+ LocalContext context;
+
+ Local<FunctionTemplate> templ = FunctionTemplate::New();
+ Local<ObjectTemplate> proto = templ->PrototypeTemplate();
+ proto->SetAccessor(v8_str("foo"),
+ InstanceCheckedGetter, InstanceCheckedSetter,
+ Handle<Value>(),
+ v8::DEFAULT,
+ v8::None,
+ v8::AccessorSignature::New(templ));
+ context->Global()->Set(v8_str("f"), templ->GetFunction());
+
+ printf("Testing positive ...\n");
+ CompileRun("var obj = new f();");
+ CHECK(templ->HasInstance(context->Global()->Get(v8_str("obj"))));
+ CheckInstanceCheckedAccessors(true);
+
+ printf("Testing negative ...\n");
+ CompileRun("var obj = {};"
+ "obj.__proto__ = new f();");
+ CHECK(!templ->HasInstance(context->Global()->Get(v8_str("obj"))));
+ CheckInstanceCheckedAccessors(false);
+
+ printf("Testing positive with modified prototype chain ...\n");
+ CompileRun("var obj = new f();"
+ "var pro = {};"
+ "pro.__proto__ = obj.__proto__;"
+ "obj.__proto__ = pro;");
+ CHECK(templ->HasInstance(context->Global()->Get(v8_str("obj"))));
+ CheckInstanceCheckedAccessors(true);
+}
+
+
+TEST(TryFinallyMessage) {
+ v8::HandleScope scope;
+ LocalContext context;
+ {
+    // Test that the original error message is not lost if a recursive call
+    // into JavaScript is made in the finally block, e.g. to initialize
+    // an IC. (crbug.com/129171)
+ TryCatch try_catch;
+ const char* trigger_ic =
+ "try { \n"
+ " throw new Error('test'); \n"
+ "} finally { \n"
+ " var x = 0; \n"
+ " x++; \n" // Trigger an IC initialization here.
+ "} \n";
+ CompileRun(trigger_ic);
+ CHECK(try_catch.HasCaught());
+ Local<Message> message = try_catch.Message();
+ CHECK(!message.IsEmpty());
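+    // Line 2 is where the original 'test' error was thrown.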
+ CHECK_EQ(2, message->GetLineNumber());
+ }
+
+ {
+ // Test that the original exception message is indeed overwritten if
+ // a new error is thrown in the finally block.
+ TryCatch try_catch;
+ const char* throw_again =
+ "try { \n"
+ " throw new Error('test'); \n"
+ "} finally { \n"
+ " var x = 0; \n"
+ " x++; \n"
+ " throw new Error('again'); \n" // This is the new uncaught error.
+ "} \n";
+ CompileRun(throw_again);
+ CHECK(try_catch.HasCaught());
+ Local<Message> message = try_catch.Message();
+ CHECK(!message.IsEmpty());
+ CHECK_EQ(6, message->GetLineNumber());
+ }
+}
diff --git a/deps/v8/test/cctest/test-dataflow.cc b/deps/v8/test/cctest/test-dataflow.cc
index a63008d210..005d440d13 100644
--- a/deps/v8/test/cctest/test-dataflow.cc
+++ b/deps/v8/test/cctest/test-dataflow.cc
@@ -37,7 +37,7 @@ using namespace v8::internal;
TEST(BitVector) {
v8::internal::V8::Initialize(NULL);
ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
- Zone* zone = ZONE;
+ Zone* zone = Isolate::Current()->zone();
{
BitVector v(15, zone);
v.Add(1);
diff --git a/deps/v8/test/cctest/test-debug.cc b/deps/v8/test/cctest/test-debug.cc
index ffa845813f..e40f406c3c 100644
--- a/deps/v8/test/cctest/test-debug.cc
+++ b/deps/v8/test/cctest/test-debug.cc
@@ -5013,7 +5013,10 @@ static void ThreadedMessageHandler(const v8::Debug::Message& message) {
if (IsBreakEventMessage(print_buffer)) {
// Check that we are inside the while loop.
int source_line = GetSourceLineFromBreakEventMessage(print_buffer);
- CHECK(8 <= source_line && source_line <= 13);
+ // TODO(2047): This should really be 8 <= source_line <= 13; but we
+ // currently have an off-by-one error when calculating the source
+ // position corresponding to the program counter at the debug break.
+ CHECK(7 <= source_line && source_line <= 13);
threaded_debugging_barriers.barrier_2.Wait();
}
}
diff --git a/deps/v8/test/cctest/test-decls.cc b/deps/v8/test/cctest/test-decls.cc
index aa733c70bc..e6bdc9f505 100644
--- a/deps/v8/test/cctest/test-decls.cc
+++ b/deps/v8/test/cctest/test-decls.cc
@@ -521,6 +521,7 @@ class ExistsInPrototypeContext: public DeclarationContext {
TEST(ExistsInPrototype) {
+ i::FLAG_es52_globals = true;
HandleScope scope;
// Sanity check to make sure that the holder of the interceptor
@@ -535,17 +536,17 @@ TEST(ExistsInPrototype) {
{ ExistsInPrototypeContext context;
context.Check("var x; x",
- 1, // get
+ 0, // get
0,
- 1, // declaration
- EXPECT_EXCEPTION);
+ 0, // declaration
+ EXPECT_RESULT, Undefined());
}
{ ExistsInPrototypeContext context;
context.Check("var x = 0; x",
0,
0,
- 1, // declaration
+ 0, // declaration
EXPECT_RESULT, Number::New(0));
}
@@ -553,7 +554,7 @@ TEST(ExistsInPrototype) {
context.Check("const x; x",
0,
0,
- 1, // declaration
+ 0, // declaration
EXPECT_RESULT, Undefined());
}
@@ -561,7 +562,7 @@ TEST(ExistsInPrototype) {
context.Check("const x = 0; x",
0,
0,
- 1, // declaration
+ 0, // declaration
EXPECT_RESULT, Number::New(0));
}
}
@@ -583,13 +584,14 @@ class AbsentInPrototypeContext: public DeclarationContext {
TEST(AbsentInPrototype) {
+ i::FLAG_es52_globals = true;
HandleScope scope;
{ AbsentInPrototypeContext context;
context.Check("if (false) { var x = 0; }; x",
0,
0,
- 1, // declaration
+ 0, // declaration
EXPECT_RESULT, Undefined());
}
}
diff --git a/deps/v8/test/cctest/test-disasm-arm.cc b/deps/v8/test/cctest/test-disasm-arm.cc
index 0e9432d95d..3a2d9e8361 100644
--- a/deps/v8/test/cctest/test-disasm-arm.cc
+++ b/deps/v8/test/cctest/test-disasm-arm.cc
@@ -92,6 +92,10 @@ bool DisassembleAndCompare(byte* pc, const char* compare_string) {
if (!DisassembleAndCompare(progcounter, compare_string)) failure = true; \
}
+// Force emission of any pending literals into a pool.
+#define EMIT_PENDING_LITERALS() \
+ assm.CheckConstPool(true, false)
+
// Verify that all invocations of the COMPARE macro passed successfully.
// Exit with a failure if at least one of the tests failed.
@@ -280,6 +284,10 @@ TEST(Type0) {
// is pretty strange anyway.
COMPARE(mov(r5, Operand(0x01234), SetCC, ne),
"159fc000 ldrne ip, [pc, #+0]");
+  // Emit a literal pool now; otherwise it could be dumped later, in the
+  // middle of a different test.
+ EMIT_PENDING_LITERALS();
+
// We only disassemble one instruction so the eor instruction is not here.
// The eor does the setcc so we get a movw here.
COMPARE(eor(r5, r4, Operand(0x1234), SetCC, ne),
diff --git a/deps/v8/test/cctest/test-disasm-x64.cc b/deps/v8/test/cctest/test-disasm-x64.cc
index da85eb933e..c6332e249b 100644
--- a/deps/v8/test/cctest/test-disasm-x64.cc
+++ b/deps/v8/test/cctest/test-disasm-x64.cc
@@ -264,6 +264,7 @@ TEST(DisasmX64) {
ExternalReference after_break_target =
ExternalReference(Debug_Address::AfterBreakTarget(),
assm.isolate());
+ USE(after_break_target);
#endif // ENABLE_DEBUGGER_SUPPORT
__ jmp(ic, RelocInfo::CODE_TARGET);
__ nop();
diff --git a/deps/v8/test/cctest/test-double.cc b/deps/v8/test/cctest/test-double.cc
index 3594a4fe32..6ef42c64dd 100644
--- a/deps/v8/test/cctest/test-double.cc
+++ b/deps/v8/test/cctest/test-double.cc
@@ -112,21 +112,6 @@ TEST(IsInfinite) {
}
-TEST(IsNan) {
- CHECK(Double(OS::nan_value()).IsNan());
- uint64_t other_nan = V8_2PART_UINT64_C(0xFFFFFFFF, 00000001);
- CHECK(Double(other_nan).IsNan());
- CHECK(!Double(V8_INFINITY).IsNan());
- CHECK(!Double(-V8_INFINITY).IsNan());
- CHECK(!Double(0.0).IsNan());
- CHECK(!Double(-0.0).IsNan());
- CHECK(!Double(1.0).IsNan());
- CHECK(!Double(-1.0).IsNan());
- uint64_t min_double64 = V8_2PART_UINT64_C(0x00000000, 00000001);
- CHECK(!Double(min_double64).IsNan());
-}
-
-
TEST(Sign) {
CHECK_EQ(1, Double(1.0).Sign());
CHECK_EQ(1, Double(V8_INFINITY).Sign());
diff --git a/deps/v8/test/cctest/test-func-name-inference.cc b/deps/v8/test/cctest/test-func-name-inference.cc
index 8f405b726e..762cc9f0fa 100644
--- a/deps/v8/test/cctest/test-func-name-inference.cc
+++ b/deps/v8/test/cctest/test-func-name-inference.cc
@@ -400,3 +400,41 @@ TEST(AssignmentAndCall) {
// See MultipleAssignments test.
CheckFunctionName(script, "return 2", "Enclosing.Bar");
}
+
+
+TEST(MethodAssignmentInAnonymousFunctionCall) {
+ InitializeVM();
+ v8::HandleScope scope;
+
+ v8::Handle<v8::Script> script = Compile(
+ "(function () {\n"
+ " var EventSource = function () { };\n"
+ " EventSource.prototype.addListener = function () {\n"
+ " return 2012;\n"
+ " };\n"
+ " this.PublicEventSource = EventSource;\n"
+ "})();");
+ CheckFunctionName(script, "return 2012", "EventSource.addListener");
+}
+
+
+TEST(ReturnAnonymousFunction) {
+ InitializeVM();
+ v8::HandleScope scope;
+
+ v8::Handle<v8::Script> script = Compile(
+ "(function() {\n"
+ " function wrapCode() {\n"
+ " return function () {\n"
+ " return 2012;\n"
+ " };\n"
+ " };\n"
+ " var foo = 10;\n"
+ " function f() {\n"
+ " return wrapCode();\n"
+ " }\n"
+ " this.ref = f;\n"
+ "})()");
+ script->Run();
+ CheckFunctionName(script, "return 2012", "");
+}
diff --git a/deps/v8/test/cctest/test-heap-profiler.cc b/deps/v8/test/cctest/test-heap-profiler.cc
index a56f250c2a..9d2755ddc8 100644
--- a/deps/v8/test/cctest/test-heap-profiler.cc
+++ b/deps/v8/test/cctest/test-heap-profiler.cc
@@ -2,11 +2,15 @@
//
// Tests for heap profiler
+#include <ctype.h>
+
#include "v8.h"
#include "cctest.h"
+#include "hashmap.h"
#include "heap-profiler.h"
#include "snapshot.h"
+#include "debug.h"
#include "utils-inl.h"
#include "../include/v8-profiler.h"
@@ -24,22 +28,30 @@ class NamedEntriesDetector {
if (strcmp(entry->name(), "C2") == 0) has_C2 = true;
}
+ static bool AddressesMatch(void* key1, void* key2) {
+ return key1 == key2;
+ }
+
void CheckAllReachables(i::HeapEntry* root) {
+ i::HashMap visited(AddressesMatch);
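+    // Track visited entries by address; this replaces the paint()/painted()
+    // marking that is removed above.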
i::List<i::HeapEntry*> list(10);
list.Add(root);
- root->paint();
CheckEntry(root);
while (!list.is_empty()) {
i::HeapEntry* entry = list.RemoveLast();
- i::Vector<i::HeapGraphEdge> children = entry->children();
+ i::Vector<i::HeapGraphEdge*> children = entry->children();
for (int i = 0; i < children.length(); ++i) {
- if (children[i].type() == i::HeapGraphEdge::kShortcut) continue;
- i::HeapEntry* child = children[i].to();
- if (!child->painted()) {
- list.Add(child);
- child->paint();
- CheckEntry(child);
- }
+ if (children[i]->type() == i::HeapGraphEdge::kShortcut) continue;
+ i::HeapEntry* child = children[i]->to();
+ i::HashMap::Entry* entry = visited.Lookup(
+ reinterpret_cast<void*>(child),
+ static_cast<uint32_t>(reinterpret_cast<uintptr_t>(child)),
+ true);
+ if (entry->value)
+ continue;
+ entry->value = reinterpret_cast<void*>(1);
+ list.Add(child);
+ CheckEntry(child);
}
}
}
@@ -102,24 +114,19 @@ TEST(HeapSnapshot) {
"var c2 = new C2(a2);");
const v8::HeapSnapshot* snapshot_env2 =
v8::HeapProfiler::TakeSnapshot(v8_str("env2"));
- i::HeapSnapshot* i_snapshot_env2 =
- const_cast<i::HeapSnapshot*>(
- reinterpret_cast<const i::HeapSnapshot*>(snapshot_env2));
const v8::HeapGraphNode* global_env2 = GetGlobalObject(snapshot_env2);
// Verify that the JS global object of env2 has the '..2' properties.
const v8::HeapGraphNode* a2_node =
- GetProperty(global_env2, v8::HeapGraphEdge::kShortcut, "a2");
+ GetProperty(global_env2, v8::HeapGraphEdge::kProperty, "a2");
CHECK_NE(NULL, a2_node);
CHECK_NE(
- NULL, GetProperty(global_env2, v8::HeapGraphEdge::kShortcut, "b2_1"));
+ NULL, GetProperty(global_env2, v8::HeapGraphEdge::kProperty, "b2_1"));
CHECK_NE(
- NULL, GetProperty(global_env2, v8::HeapGraphEdge::kShortcut, "b2_2"));
- CHECK_NE(NULL, GetProperty(global_env2, v8::HeapGraphEdge::kShortcut, "c2"));
+ NULL, GetProperty(global_env2, v8::HeapGraphEdge::kProperty, "b2_2"));
+ CHECK_NE(NULL, GetProperty(global_env2, v8::HeapGraphEdge::kProperty, "c2"));
- // Paint all nodes reachable from global object.
NamedEntriesDetector det;
- i_snapshot_env2->ClearPaint();
det.CheckAllReachables(const_cast<i::HeapEntry*>(
reinterpret_cast<const i::HeapEntry*>(global_env2)));
CHECK(det.has_A2);
@@ -137,12 +144,13 @@ TEST(HeapSnapshotObjectSizes) {
CompileRun(
"function X(a, b) { this.a = a; this.b = b; }\n"
"x = new X(new X(), new X());\n"
+ "dummy = new X();\n"
"(function() { x.a.a = x.b; })();");
const v8::HeapSnapshot* snapshot =
v8::HeapProfiler::TakeSnapshot(v8_str("sizes"));
const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
const v8::HeapGraphNode* x =
- GetProperty(global, v8::HeapGraphEdge::kShortcut, "x");
+ GetProperty(global, v8::HeapGraphEdge::kProperty, "x");
CHECK_NE(NULL, x);
const v8::HeapGraphNode* x1 =
GetProperty(x, v8::HeapGraphEdge::kProperty, "a");
@@ -152,9 +160,9 @@ TEST(HeapSnapshotObjectSizes) {
CHECK_NE(NULL, x2);
// Test sizes.
- CHECK_EQ(x->GetSelfSize() * 3, x->GetRetainedSize());
- CHECK_EQ(x1->GetSelfSize(), x1->GetRetainedSize());
- CHECK_EQ(x2->GetSelfSize(), x2->GetRetainedSize());
+ CHECK_NE(0, x->GetSelfSize());
+ CHECK_NE(0, x1->GetSelfSize());
+ CHECK_NE(0, x2->GetSelfSize());
}
@@ -169,7 +177,7 @@ TEST(BoundFunctionInSnapshot) {
v8::HeapProfiler::TakeSnapshot(v8_str("sizes"));
const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
const v8::HeapGraphNode* f =
- GetProperty(global, v8::HeapGraphEdge::kShortcut, "boundFunction");
+ GetProperty(global, v8::HeapGraphEdge::kProperty, "boundFunction");
CHECK(f);
CHECK_EQ(v8::String::New("native_bind"), f->GetName());
const v8::HeapGraphNode* bindings =
@@ -233,15 +241,15 @@ TEST(HeapSnapshotCodeObjects) {
const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
const v8::HeapGraphNode* compiled =
- GetProperty(global, v8::HeapGraphEdge::kShortcut, "compiled");
+ GetProperty(global, v8::HeapGraphEdge::kProperty, "compiled");
CHECK_NE(NULL, compiled);
CHECK_EQ(v8::HeapGraphNode::kClosure, compiled->GetType());
const v8::HeapGraphNode* lazy =
- GetProperty(global, v8::HeapGraphEdge::kShortcut, "lazy");
+ GetProperty(global, v8::HeapGraphEdge::kProperty, "lazy");
CHECK_NE(NULL, lazy);
CHECK_EQ(v8::HeapGraphNode::kClosure, lazy->GetType());
const v8::HeapGraphNode* anonymous =
- GetProperty(global, v8::HeapGraphEdge::kShortcut, "anonymous");
+ GetProperty(global, v8::HeapGraphEdge::kProperty, "anonymous");
CHECK_NE(NULL, anonymous);
CHECK_EQ(v8::HeapGraphNode::kClosure, anonymous->GetType());
v8::String::AsciiValue anonymous_name(anonymous->GetName());
@@ -293,9 +301,9 @@ TEST(HeapSnapshotHeapNumbers) {
const v8::HeapSnapshot* snapshot =
v8::HeapProfiler::TakeSnapshot(v8_str("numbers"));
const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
- CHECK_EQ(NULL, GetProperty(global, v8::HeapGraphEdge::kShortcut, "a"));
+ CHECK_EQ(NULL, GetProperty(global, v8::HeapGraphEdge::kProperty, "a"));
const v8::HeapGraphNode* b =
- GetProperty(global, v8::HeapGraphEdge::kShortcut, "b");
+ GetProperty(global, v8::HeapGraphEdge::kProperty, "b");
CHECK_NE(NULL, b);
CHECK_EQ(v8::HeapGraphNode::kHeapNumber, b->GetType());
}
@@ -313,10 +321,10 @@ TEST(HeapSnapshotSlicedString) {
v8::HeapProfiler::TakeSnapshot(v8_str("strings"));
const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
const v8::HeapGraphNode* parent_string =
- GetProperty(global, v8::HeapGraphEdge::kShortcut, "parent_string");
+ GetProperty(global, v8::HeapGraphEdge::kProperty, "parent_string");
CHECK_NE(NULL, parent_string);
const v8::HeapGraphNode* child_string =
- GetProperty(global, v8::HeapGraphEdge::kShortcut, "child_string");
+ GetProperty(global, v8::HeapGraphEdge::kProperty, "child_string");
CHECK_NE(NULL, child_string);
const v8::HeapGraphNode* parent =
GetProperty(child_string, v8::HeapGraphEdge::kInternal, "parent");
@@ -344,12 +352,12 @@ TEST(HeapSnapshotInternalReferences) {
}
-// Trying to introduce a check helper for uint64_t causes many
+// Trying to introduce a check helper for uint32_t causes many
// overloading ambiguities, so it seems easier just to cast
// them to a signed type.
-#define CHECK_EQ_UINT64_T(a, b) \
- CHECK_EQ(static_cast<int64_t>(a), static_cast<int64_t>(b))
-#define CHECK_NE_UINT64_T(a, b) \
+#define CHECK_EQ_SNAPSHOT_OBJECT_ID(a, b) \
+ CHECK_EQ(static_cast<int32_t>(a), static_cast<int32_t>(b))
+#define CHECK_NE_SNAPSHOT_OBJECT_ID(a, b) \
CHECK((a) != (b)) // NOLINT
TEST(HeapEntryIdsAndArrayShift) {
@@ -378,31 +386,24 @@ TEST(HeapEntryIdsAndArrayShift) {
const v8::HeapGraphNode* global1 = GetGlobalObject(snapshot1);
const v8::HeapGraphNode* global2 = GetGlobalObject(snapshot2);
- CHECK_NE_UINT64_T(0, global1->GetId());
- CHECK_EQ_UINT64_T(global1->GetId(), global2->GetId());
+ CHECK_NE_SNAPSHOT_OBJECT_ID(0, global1->GetId());
+ CHECK_EQ_SNAPSHOT_OBJECT_ID(global1->GetId(), global2->GetId());
const v8::HeapGraphNode* a1 =
GetProperty(global1, v8::HeapGraphEdge::kProperty, "a");
CHECK_NE(NULL, a1);
- const v8::HeapGraphNode* e1 =
- GetProperty(a1, v8::HeapGraphEdge::kHidden, "1");
- CHECK_NE(NULL, e1);
const v8::HeapGraphNode* k1 =
- GetProperty(e1, v8::HeapGraphEdge::kInternal, "elements");
+ GetProperty(a1, v8::HeapGraphEdge::kInternal, "elements");
CHECK_NE(NULL, k1);
const v8::HeapGraphNode* a2 =
GetProperty(global2, v8::HeapGraphEdge::kProperty, "a");
CHECK_NE(NULL, a2);
- const v8::HeapGraphNode* e2 =
- GetProperty(a2, v8::HeapGraphEdge::kHidden, "1");
- CHECK_NE(NULL, e2);
const v8::HeapGraphNode* k2 =
- GetProperty(e2, v8::HeapGraphEdge::kInternal, "elements");
+ GetProperty(a2, v8::HeapGraphEdge::kInternal, "elements");
CHECK_NE(NULL, k2);
- CHECK_EQ_UINT64_T(a1->GetId(), a2->GetId());
- CHECK_EQ_UINT64_T(e1->GetId(), e2->GetId());
- CHECK_EQ_UINT64_T(k1->GetId(), k2->GetId());
+ CHECK_EQ_SNAPSHOT_OBJECT_ID(a1->GetId(), a2->GetId());
+ CHECK_EQ_SNAPSHOT_OBJECT_ID(k1->GetId(), k2->GetId());
}
TEST(HeapEntryIdsAndGC) {
@@ -414,50 +415,56 @@ TEST(HeapEntryIdsAndGC) {
"function B(x) { this.x = x; }\n"
"var a = new A();\n"
"var b = new B(a);");
+ v8::Local<v8::String> s1_str = v8_str("s1");
+ v8::Local<v8::String> s2_str = v8_str("s2");
const v8::HeapSnapshot* snapshot1 =
- v8::HeapProfiler::TakeSnapshot(v8_str("s1"));
+ v8::HeapProfiler::TakeSnapshot(s1_str);
HEAP->CollectAllGarbage(i::Heap::kNoGCFlags);
const v8::HeapSnapshot* snapshot2 =
- v8::HeapProfiler::TakeSnapshot(v8_str("s2"));
+ v8::HeapProfiler::TakeSnapshot(s2_str);
+
+ CHECK_GT(snapshot1->GetMaxSnapshotJSObjectId(), 7000);
+ CHECK(snapshot1->GetMaxSnapshotJSObjectId() <=
+ snapshot2->GetMaxSnapshotJSObjectId());
const v8::HeapGraphNode* global1 = GetGlobalObject(snapshot1);
const v8::HeapGraphNode* global2 = GetGlobalObject(snapshot2);
- CHECK_NE_UINT64_T(0, global1->GetId());
- CHECK_EQ_UINT64_T(global1->GetId(), global2->GetId());
+ CHECK_NE_SNAPSHOT_OBJECT_ID(0, global1->GetId());
+ CHECK_EQ_SNAPSHOT_OBJECT_ID(global1->GetId(), global2->GetId());
const v8::HeapGraphNode* A1 =
GetProperty(global1, v8::HeapGraphEdge::kProperty, "A");
CHECK_NE(NULL, A1);
const v8::HeapGraphNode* A2 =
GetProperty(global2, v8::HeapGraphEdge::kProperty, "A");
CHECK_NE(NULL, A2);
- CHECK_NE_UINT64_T(0, A1->GetId());
- CHECK_EQ_UINT64_T(A1->GetId(), A2->GetId());
+ CHECK_NE_SNAPSHOT_OBJECT_ID(0, A1->GetId());
+ CHECK_EQ_SNAPSHOT_OBJECT_ID(A1->GetId(), A2->GetId());
const v8::HeapGraphNode* B1 =
GetProperty(global1, v8::HeapGraphEdge::kProperty, "B");
CHECK_NE(NULL, B1);
const v8::HeapGraphNode* B2 =
GetProperty(global2, v8::HeapGraphEdge::kProperty, "B");
CHECK_NE(NULL, B2);
- CHECK_NE_UINT64_T(0, B1->GetId());
- CHECK_EQ_UINT64_T(B1->GetId(), B2->GetId());
+ CHECK_NE_SNAPSHOT_OBJECT_ID(0, B1->GetId());
+ CHECK_EQ_SNAPSHOT_OBJECT_ID(B1->GetId(), B2->GetId());
const v8::HeapGraphNode* a1 =
GetProperty(global1, v8::HeapGraphEdge::kProperty, "a");
CHECK_NE(NULL, a1);
const v8::HeapGraphNode* a2 =
GetProperty(global2, v8::HeapGraphEdge::kProperty, "a");
CHECK_NE(NULL, a2);
- CHECK_NE_UINT64_T(0, a1->GetId());
- CHECK_EQ_UINT64_T(a1->GetId(), a2->GetId());
+ CHECK_NE_SNAPSHOT_OBJECT_ID(0, a1->GetId());
+ CHECK_EQ_SNAPSHOT_OBJECT_ID(a1->GetId(), a2->GetId());
const v8::HeapGraphNode* b1 =
GetProperty(global1, v8::HeapGraphEdge::kProperty, "b");
CHECK_NE(NULL, b1);
const v8::HeapGraphNode* b2 =
GetProperty(global2, v8::HeapGraphEdge::kProperty, "b");
CHECK_NE(NULL, b2);
- CHECK_NE_UINT64_T(0, b1->GetId());
- CHECK_EQ_UINT64_T(b1->GetId(), b2->GetId());
+ CHECK_NE_SNAPSHOT_OBJECT_ID(0, b1->GetId());
+ CHECK_EQ_SNAPSHOT_OBJECT_ID(b1->GetId(), b2->GetId());
}
@@ -474,66 +481,6 @@ TEST(HeapSnapshotRootPreservedAfterSorting) {
}
-TEST(HeapEntryDominator) {
- // The graph looks like this:
- //
- // -> node1
- // a |^
- // -> node5 ba
- // a v|
- // node6 -> node2
- // b a |^
- // -> node4 ba
- // b v|
- // -> node3
- //
- // The dominator for all nodes is node6.
-
- v8::HandleScope scope;
- LocalContext env;
-
- CompileRun(
- "function X(a, b) { this.a = a; this.b = b; }\n"
- "node6 = new X(new X(new X()), new X(new X(),new X()));\n"
- "(function(){\n"
- "node6.a.a.b = node6.b.a; // node1 -> node2\n"
- "node6.b.a.a = node6.a.a; // node2 -> node1\n"
- "node6.b.a.b = node6.b.b; // node2 -> node3\n"
- "node6.b.b.a = node6.b.a; // node3 -> node2\n"
- "})();");
-
- const v8::HeapSnapshot* snapshot =
- v8::HeapProfiler::TakeSnapshot(v8_str("dominators"));
-
- const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
- CHECK_NE(NULL, global);
- const v8::HeapGraphNode* node6 =
- GetProperty(global, v8::HeapGraphEdge::kShortcut, "node6");
- CHECK_NE(NULL, node6);
- const v8::HeapGraphNode* node5 =
- GetProperty(node6, v8::HeapGraphEdge::kProperty, "a");
- CHECK_NE(NULL, node5);
- const v8::HeapGraphNode* node4 =
- GetProperty(node6, v8::HeapGraphEdge::kProperty, "b");
- CHECK_NE(NULL, node4);
- const v8::HeapGraphNode* node3 =
- GetProperty(node4, v8::HeapGraphEdge::kProperty, "b");
- CHECK_NE(NULL, node3);
- const v8::HeapGraphNode* node2 =
- GetProperty(node4, v8::HeapGraphEdge::kProperty, "a");
- CHECK_NE(NULL, node2);
- const v8::HeapGraphNode* node1 =
- GetProperty(node5, v8::HeapGraphEdge::kProperty, "a");
- CHECK_NE(NULL, node1);
-
- CHECK_EQ(node6, node1->GetDominatorNode());
- CHECK_EQ(node6, node2->GetDominatorNode());
- CHECK_EQ(node6, node3->GetDominatorNode());
- CHECK_EQ(node6, node4->GetDominatorNode());
- CHECK_EQ(node6, node5->GetDominatorNode());
-}
-
-
namespace {
class TestJSONStream : public v8::OutputStream {
@@ -551,9 +498,14 @@ class TestJSONStream : public v8::OutputStream {
memcpy(chunk.start(), buffer, chars_written);
return kContinue;
}
+ virtual WriteResult WriteUint32Chunk(uint32_t* buffer, int chars_written) {
+ ASSERT(false);
+ return kAbort;
+ }
void WriteTo(i::Vector<char> dest) { buffer_.WriteTo(dest); }
int eos_signaled() { return eos_signaled_; }
int size() { return buffer_.size(); }
+
private:
i::Collector<char> buffer_;
int eos_signaled_;
@@ -607,42 +559,44 @@ TEST(HeapSnapshotJSONSerialization) {
env->Global()->Get(v8_str("parsed"))->ToObject();
CHECK(parsed_snapshot->Has(v8_str("snapshot")));
CHECK(parsed_snapshot->Has(v8_str("nodes")));
+ CHECK(parsed_snapshot->Has(v8_str("edges")));
CHECK(parsed_snapshot->Has(v8_str("strings")));
// Get node and edge "member" offsets.
v8::Local<v8::Value> meta_analysis_result = CompileRun(
- "var parsed_meta = parsed.nodes[0];\n"
- "var children_count_offset ="
- " parsed_meta.fields.indexOf('children_count');\n"
- "var children_offset ="
- " parsed_meta.fields.indexOf('children');\n"
- "var children_meta ="
- " parsed_meta.types[children_offset];\n"
- "var child_fields_count = children_meta.fields.length;\n"
- "var child_type_offset ="
- " children_meta.fields.indexOf('type');\n"
- "var child_name_offset ="
- " children_meta.fields.indexOf('name_or_index');\n"
- "var child_to_node_offset ="
- " children_meta.fields.indexOf('to_node');\n"
+ "var meta = parsed.snapshot.meta;\n"
+ "var edge_count_offset = meta.node_fields.indexOf('edge_count');\n"
+ "var node_fields_count = meta.node_fields.length;\n"
+ "var edge_fields_count = meta.edge_fields.length;\n"
+ "var edge_type_offset = meta.edge_fields.indexOf('type');\n"
+ "var edge_name_offset = meta.edge_fields.indexOf('name_or_index');\n"
+ "var edge_to_node_offset = meta.edge_fields.indexOf('to_node');\n"
"var property_type ="
- " children_meta.types[child_type_offset].indexOf('property');\n"
+ " meta.edge_types[edge_type_offset].indexOf('property');\n"
"var shortcut_type ="
- " children_meta.types[child_type_offset].indexOf('shortcut');");
+ " meta.edge_types[edge_type_offset].indexOf('shortcut');\n"
+ "var node_count = parsed.nodes.length / node_fields_count;\n"
+ "var first_edge_indexes = parsed.first_edge_indexes = [];\n"
+ "for (var i = 0, first_edge_index = 0; i < node_count; ++i) {\n"
+ " first_edge_indexes[i] = first_edge_index;\n"
+ " first_edge_index += edge_fields_count *\n"
+ " parsed.nodes[i * node_fields_count + edge_count_offset];\n"
+ "}\n");
CHECK(!meta_analysis_result.IsEmpty());
// A helper function for processing encoded nodes.
CompileRun(
"function GetChildPosByProperty(pos, prop_name, prop_type) {\n"
" var nodes = parsed.nodes;\n"
+ " var edges = parsed.edges;\n"
" var strings = parsed.strings;\n"
- " for (var i = 0,\n"
- " count = nodes[pos + children_count_offset] * child_fields_count;\n"
- " i < count; i += child_fields_count) {\n"
- " var child_pos = pos + children_offset + i;\n"
- " if (nodes[child_pos + child_type_offset] === prop_type\n"
- " && strings[nodes[child_pos + child_name_offset]] === prop_name)\n"
- " return nodes[child_pos + child_to_node_offset];\n"
+ " var node_ordinal = pos / node_fields_count;\n"
+ " for (var i = parsed.first_edge_indexes[node_ordinal],\n"
+ " count = parsed.first_edge_indexes[node_ordinal + 1];\n"
+ " i < count; i += edge_fields_count) {\n"
+ " if (edges[i + edge_type_offset] === prop_type\n"
+ " && strings[edges[i + edge_name_offset]] === prop_name)\n"
+ " return edges[i + edge_to_node_offset];\n"
" }\n"
" return null;\n"
"}\n");
@@ -651,8 +605,8 @@ TEST(HeapSnapshotJSONSerialization) {
"GetChildPosByProperty(\n"
" GetChildPosByProperty(\n"
" GetChildPosByProperty("
- " parsed.nodes[1 + children_offset + child_to_node_offset],"
- " \"b\",shortcut_type),\n"
+ " parsed.edges[edge_to_node_offset],"
+ " \"b\", property_type),\n"
" \"x\", property_type),"
" \"s\", property_type)");
CHECK(!string_obj_pos_val.IsEmpty());
@@ -685,6 +639,212 @@ TEST(HeapSnapshotJSONSerializationAborting) {
CHECK_EQ(0, stream.eos_signaled());
}
+namespace {
+
+class TestStatsStream : public v8::OutputStream {
+ public:
+ TestStatsStream()
+ : eos_signaled_(0),
+ updates_written_(0),
+ entries_count_(0),
+ entries_size_(0),
+ intervals_count_(0),
+ first_interval_index_(-1) { }
+ TestStatsStream(const TestStatsStream& stream)
+ : v8::OutputStream(stream),
+ eos_signaled_(stream.eos_signaled_),
+ updates_written_(stream.updates_written_),
+ entries_count_(stream.entries_count_),
+ entries_size_(stream.entries_size_),
+ intervals_count_(stream.intervals_count_),
+ first_interval_index_(stream.first_interval_index_) { }
+ virtual ~TestStatsStream() {}
+ virtual void EndOfStream() { ++eos_signaled_; }
+ virtual WriteResult WriteAsciiChunk(char* buffer, int chars_written) {
+ ASSERT(false);
+ return kAbort;
+ }
+ virtual WriteResult WriteHeapStatsChunk(v8::HeapStatsUpdate* buffer,
+ int updates_written) {
+ ++intervals_count_;
+ ASSERT(updates_written);
+ updates_written_ += updates_written;
+ entries_count_ = 0;
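+    // entries_count_ is recomputed for every chunk, while entries_size_
+    // accumulates across all chunks written to this stream.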
+ if (first_interval_index_ == -1 && updates_written != 0)
+ first_interval_index_ = buffer[0].index;
+ for (int i = 0; i < updates_written; ++i) {
+ entries_count_ += buffer[i].count;
+ entries_size_ += buffer[i].size;
+ }
+
+ return kContinue;
+ }
+ int eos_signaled() { return eos_signaled_; }
+ int updates_written() { return updates_written_; }
+ uint32_t entries_count() const { return entries_count_; }
+ uint32_t entries_size() const { return entries_size_; }
+ int intervals_count() const { return intervals_count_; }
+ int first_interval_index() const { return first_interval_index_; }
+
+ private:
+ int eos_signaled_;
+ int updates_written_;
+ uint32_t entries_count_;
+ uint32_t entries_size_;
+ int intervals_count_;
+ int first_interval_index_;
+};
+
+} // namespace
+
+static TestStatsStream GetHeapStatsUpdate(
+ v8::SnapshotObjectId* object_id = NULL) {
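+  // Pushes the accumulated heap objects stats into a fresh stream; the return
+  // value is the last seen snapshot object id at the time of the call.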
+ TestStatsStream stream;
+ v8::SnapshotObjectId last_seen_id =
+ v8::HeapProfiler::PushHeapObjectsStats(&stream);
+ if (object_id)
+ *object_id = last_seen_id;
+ CHECK_EQ(1, stream.eos_signaled());
+ return stream;
+}
+
+
+TEST(HeapSnapshotObjectsStats) {
+ v8::HandleScope scope;
+ LocalContext env;
+
+ v8::HeapProfiler::StartHeapObjectsTracking();
+  // We have to call GC 5 times; otherwise leftover garbage would make
+  // the test flaky.
+ for (int i = 0; i < 5; ++i) {
+ HEAP->CollectAllGarbage(i::Heap::kNoGCFlags);
+ }
+
+ v8::SnapshotObjectId initial_id;
+ {
+ // Single chunk of data expected in update. Initial data.
+ TestStatsStream stats_update = GetHeapStatsUpdate(&initial_id);
+ CHECK_EQ(1, stats_update.intervals_count());
+ CHECK_EQ(1, stats_update.updates_written());
+ CHECK_LT(0, stats_update.entries_size());
+ CHECK_EQ(0, stats_update.first_interval_index());
+ }
+
+ // No data expected in update because nothing has happened.
+ v8::SnapshotObjectId same_id;
+ CHECK_EQ(0, GetHeapStatsUpdate(&same_id).updates_written());
+ CHECK_EQ_SNAPSHOT_OBJECT_ID(initial_id, same_id);
+
+ {
+ v8::SnapshotObjectId additional_string_id;
+ v8::HandleScope inner_scope_1;
+ v8_str("string1");
+ {
+ // Single chunk of data with one new entry expected in update.
+ TestStatsStream stats_update = GetHeapStatsUpdate(&additional_string_id);
+ CHECK_LT(same_id, additional_string_id);
+ CHECK_EQ(1, stats_update.intervals_count());
+ CHECK_EQ(1, stats_update.updates_written());
+ CHECK_LT(0, stats_update.entries_size());
+ CHECK_EQ(1, stats_update.entries_count());
+ CHECK_EQ(2, stats_update.first_interval_index());
+ }
+
+ // No data expected in update because nothing happened.
+ v8::SnapshotObjectId last_id;
+ CHECK_EQ(0, GetHeapStatsUpdate(&last_id).updates_written());
+ CHECK_EQ_SNAPSHOT_OBJECT_ID(additional_string_id, last_id);
+
+ {
+ v8::HandleScope inner_scope_2;
+ v8_str("string2");
+
+ uint32_t entries_size;
+ {
+ v8::HandleScope inner_scope_3;
+ v8_str("string3");
+ v8_str("string4");
+
+ {
+ // Single chunk of data with three new entries expected in update.
+ TestStatsStream stats_update = GetHeapStatsUpdate();
+ CHECK_EQ(1, stats_update.intervals_count());
+ CHECK_EQ(1, stats_update.updates_written());
+ CHECK_LT(0, entries_size = stats_update.entries_size());
+ CHECK_EQ(3, stats_update.entries_count());
+ CHECK_EQ(4, stats_update.first_interval_index());
+ }
+ }
+
+ {
+ // Single chunk of data with two left entries expected in update.
+ TestStatsStream stats_update = GetHeapStatsUpdate();
+ CHECK_EQ(1, stats_update.intervals_count());
+ CHECK_EQ(1, stats_update.updates_written());
+ CHECK_GT(entries_size, stats_update.entries_size());
+ CHECK_EQ(1, stats_update.entries_count());
+        // Two strings from the fourth interval were released.
+ CHECK_EQ(4, stats_update.first_interval_index());
+ }
+ }
+
+ {
+ // Single chunk of data with 0 left entries expected in update.
+ TestStatsStream stats_update = GetHeapStatsUpdate();
+ CHECK_EQ(1, stats_update.intervals_count());
+ CHECK_EQ(1, stats_update.updates_written());
+ CHECK_EQ(0, stats_update.entries_size());
+ CHECK_EQ(0, stats_update.entries_count());
+      // The last string from the fourth interval was released.
+ CHECK_EQ(4, stats_update.first_interval_index());
+ }
+ }
+ {
+ // Single chunk of data with 0 left entries expected in update.
+ TestStatsStream stats_update = GetHeapStatsUpdate();
+ CHECK_EQ(1, stats_update.intervals_count());
+ CHECK_EQ(1, stats_update.updates_written());
+ CHECK_EQ(0, stats_update.entries_size());
+ CHECK_EQ(0, stats_update.entries_count());
+ // The only string from the second interval was released.
+ CHECK_EQ(2, stats_update.first_interval_index());
+ }
+
+ v8::Local<v8::Array> array = v8::Array::New();
+ CHECK_EQ(0, array->Length());
+ // Force array's buffer allocation.
+ array->Set(2, v8_num(7));
+
+ uint32_t entries_size;
+ {
+ // Single chunk of data with 2 entries expected in update.
+ TestStatsStream stats_update = GetHeapStatsUpdate();
+ CHECK_EQ(1, stats_update.intervals_count());
+ CHECK_EQ(1, stats_update.updates_written());
+ CHECK_LT(0, entries_size = stats_update.entries_size());
+ // They are the array and its buffer.
+ CHECK_EQ(2, stats_update.entries_count());
+ CHECK_EQ(8, stats_update.first_interval_index());
+ }
+
+ for (int i = 0; i < 100; ++i)
+ array->Set(i, v8_num(i));
+
+ {
+ // Single chunk of data with 1 entry expected in update.
+ TestStatsStream stats_update = GetHeapStatsUpdate();
+ CHECK_EQ(1, stats_update.intervals_count());
+    // The first interval was changed because the old buffer was collected.
+    // The second interval was changed because a new buffer was allocated.
+ CHECK_EQ(2, stats_update.updates_written());
+ CHECK_LT(entries_size, stats_update.entries_size());
+ CHECK_EQ(2, stats_update.entries_count());
+ CHECK_EQ(8, stats_update.first_interval_index());
+ }
+
+ v8::HeapProfiler::StopHeapObjectsTracking();
+}
+
static void CheckChildrenIds(const v8::HeapSnapshot* snapshot,
const v8::HeapGraphNode* node,
@@ -695,7 +855,7 @@ static void CheckChildrenIds(const v8::HeapSnapshot* snapshot,
const v8::HeapGraphEdge* prop = node->GetChild(i);
const v8::HeapGraphNode* child =
snapshot->GetNodeById(prop->GetToNode()->GetId());
- CHECK_EQ_UINT64_T(prop->GetToNode()->GetId(), child->GetId());
+ CHECK_EQ_SNAPSHOT_OBJECT_ID(prop->GetToNode()->GetId(), child->GetId());
CHECK_EQ(prop->GetToNode(), child);
CheckChildrenIds(snapshot, child, level + 1, max_level);
}
@@ -715,6 +875,42 @@ TEST(HeapSnapshotGetNodeById) {
}
+TEST(HeapSnapshotGetSnapshotObjectId) {
+ v8::HandleScope scope;
+ LocalContext env;
+ CompileRun("globalObject = {};\n");
+ const v8::HeapSnapshot* snapshot =
+ v8::HeapProfiler::TakeSnapshot(v8_str("get_snapshot_object_id"));
+ const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
+ const v8::HeapGraphNode* global_object =
+ GetProperty(global, v8::HeapGraphEdge::kProperty, "globalObject");
+ CHECK(global_object);
+
+ v8::Local<v8::Value> globalObjectHandle =
+ env->Global()->Get(v8::String::New("globalObject"));
+ CHECK(!globalObjectHandle.IsEmpty());
+ CHECK(globalObjectHandle->IsObject());
+
+ v8::SnapshotObjectId id =
+ v8::HeapProfiler::GetSnapshotObjectId(globalObjectHandle);
+ CHECK_NE(static_cast<int>(v8::HeapProfiler::kUnknownObjectId),
+ id);
+ CHECK_EQ(static_cast<int>(id), global_object->GetId());
+}
+
+
+TEST(HeapSnapshotUnknownSnapshotObjectId) {
+ v8::HandleScope scope;
+ LocalContext env;
+ CompileRun("globalObject = {};\n");
+ const v8::HeapSnapshot* snapshot =
+ v8::HeapProfiler::TakeSnapshot(v8_str("unknown_object_id"));
+ const v8::HeapGraphNode* node =
+ snapshot->GetNodeById(v8::HeapProfiler::kUnknownObjectId);
+ CHECK_EQ(NULL, node);
+}
+
+
namespace {
class TestActivityControl : public v8::ActivityControl {
@@ -953,9 +1149,8 @@ TEST(HeapSnapshotImplicitReferences) {
v8::HeapProfiler::TakeSnapshot(v8_str("implicit_refs"));
const v8::HeapGraphNode* global_object = GetGlobalObject(snapshot);
- // Use kShortcut type to skip intermediate JSGlobalPropertyCell
const v8::HeapGraphNode* obj0 = GetProperty(
- global_object, v8::HeapGraphEdge::kShortcut, "root_object");
+ global_object, v8::HeapGraphEdge::kProperty, "root_object");
CHECK(obj0);
CHECK_EQ(v8::HeapGraphNode::kObject, obj0->GetType());
const v8::HeapGraphNode* obj1 = GetProperty(
@@ -1128,7 +1323,7 @@ TEST(GetHeapValue) {
env->Global()->GetPrototype().As<v8::Object>();
CHECK(js_global == global->GetHeapValue());
const v8::HeapGraphNode* obj = GetProperty(
- global, v8::HeapGraphEdge::kShortcut, "a");
+ global, v8::HeapGraphEdge::kProperty, "a");
CHECK(obj->GetHeapValue()->IsObject());
v8::Local<v8::Object> js_obj = js_global->Get(v8_str("a")).As<v8::Object>();
CHECK(js_obj == obj->GetHeapValue());
@@ -1157,7 +1352,7 @@ TEST(GetHeapValueForDeletedObject) {
v8::HeapProfiler::TakeSnapshot(v8_str("snapshot"));
const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
const v8::HeapGraphNode* obj = GetProperty(
- global, v8::HeapGraphEdge::kShortcut, "a");
+ global, v8::HeapGraphEdge::kProperty, "a");
const v8::HeapGraphNode* prop = GetProperty(
obj, v8::HeapGraphEdge::kProperty, "p");
{
@@ -1244,7 +1439,7 @@ TEST(FastCaseGetter) {
const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
CHECK_NE(NULL, global);
const v8::HeapGraphNode* obj1 =
- GetProperty(global, v8::HeapGraphEdge::kShortcut, "obj1");
+ GetProperty(global, v8::HeapGraphEdge::kProperty, "obj1");
CHECK_NE(NULL, obj1);
const v8::HeapGraphNode* getterFunction =
GetProperty(obj1, v8::HeapGraphEdge::kProperty, "get-propWithGetter");
@@ -1326,7 +1521,7 @@ TEST(SfiAndJsFunctionWeakRefs) {
const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
CHECK_NE(NULL, global);
const v8::HeapGraphNode* fun =
- GetProperty(global, v8::HeapGraphEdge::kShortcut, "fun");
+ GetProperty(global, v8::HeapGraphEdge::kProperty, "fun");
CHECK(HasWeakEdge(fun));
const v8::HeapGraphNode* shared =
GetProperty(fun, v8::HeapGraphEdge::kInternal, "shared");
@@ -1334,6 +1529,30 @@ TEST(SfiAndJsFunctionWeakRefs) {
}
+TEST(NoDebugObjectInSnapshot) {
+ v8::HandleScope scope;
+ LocalContext env;
+
+ v8::internal::Isolate::Current()->debug()->Load();
+ CompileRun("foo = {};");
+ const v8::HeapSnapshot* snapshot =
+ v8::HeapProfiler::TakeSnapshot(v8_str("snapshot"));
+ const v8::HeapGraphNode* root = snapshot->GetRoot();
+ int globals_count = 0;
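+  // Only the test context's global object should be reachable from the root
+  // via a shortcut edge; the debug context loaded above must not add another.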
+ for (int i = 0; i < root->GetChildrenCount(); ++i) {
+ const v8::HeapGraphEdge* edge = root->GetChild(i);
+ if (edge->GetType() == v8::HeapGraphEdge::kShortcut) {
+ ++globals_count;
+ const v8::HeapGraphNode* global = edge->GetToNode();
+ const v8::HeapGraphNode* foo =
+ GetProperty(global, v8::HeapGraphEdge::kProperty, "foo");
+ CHECK_NE(NULL, foo);
+ }
+ }
+ CHECK_EQ(1, globals_count);
+}
+
+
TEST(PersistentHandleCount) {
v8::HandleScope scope;
LocalContext env;
@@ -1366,3 +1585,44 @@ TEST(PersistentHandleCount) {
p_BBB.Dispose();
CHECK_EQ(global_handle_count, v8::HeapProfiler::GetPersistentHandleCount());
}
+
+
+TEST(AllStrongGcRootsHaveNames) {
+ v8::HandleScope scope;
+ LocalContext env;
+
+ CompileRun("foo = {};");
+ const v8::HeapSnapshot* snapshot =
+ v8::HeapProfiler::TakeSnapshot(v8_str("snapshot"));
+ const v8::HeapGraphNode* gc_roots = GetNode(
+ snapshot->GetRoot(), v8::HeapGraphNode::kObject, "(GC roots)");
+ CHECK_NE(NULL, gc_roots);
+ const v8::HeapGraphNode* strong_roots = GetNode(
+ gc_roots, v8::HeapGraphNode::kObject, "(Strong roots)");
+ CHECK_NE(NULL, strong_roots);
+ for (int i = 0; i < strong_roots->GetChildrenCount(); ++i) {
+ const v8::HeapGraphEdge* edge = strong_roots->GetChild(i);
+ CHECK_EQ(v8::HeapGraphEdge::kInternal, edge->GetType());
+ v8::String::AsciiValue name(edge->GetName());
+ CHECK(isalpha(**name));
+ }
+}
+
+
+TEST(NoRefsToNonEssentialEntries) {
+ v8::HandleScope scope;
+ LocalContext env;
+ CompileRun("global_object = {};\n");
+ const v8::HeapSnapshot* snapshot =
+ v8::HeapProfiler::TakeSnapshot(v8_str("snapshot"));
+ const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
+ const v8::HeapGraphNode* global_object =
+ GetProperty(global, v8::HeapGraphEdge::kProperty, "global_object");
+ CHECK_NE(NULL, global_object);
+ const v8::HeapGraphNode* properties =
+ GetProperty(global_object, v8::HeapGraphEdge::kInternal, "properties");
+ CHECK_EQ(NULL, properties);
+ const v8::HeapGraphNode* elements =
+ GetProperty(global_object, v8::HeapGraphEdge::kInternal, "elements");
+ CHECK_EQ(NULL, elements);
+}
diff --git a/deps/v8/test/cctest/test-heap.cc b/deps/v8/test/cctest/test-heap.cc
index f97bf17219..498b67db2c 100644
--- a/deps/v8/test/cctest/test-heap.cc
+++ b/deps/v8/test/cctest/test-heap.cc
@@ -673,7 +673,7 @@ TEST(JSArray) {
array->SetElementsLength(Smi::FromInt(0))->ToObjectChecked();
CHECK_EQ(Smi::FromInt(0), array->length());
// Must be in fast mode.
- CHECK(array->HasFastTypeElements());
+ CHECK(array->HasFastSmiOrObjectElements());
// array[length] = name.
array->SetElement(0, *name, NONE, kNonStrictMode)->ToObjectChecked();
@@ -811,7 +811,9 @@ TEST(Iteration) {
// Allocate a JS array to OLD_POINTER_SPACE and NEW_SPACE
objs[next_objs_index++] = FACTORY->NewJSArray(10);
- objs[next_objs_index++] = FACTORY->NewJSArray(10, FAST_ELEMENTS, TENURED);
+ objs[next_objs_index++] = FACTORY->NewJSArray(10,
+ FAST_HOLEY_ELEMENTS,
+ TENURED);
// Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
objs[next_objs_index++] =
@@ -1214,7 +1216,9 @@ TEST(TestSizeOfObjects) {
// The heap size should go back to initial size after a full GC, even
// though sweeping didn't finish yet.
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
- CHECK(!HEAP->old_pointer_space()->IsSweepingComplete());
+
+  // Normally sweeping would not be complete here, but there are no guarantees.
+
CHECK_EQ(initial_size, static_cast<int>(HEAP->SizeOfObjects()));
// Advancing the sweeper step-wise should not change the heap size.
@@ -1264,9 +1268,9 @@ static void FillUpNewSpace(NewSpace* new_space) {
v8::HandleScope scope;
AlwaysAllocateScope always_allocate;
intptr_t available = new_space->EffectiveCapacity() - new_space->Size();
- intptr_t number_of_fillers = (available / FixedArray::SizeFor(1000)) - 10;
+ intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1;
for (intptr_t i = 0; i < number_of_fillers; i++) {
- CHECK(HEAP->InNewSpace(*FACTORY->NewFixedArray(1000, NOT_TENURED)));
+ CHECK(HEAP->InNewSpace(*FACTORY->NewFixedArray(32, NOT_TENURED)));
}
}
@@ -1275,6 +1279,13 @@ TEST(GrowAndShrinkNewSpace) {
InitializeVM();
NewSpace* new_space = HEAP->new_space();
+ if (HEAP->ReservedSemiSpaceSize() == HEAP->InitialSemiSpaceSize()) {
+    // The max size cannot exceed the reserved size, since semispaces must
+    // always stay within the reserved space. We can't test new space growing
+    // and shrinking if the reserved size is the same as the minimum (initial) size.
+ return;
+ }
+
// Explicitly growing should double the space capacity.
intptr_t old_capacity, new_capacity;
old_capacity = new_space->Capacity();
@@ -1315,6 +1326,14 @@ TEST(GrowAndShrinkNewSpace) {
TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
InitializeVM();
+
+ if (HEAP->ReservedSemiSpaceSize() == HEAP->InitialSemiSpaceSize()) {
+    // The max size cannot exceed the reserved size, since semispaces must
+    // always stay within the reserved space. We can't test new space growing
+    // and shrinking if the reserved size is the same as the minimum (initial) size.
+ return;
+ }
+
v8::HandleScope scope;
NewSpace* new_space = HEAP->new_space();
intptr_t old_capacity, new_capacity;
@@ -1560,25 +1579,30 @@ TEST(PrototypeTransitionClearing) {
*v8::Handle<v8::Object>::Cast(
v8::Context::GetCurrent()->Global()->Get(v8_str("base"))));
- // Verify that only dead prototype transitions are cleared.
- CHECK_EQ(10, baseObject->map()->NumberOfProtoTransitions());
+ // Verify that only dead prototype transitions are cleared. There is an
+ // extra, 11th, prototype transition on the Object map, which is the
+ // transition to a map with the used_for_prototype flag set (the key is
+ // the_hole).
+ CHECK_EQ(11, baseObject->map()->NumberOfProtoTransitions());
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
- CHECK_EQ(10 - 3, baseObject->map()->NumberOfProtoTransitions());
+ const int transitions = 11 - 3;
+ CHECK_EQ(transitions, baseObject->map()->NumberOfProtoTransitions());
// Verify that prototype transitions array was compacted.
FixedArray* trans = baseObject->map()->prototype_transitions();
- for (int i = 0; i < 10 - 3; i++) {
+ for (int i = 0; i < transitions; i++) {
int j = Map::kProtoTransitionHeaderSize +
i * Map::kProtoTransitionElementsPerEntry;
CHECK(trans->get(j + Map::kProtoTransitionMapOffset)->IsMap());
- CHECK(trans->get(j + Map::kProtoTransitionPrototypeOffset)->IsJSObject());
+ Object* proto = trans->get(j + Map::kProtoTransitionPrototypeOffset);
+ CHECK(proto->IsTheHole() || proto->IsJSObject());
}
// Make sure next prototype is placed on an old-space evacuation candidate.
Handle<JSObject> prototype;
PagedSpace* space = HEAP->old_pointer_space();
do {
- prototype = FACTORY->NewJSArray(32 * KB, FAST_ELEMENTS, TENURED);
+ prototype = FACTORY->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
} while (space->FirstPage() == space->LastPage() ||
!space->LastPage()->Contains(prototype->address()));
@@ -1634,6 +1658,15 @@ TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
while (!marking->IsStopped() && !marking->IsComplete()) {
marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
}
+ if (!marking->IsStopped() || marking->should_hurry()) {
+    // We don't normally finish a GC via Step(); we normally finish by
+    // setting the stack guard and then doing the final steps in the stack
+    // guard interrupt. But here we didn't ask for that, and there is no
+    // JS code running to trigger the interrupt, so we explicitly finalize
+    // here.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags,
+ "Test finalizing incremental mark-sweep");
+ }
CHECK_EQ(HEAP->global_ic_age(), f->shared()->ic_age());
CHECK_EQ(0, f->shared()->opt_count());
@@ -1680,3 +1713,191 @@ TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
CHECK_EQ(0, f->shared()->opt_count());
CHECK_EQ(0, f->shared()->code()->profiler_ticks());
}
+
+
+// Test that HAllocateObject will always return an object in new-space.
+TEST(OptimizedAllocationAlwaysInNewSpace) {
+ i::FLAG_allow_natives_syntax = true;
+ InitializeVM();
+ if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
+ v8::HandleScope scope;
+
+ FillUpNewSpace(HEAP->new_space());
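+  // Fill up new space first so the optimized allocation below is exercised
+  // with new space nearly full.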
+ AlwaysAllocateScope always_allocate;
+ v8::Local<v8::Value> res = CompileRun(
+ "function c(x) {"
+ " this.x = x;"
+ " for (var i = 0; i < 32; i++) {"
+ " this['x' + i] = x;"
+ " }"
+ "}"
+ "function f(x) { return new c(x); };"
+ "f(1); f(2); f(3);"
+ "%OptimizeFunctionOnNextCall(f);"
+ "f(4);");
+ CHECK_EQ(4, res->ToObject()->GetRealNamedProperty(v8_str("x"))->Int32Value());
+
+ Handle<JSObject> o =
+ v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
+
+ CHECK(HEAP->InNewSpace(*o));
+}
+
+
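+// Counts the descriptors in the map's descriptor array that are pure
+// transitions.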
+static int CountMapTransitions(Map* map) {
+ int result = 0;
+ DescriptorArray* descs = map->instance_descriptors();
+ for (int i = 0; i < descs->number_of_descriptors(); i++) {
+ if (descs->IsTransitionOnly(i)) {
+ result++;
+ }
+ }
+ return result;
+}
+
+
+// Test that map transitions are cleared and maps are collected with
+// incremental marking as well.
+TEST(Regress1465) {
+ i::FLAG_allow_natives_syntax = true;
+ i::FLAG_trace_incremental_marking = true;
+ InitializeVM();
+ v8::HandleScope scope;
+
+ #define TRANSITION_COUNT 256
+ for (int i = 0; i < TRANSITION_COUNT; i++) {
+ EmbeddedVector<char, 64> buffer;
+ OS::SNPrintF(buffer, "var o = new Object; o.prop%d = %d;", i, i);
+ CompileRun(buffer.start());
+ }
+ CompileRun("var root = new Object;");
+ Handle<JSObject> root =
+ v8::Utils::OpenHandle(
+ *v8::Handle<v8::Object>::Cast(
+ v8::Context::GetCurrent()->Global()->Get(v8_str("root"))));
+
+ // Count number of live transitions before marking.
+ int transitions_before = CountMapTransitions(root->map());
+ CompileRun("%DebugPrint(root);");
+ CHECK_EQ(TRANSITION_COUNT, transitions_before);
+
+ // Go through all incremental marking steps in one swoop.
+ IncrementalMarking* marking = HEAP->incremental_marking();
+ CHECK(marking->IsStopped());
+ marking->Start();
+ CHECK(marking->IsMarking());
+ while (!marking->IsComplete()) {
+ marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
+ }
+ CHECK(marking->IsComplete());
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ CHECK(marking->IsStopped());
+
+ // Count number of live transitions after marking. Note that one transition
+ // is left, because 'o' still holds an instance of one transition target.
+ int transitions_after = CountMapTransitions(root->map());
+ CompileRun("%DebugPrint(root);");
+ CHECK_EQ(1, transitions_after);
+}
+
+
+TEST(Regress2143a) {
+ i::FLAG_collect_maps = true;
+ i::FLAG_incremental_marking = true;
+ InitializeVM();
+ v8::HandleScope scope;
+
+ // Prepare a map transition from the root object together with a yet
+ // untransitioned root object.
+ CompileRun("var root = new Object;"
+ "root.foo = 0;"
+ "root = new Object;");
+
+ // Go through all incremental marking steps in one swoop.
+ IncrementalMarking* marking = HEAP->incremental_marking();
+ CHECK(marking->IsStopped());
+ marking->Start();
+ CHECK(marking->IsMarking());
+ while (!marking->IsComplete()) {
+ marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
+ }
+ CHECK(marking->IsComplete());
+
+ // Compile a StoreIC that performs the prepared map transition. This
+ // will restart incremental marking and should make sure the root is
+ // marked grey again.
+ CompileRun("function f(o) {"
+ " o.foo = 0;"
+ "}"
+ "f(new Object);"
+ "f(root);");
+
+ // This bug only triggers with aggressive IC clearing.
+ HEAP->AgeInlineCaches();
+
+ // Explicitly request GC to perform final marking step and sweeping.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ CHECK(marking->IsStopped());
+
+ Handle<JSObject> root =
+ v8::Utils::OpenHandle(
+ *v8::Handle<v8::Object>::Cast(
+ v8::Context::GetCurrent()->Global()->Get(v8_str("root"))));
+
+ // The root object should be in a sane state.
+ CHECK(root->IsJSObject());
+ CHECK(root->map()->IsMap());
+}
+
+
+TEST(Regress2143b) {
+ i::FLAG_collect_maps = true;
+ i::FLAG_incremental_marking = true;
+ i::FLAG_allow_natives_syntax = true;
+ InitializeVM();
+ v8::HandleScope scope;
+
+ // Prepare a map transition from the root object together with a yet
+ // untransitioned root object.
+ CompileRun("var root = new Object;"
+ "root.foo = 0;"
+ "root = new Object;");
+
+ // Go through all incremental marking steps in one swoop.
+ IncrementalMarking* marking = HEAP->incremental_marking();
+ CHECK(marking->IsStopped());
+ marking->Start();
+ CHECK(marking->IsMarking());
+ while (!marking->IsComplete()) {
+ marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
+ }
+ CHECK(marking->IsComplete());
+
+ // Compile an optimized LStoreNamedField that performs the prepared
+ // map transition. This will restart incremental marking and should
+ // make sure the root is marked grey again.
+ CompileRun("function f(o) {"
+ " o.foo = 0;"
+ "}"
+ "f(new Object);"
+ "f(new Object);"
+ "%OptimizeFunctionOnNextCall(f);"
+ "f(root);"
+ "%DeoptimizeFunction(f);");
+
+ // This bug only triggers with aggressive IC clearing.
+ HEAP->AgeInlineCaches();
+
+ // Explicitly request GC to perform final marking step and sweeping.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ CHECK(marking->IsStopped());
+
+ Handle<JSObject> root =
+ v8::Utils::OpenHandle(
+ *v8::Handle<v8::Object>::Cast(
+ v8::Context::GetCurrent()->Global()->Get(v8_str("root"))));
+
+ // The root object should be in a sane state.
+ CHECK(root->IsJSObject());
+ CHECK(root->map()->IsMap());
+}
diff --git a/deps/v8/test/cctest/test-list.cc b/deps/v8/test/cctest/test-list.cc
index 7520b05fcb..740b432f3e 100644
--- a/deps/v8/test/cctest/test-list.cc
+++ b/deps/v8/test/cctest/test-list.cc
@@ -35,7 +35,7 @@ using namespace v8::internal;
// Use a testing allocator that clears memory before deletion.
class ZeroingAllocationPolicy {
public:
- static void* New(size_t size) {
+ void* New(size_t size) {
// Stash the size in the first word to use for Delete.
size_t true_size = size + sizeof(size_t);
size_t* result = reinterpret_cast<size_t*>(malloc(true_size));
@@ -130,6 +130,18 @@ TEST(RemoveLast) {
}
+TEST(Allocate) {
+ List<int> list(4);
+ list.Add(1);
+ CHECK_EQ(1, list.length());
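+  // Allocate(n) is expected to resize the list to exactly n elements with a
+  // capacity of at least n.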
+ list.Allocate(100);
+ CHECK_EQ(100, list.length());
+ CHECK_LE(100, list.capacity());
+ list[99] = 123;
+ CHECK_EQ(123, list[99]);
+}
+
+
TEST(Clear) {
List<int> list(4);
CHECK_EQ(0, list.length());
diff --git a/deps/v8/test/cctest/test-liveedit.cc b/deps/v8/test/cctest/test-liveedit.cc
index 2498fca906..013de026f2 100644
--- a/deps/v8/test/cctest/test-liveedit.cc
+++ b/deps/v8/test/cctest/test-liveedit.cc
@@ -81,7 +81,8 @@ class ListDiffOutputWriter : public Comparator::Output {
(*next_chunk_pointer_) = NULL;
}
void AddChunk(int pos1, int pos2, int len1, int len2) {
- current_chunk_ = new DiffChunkStruct(pos1, pos2, len1, len2);
+ current_chunk_ =
+ new(Isolate::Current()->zone()) DiffChunkStruct(pos1, pos2, len1, len2);
(*next_chunk_pointer_) = current_chunk_;
next_chunk_pointer_ = &current_chunk_->next;
}
diff --git a/deps/v8/test/cctest/test-mark-compact.cc b/deps/v8/test/cctest/test-mark-compact.cc
index 973af19662..27123704b1 100644
--- a/deps/v8/test/cctest/test-mark-compact.cc
+++ b/deps/v8/test/cctest/test-mark-compact.cc
@@ -531,18 +531,18 @@ TEST(BootUpMemoryUse) {
// there we just skip the test.
if (initial_memory >= 0) {
InitializeVM();
- intptr_t booted_memory = MemoryInUse();
+ intptr_t delta = MemoryInUse() - initial_memory;
if (sizeof(initial_memory) == 8) {
if (v8::internal::Snapshot::IsEnabled()) {
- CHECK_LE(booted_memory - initial_memory, 6686 * 1024); // 6476.
+ CHECK_LE(delta, 3600 * 1024); // 3396.
} else {
- CHECK_LE(booted_memory - initial_memory, 6809 * 1024); // 6628.
+ CHECK_LE(delta, 4000 * 1024); // 3948.
}
} else {
if (v8::internal::Snapshot::IsEnabled()) {
- CHECK_LE(booted_memory - initial_memory, 6532 * 1024); // 6388.
+ CHECK_LE(delta, 2600 * 1024); // 2484.
} else {
- CHECK_LE(booted_memory - initial_memory, 6940 * 1024); // 6456
+ CHECK_LE(delta, 2950 * 1024); // 2844
}
}
}
diff --git a/deps/v8/test/cctest/test-parsing.cc b/deps/v8/test/cctest/test-parsing.cc
index 6bcae7c308..b9123f01f0 100755
--- a/deps/v8/test/cctest/test-parsing.cc
+++ b/deps/v8/test/cctest/test-parsing.cc
@@ -1016,7 +1016,8 @@ TEST(ScopePositions) {
FACTORY->NewStringFromUtf8(i::CStrVector(program.start())));
CHECK_EQ(source->length(), kProgramSize);
i::Handle<i::Script> script = FACTORY->NewScript(source);
- i::Parser parser(script, i::kAllowLazy | i::EXTENDED_MODE, NULL, NULL);
+ i::Parser parser(script, i::kAllowLazy | i::EXTENDED_MODE, NULL, NULL,
+ i::Isolate::Current()->zone());
i::CompilationInfo info(script);
info.MarkAsGlobal();
info.SetLanguageMode(source_data[i].language_mode);
@@ -1060,7 +1061,7 @@ void TestParserSync(i::Handle<i::String> source, int flags) {
i::Handle<i::Script> script = FACTORY->NewScript(source);
bool save_harmony_scoping = i::FLAG_harmony_scoping;
i::FLAG_harmony_scoping = harmony_scoping;
- i::Parser parser(script, flags, NULL, NULL);
+ i::Parser parser(script, flags, NULL, NULL, i::Isolate::Current()->zone());
i::CompilationInfo info(script);
info.MarkAsGlobal();
i::FunctionLiteral* function = parser.ParseProgram(&info);
diff --git a/deps/v8/test/cctest/test-regexp.cc b/deps/v8/test/cctest/test-regexp.cc
index d941d0f7b0..325c686063 100644
--- a/deps/v8/test/cctest/test-regexp.cc
+++ b/deps/v8/test/cctest/test-regexp.cc
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -88,7 +88,8 @@ static SmartArrayPointer<const char> Parse(const char* input) {
CHECK(v8::internal::RegExpParser::ParseRegExp(&reader, false, &result));
CHECK(result.tree != NULL);
CHECK(result.error.is_null());
- SmartArrayPointer<const char> output = result.tree->ToString();
+ SmartArrayPointer<const char> output =
+ result.tree->ToString(Isolate::Current()->zone());
return output;
}
@@ -469,8 +470,10 @@ static bool NotWord(uc16 c) {
static void TestCharacterClassEscapes(uc16 c, bool (pred)(uc16 c)) {
ZoneScope scope(Isolate::Current(), DELETE_ON_EXIT);
- ZoneList<CharacterRange>* ranges = new ZoneList<CharacterRange>(2);
- CharacterRange::AddClassEscape(c, ranges);
+ Zone* zone = Isolate::Current()->zone();
+ ZoneList<CharacterRange>* ranges =
+ new(zone) ZoneList<CharacterRange>(2, zone);
+ CharacterRange::AddClassEscape(c, ranges, zone);
for (unsigned i = 0; i < (1 << 16); i++) {
bool in_class = false;
for (int j = 0; !in_class && j < ranges->length(); j++) {
@@ -504,7 +507,16 @@ static RegExpNode* Compile(const char* input, bool multiline, bool is_ascii) {
return NULL;
Handle<String> pattern = isolate->factory()->
NewStringFromUtf8(CStrVector(input));
- RegExpEngine::Compile(&compile_data, false, multiline, pattern, is_ascii);
+ Handle<String> sample_subject =
+ isolate->factory()->NewStringFromUtf8(CStrVector(""));
+ RegExpEngine::Compile(&compile_data,
+ false,
+ false,
+ multiline,
+ pattern,
+ sample_subject,
+ is_ascii,
+ isolate->zone());
return compile_data.node;
}
@@ -555,7 +567,7 @@ TEST(SplayTreeSimple) {
v8::internal::V8::Initialize(NULL);
static const unsigned kLimit = 1000;
ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
- ZoneSplayTree<TestConfig> tree;
+ ZoneSplayTree<TestConfig> tree(Isolate::Current()->zone());
bool seen[kLimit];
for (unsigned i = 0; i < kLimit; i++) seen[i] = false;
#define CHECK_MAPS_EQUAL() do { \
@@ -623,11 +635,12 @@ TEST(DispatchTableConstruction) {
}
// Enter test data into dispatch table.
ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
- DispatchTable table;
+ DispatchTable table(Isolate::Current()->zone());
for (int i = 0; i < kRangeCount; i++) {
uc16* range = ranges[i];
for (int j = 0; j < 2 * kRangeSize; j += 2)
- table.AddRange(CharacterRange(range[j], range[j + 1]), i);
+ table.AddRange(CharacterRange(range[j], range[j + 1]), i,
+ Isolate::Current()->zone());
}
// Check that the table looks as we would expect
for (int p = 0; p < kLimit; p++) {
@@ -717,6 +730,7 @@ static ArchRegExpMacroAssembler::Result Execute(Code* code,
input_start,
input_end,
captures,
+ 0,
Isolate::Current());
}
@@ -726,7 +740,8 @@ TEST(MacroAssemblerNativeSuccess) {
ContextInitializer initializer;
Factory* factory = Isolate::Current()->factory();
- ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 4);
+ ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 4,
+ Isolate::Current()->zone());
m.Succeed();
@@ -761,7 +776,8 @@ TEST(MacroAssemblerNativeSimple) {
ContextInitializer initializer;
Factory* factory = Isolate::Current()->factory();
- ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 4);
+ ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 4,
+ Isolate::Current()->zone());
uc16 foo_chars[3] = {'f', 'o', 'o'};
Vector<const uc16> foo(foo_chars, 3);
@@ -818,7 +834,8 @@ TEST(MacroAssemblerNativeSimpleUC16) {
ContextInitializer initializer;
Factory* factory = Isolate::Current()->factory();
- ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::UC16, 4);
+ ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::UC16, 4,
+ Isolate::Current()->zone());
uc16 foo_chars[3] = {'f', 'o', 'o'};
Vector<const uc16> foo(foo_chars, 3);
@@ -880,7 +897,8 @@ TEST(MacroAssemblerNativeBacktrack) {
ContextInitializer initializer;
Factory* factory = Isolate::Current()->factory();
- ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 0);
+ ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 0,
+ Isolate::Current()->zone());
Label fail;
Label backtrack;
@@ -918,7 +936,8 @@ TEST(MacroAssemblerNativeBackReferenceASCII) {
ContextInitializer initializer;
Factory* factory = Isolate::Current()->factory();
- ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 4);
+ ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 4,
+ Isolate::Current()->zone());
m.WriteCurrentPositionToRegister(0, 0);
m.AdvanceCurrentPosition(2);
@@ -965,7 +984,8 @@ TEST(MacroAssemblerNativeBackReferenceUC16) {
ContextInitializer initializer;
Factory* factory = Isolate::Current()->factory();
- ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::UC16, 4);
+ ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::UC16, 4,
+ Isolate::Current()->zone());
m.WriteCurrentPositionToRegister(0, 0);
m.AdvanceCurrentPosition(2);
@@ -995,11 +1015,11 @@ TEST(MacroAssemblerNativeBackReferenceUC16) {
int output[4];
NativeRegExpMacroAssembler::Result result =
Execute(*code,
- *input,
- 0,
- start_adr,
- start_adr + input->length() * 2,
- output);
+ *input,
+ 0,
+ start_adr,
+ start_adr + input->length() * 2,
+ output);
CHECK_EQ(NativeRegExpMacroAssembler::SUCCESS, result);
CHECK_EQ(0, output[0]);
@@ -1015,7 +1035,8 @@ TEST(MacroAssemblernativeAtStart) {
ContextInitializer initializer;
Factory* factory = Isolate::Current()->factory();
- ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 0);
+ ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 0,
+ Isolate::Current()->zone());
Label not_at_start, newline, fail;
m.CheckNotAtStart(&not_at_start);
@@ -1072,7 +1093,8 @@ TEST(MacroAssemblerNativeBackRefNoCase) {
ContextInitializer initializer;
Factory* factory = Isolate::Current()->factory();
- ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 4);
+ ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 4,
+ Isolate::Current()->zone());
Label fail, succ;
@@ -1129,7 +1151,8 @@ TEST(MacroAssemblerNativeRegisters) {
ContextInitializer initializer;
Factory* factory = Isolate::Current()->factory();
- ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 6);
+ ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 6,
+ Isolate::Current()->zone());
uc16 foo_chars[3] = {'f', 'o', 'o'};
Vector<const uc16> foo(foo_chars, 3);
@@ -1231,7 +1254,8 @@ TEST(MacroAssemblerStackOverflow) {
Isolate* isolate = Isolate::Current();
Factory* factory = isolate->factory();
- ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 0);
+ ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 0,
+ Isolate::Current()->zone());
Label loop;
m.Bind(&loop);
@@ -1269,7 +1293,8 @@ TEST(MacroAssemblerNativeLotsOfRegisters) {
Isolate* isolate = Isolate::Current();
Factory* factory = isolate->factory();
- ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 2);
+ ArchRegExpMacroAssembler m(NativeRegExpMacroAssembler::ASCII, 2,
+ Isolate::Current()->zone());
// At least 2048, to ensure the allocated space for registers
// span one full page.
@@ -1387,16 +1412,18 @@ TEST(AddInverseToTable) {
static const int kRangeCount = 16;
for (int t = 0; t < 10; t++) {
ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
+ Zone* zone = Isolate::Current()->zone();
ZoneList<CharacterRange>* ranges =
- new ZoneList<CharacterRange>(kRangeCount);
+ new(zone)
+ ZoneList<CharacterRange>(kRangeCount, zone);
for (int i = 0; i < kRangeCount; i++) {
int from = PseudoRandom(t + 87, i + 25) % kLimit;
int to = from + (PseudoRandom(i + 87, t + 25) % (kLimit / 20));
if (to > kLimit) to = kLimit;
- ranges->Add(CharacterRange(from, to));
+ ranges->Add(CharacterRange(from, to), zone);
}
- DispatchTable table;
- DispatchTableConstructor cons(&table, false);
+ DispatchTable table(zone);
+ DispatchTableConstructor cons(&table, false, Isolate::Current()->zone());
cons.set_choice_index(0);
cons.AddInverse(ranges);
for (int i = 0; i < kLimit; i++) {
@@ -1408,11 +1435,12 @@ TEST(AddInverseToTable) {
}
}
ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
+ Zone* zone = Isolate::Current()->zone();
ZoneList<CharacterRange>* ranges =
- new ZoneList<CharacterRange>(1);
- ranges->Add(CharacterRange(0xFFF0, 0xFFFE));
- DispatchTable table;
- DispatchTableConstructor cons(&table, false);
+ new(zone) ZoneList<CharacterRange>(1, zone);
+ ranges->Add(CharacterRange(0xFFF0, 0xFFFE), zone);
+ DispatchTable table(zone);
+ DispatchTableConstructor cons(&table, false, Isolate::Current()->zone());
cons.set_choice_index(0);
cons.AddInverse(ranges);
CHECK(!table.Get(0xFFFE)->Get(0));
@@ -1521,9 +1549,11 @@ TEST(UncanonicalizeEquivalence) {
static void TestRangeCaseIndependence(CharacterRange input,
Vector<CharacterRange> expected) {
ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
+ Zone* zone = Isolate::Current()->zone();
int count = expected.length();
- ZoneList<CharacterRange>* list = new ZoneList<CharacterRange>(count);
- input.AddCaseEquivalents(list, false);
+ ZoneList<CharacterRange>* list =
+ new(zone) ZoneList<CharacterRange>(count, zone);
+ input.AddCaseEquivalents(list, false, zone);
CHECK_EQ(count, list->length());
for (int i = 0; i < list->length(); i++) {
CHECK_EQ(expected[i].from(), list->at(i).from());
@@ -1585,18 +1615,21 @@ static bool InClass(uc16 c, ZoneList<CharacterRange>* ranges) {
TEST(CharClassDifference) {
v8::internal::V8::Initialize(NULL);
ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
- ZoneList<CharacterRange>* base = new ZoneList<CharacterRange>(1);
- base->Add(CharacterRange::Everything());
- Vector<const uc16> overlay = CharacterRange::GetWordBounds();
+ Zone* zone = Isolate::Current()->zone();
+ ZoneList<CharacterRange>* base =
+ new(zone) ZoneList<CharacterRange>(1, zone);
+ base->Add(CharacterRange::Everything(), zone);
+ Vector<const int> overlay = CharacterRange::GetWordBounds();
ZoneList<CharacterRange>* included = NULL;
ZoneList<CharacterRange>* excluded = NULL;
- CharacterRange::Split(base, overlay, &included, &excluded);
+ CharacterRange::Split(base, overlay, &included, &excluded,
+ Isolate::Current()->zone());
for (int i = 0; i < (1 << 16); i++) {
bool in_base = InClass(i, base);
if (in_base) {
bool in_overlay = false;
for (int j = 0; !in_overlay && j < overlay.length(); j += 2) {
- if (overlay[j] <= i && i <= overlay[j+1])
+ if (overlay[j] <= i && i < overlay[j+1])
in_overlay = true;
}
CHECK_EQ(in_overlay, InClass(i, included));
@@ -1612,12 +1645,14 @@ TEST(CharClassDifference) {
TEST(CanonicalizeCharacterSets) {
v8::internal::V8::Initialize(NULL);
ZoneScope scope(Isolate::Current(), DELETE_ON_EXIT);
- ZoneList<CharacterRange>* list = new ZoneList<CharacterRange>(4);
+ Zone* zone = Isolate::Current()->zone();
+ ZoneList<CharacterRange>* list =
+ new(zone) ZoneList<CharacterRange>(4, zone);
CharacterSet set(list);
- list->Add(CharacterRange(10, 20));
- list->Add(CharacterRange(30, 40));
- list->Add(CharacterRange(50, 60));
+ list->Add(CharacterRange(10, 20), zone);
+ list->Add(CharacterRange(30, 40), zone);
+ list->Add(CharacterRange(50, 60), zone);
set.Canonicalize();
ASSERT_EQ(3, list->length());
ASSERT_EQ(10, list->at(0).from());
@@ -1628,9 +1663,9 @@ TEST(CanonicalizeCharacterSets) {
ASSERT_EQ(60, list->at(2).to());
list->Rewind(0);
- list->Add(CharacterRange(10, 20));
- list->Add(CharacterRange(50, 60));
- list->Add(CharacterRange(30, 40));
+ list->Add(CharacterRange(10, 20), zone);
+ list->Add(CharacterRange(50, 60), zone);
+ list->Add(CharacterRange(30, 40), zone);
set.Canonicalize();
ASSERT_EQ(3, list->length());
ASSERT_EQ(10, list->at(0).from());
@@ -1641,11 +1676,11 @@ TEST(CanonicalizeCharacterSets) {
ASSERT_EQ(60, list->at(2).to());
list->Rewind(0);
- list->Add(CharacterRange(30, 40));
- list->Add(CharacterRange(10, 20));
- list->Add(CharacterRange(25, 25));
- list->Add(CharacterRange(100, 100));
- list->Add(CharacterRange(1, 1));
+ list->Add(CharacterRange(30, 40), zone);
+ list->Add(CharacterRange(10, 20), zone);
+ list->Add(CharacterRange(25, 25), zone);
+ list->Add(CharacterRange(100, 100), zone);
+ list->Add(CharacterRange(1, 1), zone);
set.Canonicalize();
ASSERT_EQ(5, list->length());
ASSERT_EQ(1, list->at(0).from());
@@ -1660,31 +1695,22 @@ TEST(CanonicalizeCharacterSets) {
ASSERT_EQ(100, list->at(4).to());
list->Rewind(0);
- list->Add(CharacterRange(10, 19));
- list->Add(CharacterRange(21, 30));
- list->Add(CharacterRange(20, 20));
+ list->Add(CharacterRange(10, 19), zone);
+ list->Add(CharacterRange(21, 30), zone);
+ list->Add(CharacterRange(20, 20), zone);
set.Canonicalize();
ASSERT_EQ(1, list->length());
ASSERT_EQ(10, list->at(0).from());
ASSERT_EQ(30, list->at(0).to());
}
-// Checks whether a character is in the set represented by a list of ranges.
-static bool CharacterInSet(ZoneList<CharacterRange>* set, uc16 value) {
- for (int i = 0; i < set->length(); i++) {
- CharacterRange range = set->at(i);
- if (range.from() <= value && value <= range.to()) {
- return true;
- }
- }
- return false;
-}
TEST(CharacterRangeMerge) {
v8::internal::V8::Initialize(NULL);
ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
- ZoneList<CharacterRange> l1(4);
- ZoneList<CharacterRange> l2(4);
+ ZoneList<CharacterRange> l1(4, Isolate::Current()->zone());
+ ZoneList<CharacterRange> l2(4, Isolate::Current()->zone());
+ Zone* zone = Isolate::Current()->zone();
// Create all combinations of intersections of ranges, both singletons and
// longer.
@@ -1699,8 +1725,8 @@ TEST(CharacterRangeMerge) {
// Y - outside after
for (int i = 0; i < 5; i++) {
- l1.Add(CharacterRange::Singleton(offset + 2));
- l2.Add(CharacterRange::Singleton(offset + i));
+ l1.Add(CharacterRange::Singleton(offset + 2), zone);
+ l2.Add(CharacterRange::Singleton(offset + i), zone);
offset += 6;
}
@@ -1715,8 +1741,8 @@ TEST(CharacterRangeMerge) {
// Y - disjoint after
for (int i = 0; i < 7; i++) {
- l1.Add(CharacterRange::Range(offset + 2, offset + 4));
- l2.Add(CharacterRange::Singleton(offset + i));
+ l1.Add(CharacterRange::Range(offset + 2, offset + 4), zone);
+ l2.Add(CharacterRange::Singleton(offset + i), zone);
offset += 8;
}
@@ -1736,96 +1762,35 @@ TEST(CharacterRangeMerge) {
// YYYYYYYYYYYY - containing entirely.
for (int i = 0; i < 9; i++) {
- l1.Add(CharacterRange::Range(offset + 6, offset + 15)); // Length 8.
- l2.Add(CharacterRange::Range(offset + 2 * i, offset + 2 * i + 3));
+ l1.Add(CharacterRange::Range(offset + 6, offset + 15), zone); // Length 8.
+ l2.Add(CharacterRange::Range(offset + 2 * i, offset + 2 * i + 3), zone);
offset += 22;
}
- l1.Add(CharacterRange::Range(offset + 6, offset + 15));
- l2.Add(CharacterRange::Range(offset + 6, offset + 15));
+ l1.Add(CharacterRange::Range(offset + 6, offset + 15), zone);
+ l2.Add(CharacterRange::Range(offset + 6, offset + 15), zone);
offset += 22;
- l1.Add(CharacterRange::Range(offset + 6, offset + 15));
- l2.Add(CharacterRange::Range(offset + 4, offset + 17));
+ l1.Add(CharacterRange::Range(offset + 6, offset + 15), zone);
+ l2.Add(CharacterRange::Range(offset + 4, offset + 17), zone);
offset += 22;
// Different kinds of multi-range overlap:
// XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXX
// YYYY Y YYYY Y YYYY Y YYYY Y YYYY Y YYYY Y
- l1.Add(CharacterRange::Range(offset, offset + 21));
- l1.Add(CharacterRange::Range(offset + 31, offset + 52));
+ l1.Add(CharacterRange::Range(offset, offset + 21), zone);
+ l1.Add(CharacterRange::Range(offset + 31, offset + 52), zone);
for (int i = 0; i < 6; i++) {
- l2.Add(CharacterRange::Range(offset + 2, offset + 5));
- l2.Add(CharacterRange::Singleton(offset + 8));
+ l2.Add(CharacterRange::Range(offset + 2, offset + 5), zone);
+ l2.Add(CharacterRange::Singleton(offset + 8), zone);
offset += 9;
}
ASSERT(CharacterRange::IsCanonical(&l1));
ASSERT(CharacterRange::IsCanonical(&l2));
- ZoneList<CharacterRange> first_only(4);
- ZoneList<CharacterRange> second_only(4);
- ZoneList<CharacterRange> both(4);
-
- // Merge one direction.
- CharacterRange::Merge(&l1, &l2, &first_only, &second_only, &both);
-
- CHECK(CharacterRange::IsCanonical(&first_only));
- CHECK(CharacterRange::IsCanonical(&second_only));
- CHECK(CharacterRange::IsCanonical(&both));
-
- for (uc16 i = 0; i < offset; i++) {
- bool in_first = CharacterInSet(&l1, i);
- bool in_second = CharacterInSet(&l2, i);
- CHECK((in_first && !in_second) == CharacterInSet(&first_only, i));
- CHECK((!in_first && in_second) == CharacterInSet(&second_only, i));
- CHECK((in_first && in_second) == CharacterInSet(&both, i));
- }
-
- first_only.Clear();
- second_only.Clear();
- both.Clear();
-
- // Merge other direction.
- CharacterRange::Merge(&l2, &l1, &second_only, &first_only, &both);
-
- CHECK(CharacterRange::IsCanonical(&first_only));
- CHECK(CharacterRange::IsCanonical(&second_only));
- CHECK(CharacterRange::IsCanonical(&both));
-
- for (uc16 i = 0; i < offset; i++) {
- bool in_first = CharacterInSet(&l1, i);
- bool in_second = CharacterInSet(&l2, i);
- CHECK((in_first && !in_second) == CharacterInSet(&first_only, i));
- CHECK((!in_first && in_second) == CharacterInSet(&second_only, i));
- CHECK((in_first && in_second) == CharacterInSet(&both, i));
- }
-
- first_only.Clear();
- second_only.Clear();
- both.Clear();
-
- // Merge but don't record all combinations.
- CharacterRange::Merge(&l1, &l2, NULL, NULL, &both);
-
- CHECK(CharacterRange::IsCanonical(&both));
-
- for (uc16 i = 0; i < offset; i++) {
- bool in_first = CharacterInSet(&l1, i);
- bool in_second = CharacterInSet(&l2, i);
- CHECK((in_first && in_second) == CharacterInSet(&both, i));
- }
-
- // Merge into same set.
- ZoneList<CharacterRange> all(4);
- CharacterRange::Merge(&l1, &l2, &all, &all, &all);
-
- CHECK(CharacterRange::IsCanonical(&all));
-
- for (uc16 i = 0; i < offset; i++) {
- bool in_first = CharacterInSet(&l1, i);
- bool in_second = CharacterInSet(&l2, i);
- CHECK((in_first || in_second) == CharacterInSet(&all, i));
- }
+ ZoneList<CharacterRange> first_only(4, Isolate::Current()->zone());
+ ZoneList<CharacterRange> second_only(4, Isolate::Current()->zone());
+ ZoneList<CharacterRange> both(4, Isolate::Current()->zone());
}
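Nearly every hunk in test-regexp.cc above makes the same mechanical change: zone-allocated containers and character-range helpers now receive the Zone explicitly, both when they are placement-newed into it and on every call that can grow them. A minimal sketch of the pattern, assuming the usual cctest setup (initialized VM plus a ZoneScope) used by the tests above:

Zone* zone = Isolate::Current()->zone();
ZoneList<CharacterRange>* ranges =
    new(zone) ZoneList<CharacterRange>(4, zone);  // backing store lives in the zone
ranges->Add(CharacterRange(10, 20), zone);        // growth allocates from the same zone
ranges->Add(CharacterRange(30, 40), zone);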
diff --git a/deps/v8/test/cctest/test-spaces.cc b/deps/v8/test/cctest/test-spaces.cc
index 92de2a60fa..0e957048ee 100644
--- a/deps/v8/test/cctest/test-spaces.cc
+++ b/deps/v8/test/cctest/test-spaces.cc
@@ -140,8 +140,8 @@ TEST(MemoryAllocator) {
heap->MaxReserved(),
OLD_POINTER_SPACE,
NOT_EXECUTABLE);
- Page* first_page =
- memory_allocator->AllocatePage(&faked_space, NOT_EXECUTABLE);
+ Page* first_page = memory_allocator->AllocatePage(
+ faked_space.AreaSize(), &faked_space, NOT_EXECUTABLE);
first_page->InsertAfter(faked_space.anchor()->prev_page());
CHECK(first_page->is_valid());
@@ -153,8 +153,8 @@ TEST(MemoryAllocator) {
}
// Again, we should get n or n - 1 pages.
- Page* other =
- memory_allocator->AllocatePage(&faked_space, NOT_EXECUTABLE);
+ Page* other = memory_allocator->AllocatePage(
+ faked_space.AreaSize(), &faked_space, NOT_EXECUTABLE);
CHECK(other->is_valid());
total_pages++;
other->InsertAfter(first_page);
diff --git a/deps/v8/test/cctest/test-strings.cc b/deps/v8/test/cctest/test-strings.cc
index e11349bc85..7cddff3309 100644
--- a/deps/v8/test/cctest/test-strings.cc
+++ b/deps/v8/test/cctest/test-strings.cc
@@ -82,6 +82,7 @@ static void InitializeBuildingBlocks(
Handle<String> building_blocks[NUMBER_OF_BUILDING_BLOCKS]) {
// A list of pointers that we don't have any interest in cleaning up.
// If they are reachable from a root then leak detection won't complain.
+ Zone* zone = Isolate::Current()->zone();
for (int i = 0; i < NUMBER_OF_BUILDING_BLOCKS; i++) {
int len = gen() % 16;
if (len > 14) {
@@ -113,11 +114,11 @@ static void InitializeBuildingBlocks(
break;
}
case 2: {
- uc16* buf = ZONE->NewArray<uc16>(len);
+ uc16* buf = zone->NewArray<uc16>(len);
for (int j = 0; j < len; j++) {
buf[j] = gen() % 65536;
}
- Resource* resource = new Resource(Vector<const uc16>(buf, len));
+ Resource* resource = new(zone) Resource(Vector<const uc16>(buf, len));
building_blocks[i] = FACTORY->NewExternalStringFromTwoByte(resource);
for (int j = 0; j < len; j++) {
CHECK_EQ(buf[j], building_blocks[i]->Get(j));
@@ -348,10 +349,11 @@ TEST(Utf8Conversion) {
TEST(ExternalShortStringAdd) {
- ZoneScope zone(Isolate::Current(), DELETE_ON_EXIT);
+ ZoneScope zonescope(Isolate::Current(), DELETE_ON_EXIT);
InitializeVM();
v8::HandleScope handle_scope;
+ Zone* zone = Isolate::Current()->zone();
// Make sure we cover all always-flat lengths and at least one above.
static const int kMaxLength = 20;
@@ -365,25 +367,25 @@ TEST(ExternalShortStringAdd) {
// Generate short ascii and non-ascii external strings.
for (int i = 0; i <= kMaxLength; i++) {
- char* ascii = ZONE->NewArray<char>(i + 1);
+ char* ascii = zone->NewArray<char>(i + 1);
for (int j = 0; j < i; j++) {
ascii[j] = 'a';
}
// Terminating '\0' is left out on purpose. It is not required for external
// string data.
AsciiResource* ascii_resource =
- new AsciiResource(Vector<const char>(ascii, i));
+ new(zone) AsciiResource(Vector<const char>(ascii, i));
v8::Local<v8::String> ascii_external_string =
v8::String::NewExternal(ascii_resource);
ascii_external_strings->Set(v8::Integer::New(i), ascii_external_string);
- uc16* non_ascii = ZONE->NewArray<uc16>(i + 1);
+ uc16* non_ascii = zone->NewArray<uc16>(i + 1);
for (int j = 0; j < i; j++) {
non_ascii[j] = 0x1234;
}
// Terminating '\0' is left out on purpose. It is not required for external
// string data.
- Resource* resource = new Resource(Vector<const uc16>(non_ascii, i));
+ Resource* resource = new(zone) Resource(Vector<const uc16>(non_ascii, i));
v8::Local<v8::String> non_ascii_external_string =
v8::String::NewExternal(resource);
non_ascii_external_strings->Set(v8::Integer::New(i),
@@ -587,3 +589,105 @@ TEST(SliceFromSlice) {
CHECK(SlicedString::cast(*string)->parent()->IsSeqString());
CHECK_EQ("cdefghijklmnopqrstuvwx", *(string->ToCString()));
}
+
+
+TEST(AsciiArrayJoin) {
+ // Set heap limits.
+ static const int K = 1024;
+ v8::ResourceConstraints constraints;
+ constraints.set_max_young_space_size(256 * K);
+ constraints.set_max_old_space_size(4 * K * K);
+ v8::SetResourceConstraints(&constraints);
+
+ // String s is made of 2^17 = 131072 'c' characters and a is an array
+ // starting with 'bad', followed by 2^14 times the string s. That means the
+ // total length of the concatenated strings is 2^31 + 3. So on 32bit systems
+ // summing the lengths of the strings (as Smis) overflows and wraps.
+ static const char* join_causing_out_of_memory =
+ "var two_14 = Math.pow(2, 14);"
+ "var two_17 = Math.pow(2, 17);"
+ "var s = Array(two_17 + 1).join('c');"
+ "var a = ['bad'];"
+ "for (var i = 1; i <= two_14; i++) a.push(s);"
+ "a.join("");";
+
+ v8::HandleScope scope;
+ LocalContext context;
+ v8::V8::IgnoreOutOfMemoryException();
+ v8::Local<v8::Script> script =
+ v8::Script::Compile(v8::String::New(join_causing_out_of_memory));
+ v8::Local<v8::Value> result = script->Run();
+
+ // Check for out of memory state.
+ CHECK(result.IsEmpty());
+ CHECK(context->HasOutOfMemoryException());
+}
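The comment in AsciiArrayJoin above gives the total length as 2^31 + 3; written out, the array holds 2^14 copies of the 2^17-character string s plus the three characters of 'bad', so summing the lengths in 32-bit signed Smi arithmetic wraps negative. A quick sketch of that arithmetic, not part of the test:

int64_t total = (int64_t(1) << 17) * (int64_t(1) << 14) + 3;  // 2^31 + 3 = 2147483651
int32_t wrapped = static_cast<int32_t>(total);                // wraps to -2147483645 in a 32-bit sum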
+
+
+static void CheckException(const char* source) {
+ // An empty handle is returned upon exception.
+ CHECK(CompileRun(source).IsEmpty());
+}
+
+
+TEST(RobustSubStringStub) {
+ // This tests whether the SubStringStub can handle unsafe arguments.
+ // If not recognized, those unsafe arguments lead to out-of-bounds reads.
+ FLAG_allow_natives_syntax = true;
+ InitializeVM();
+ HandleScope scope;
+ v8::Local<v8::Value> result;
+ Handle<String> string;
+ CompileRun("var short = 'abcdef';");
+
+ // Invalid indices.
+ CheckException("%_SubString(short, 0, 10000);");
+ CheckException("%_SubString(short, -1234, 5);");
+ CheckException("%_SubString(short, 5, 2);");
+ // Special HeapNumbers.
+ CheckException("%_SubString(short, 1, Infinity);");
+ CheckException("%_SubString(short, NaN, 5);");
+ // String arguments.
+ CheckException("%_SubString(short, '2', '5');");
+ // Ordinary HeapNumbers can be handled (in runtime).
+ result = CompileRun("%_SubString(short, Math.sqrt(4), 5.1);");
+ string = v8::Utils::OpenHandle(v8::String::Cast(*result));
+ CHECK_EQ("cde", *(string->ToCString()));
+
+ CompileRun("var long = 'abcdefghijklmnopqrstuvwxyz';");
+ // Invalid indices.
+ CheckException("%_SubString(long, 0, 10000);");
+ CheckException("%_SubString(long, -1234, 17);");
+ CheckException("%_SubString(long, 17, 2);");
+ // Special HeapNumbers.
+ CheckException("%_SubString(long, 1, Infinity);");
+ CheckException("%_SubString(long, NaN, 17);");
+ // String arguments.
+ CheckException("%_SubString(long, '2', '17');");
+ // Ordinary HeapNumbers within bounds can be handled (in runtime).
+ result = CompileRun("%_SubString(long, Math.sqrt(4), 17.1);");
+ string = v8::Utils::OpenHandle(v8::String::Cast(*result));
+ CHECK_EQ("cdefghijklmnopq", *(string->ToCString()));
+
+ // Test that out-of-bounds substring of a slice fails when the indices
+ // would have been valid for the underlying string.
+ CompileRun("var slice = long.slice(1, 15);");
+ CheckException("%_SubString(slice, 0, 17);");
+}
+
+
+TEST(RegExpOverflow) {
+ // Result string has the length 2^32, causing a 32-bit integer overflow.
+ InitializeVM();
+ HandleScope scope;
+ LocalContext context;
+ v8::V8::IgnoreOutOfMemoryException();
+ v8::Local<v8::Value> result = CompileRun(
+ "var a = 'a'; "
+ "for (var i = 0; i < 16; i++) { "
+ " a += a; "
+ "} "
+ "a.replace(/a/g, a); ");
+ CHECK(result.IsEmpty());
+ CHECK(context->HasOutOfMemoryException());
+}
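In RegExpOverflow above, the loop doubles a sixteen times, so it ends up 2^16 characters long; replacing every 'a' with the whole string would then need 2^16 * 2^16 = 2^32 characters, one more than a 32-bit length can represent. The arithmetic as a sketch:

uint64_t len = uint64_t(1) << 16;  // length of a after 16 doublings
uint64_t result_len = len * len;   // 2^32, which does not fit in a 32-bit length field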
diff --git a/deps/v8/test/cctest/test-thread-termination.cc b/deps/v8/test/cctest/test-thread-termination.cc
index 1aa57e3081..cebabaa97e 100644
--- a/deps/v8/test/cctest/test-thread-termination.cc
+++ b/deps/v8/test/cctest/test-thread-termination.cc
@@ -255,6 +255,10 @@ TEST(TerminateMultipleV8ThreadsDefaultIsolate) {
threads[i]->Join();
delete threads[i];
}
+ {
+ v8::Locker locker;
+ v8::Locker::StopPreemption();
+ }
delete semaphore;
semaphore = NULL;
diff --git a/deps/v8/test/cctest/test-weakmaps.cc b/deps/v8/test/cctest/test-weakmaps.cc
index 56d593628a..7bba7b6486 100644
--- a/deps/v8/test/cctest/test-weakmaps.cc
+++ b/deps/v8/test/cctest/test-weakmaps.cc
@@ -48,11 +48,11 @@ static Handle<JSWeakMap> AllocateJSWeakMap() {
static void PutIntoWeakMap(Handle<JSWeakMap> weakmap,
Handle<JSObject> key,
- int value) {
+ Handle<Object> value) {
Handle<ObjectHashTable> table = PutIntoObjectHashTable(
Handle<ObjectHashTable>(ObjectHashTable::cast(weakmap->table())),
Handle<JSObject>(JSObject::cast(*key)),
- Handle<Smi>(Smi::FromInt(value)));
+ value);
weakmap->set_table(*table);
}
@@ -65,6 +65,7 @@ static void WeakPointerCallback(v8::Persistent<v8::Value> handle, void* id) {
TEST(Weakness) {
+ FLAG_incremental_marking = false;
LocalContext context;
v8::HandleScope scope;
Handle<JSWeakMap> weakmap = AllocateJSWeakMap();
@@ -83,7 +84,9 @@ TEST(Weakness) {
// Put entry into weak map.
{
v8::HandleScope scope;
- PutIntoWeakMap(weakmap, Handle<JSObject>(JSObject::cast(*key)), 23);
+ PutIntoWeakMap(weakmap,
+ Handle<JSObject>(JSObject::cast(*key)),
+ Handle<Smi>(Smi::FromInt(23)));
}
CHECK_EQ(1, ObjectHashTable::cast(weakmap->table())->NumberOfElements());
@@ -133,7 +136,7 @@ TEST(Shrinking) {
Handle<Map> map = FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
for (int i = 0; i < 32; i++) {
Handle<JSObject> object = FACTORY->NewJSObjectFromMap(map);
- PutIntoWeakMap(weakmap, object, i);
+ PutIntoWeakMap(weakmap, object, Handle<Smi>(Smi::FromInt(i)));
}
}
@@ -152,3 +155,72 @@ TEST(Shrinking) {
// Check shrunk capacity.
CHECK_EQ(32, ObjectHashTable::cast(weakmap->table())->Capacity());
}
+
+
+// Test that weak map values on an evacuation candidate which are not reachable
+// by other paths are correctly recorded in the slots buffer.
+TEST(Regress2060a) {
+ FLAG_always_compact = true;
+ LocalContext context;
+ v8::HandleScope scope;
+ Handle<JSFunction> function =
+ FACTORY->NewFunction(FACTORY->function_symbol(), FACTORY->null_value());
+ Handle<JSObject> key = FACTORY->NewJSObject(function);
+ Handle<JSWeakMap> weakmap = AllocateJSWeakMap();
+
+ // Start second old-space page so that values land on evacuation candidate.
+ Page* first_page = HEAP->old_pointer_space()->anchor()->next_page();
+ FACTORY->NewFixedArray(900 * KB / kPointerSize, TENURED);
+
+ // Fill up weak map with values on an evacuation candidate.
+ {
+ v8::HandleScope scope;
+ for (int i = 0; i < 32; i++) {
+ Handle<JSObject> object = FACTORY->NewJSObject(function, TENURED);
+ CHECK(!HEAP->InNewSpace(object->address()));
+ CHECK(!first_page->Contains(object->address()));
+ PutIntoWeakMap(weakmap, key, object);
+ }
+ }
+
+ // Force compacting garbage collection.
+ CHECK(FLAG_always_compact);
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+}
+
+
+// Test that weak map keys on an evacuation candidate which are reachable by
+// other strong paths are correctly recorded in the slots buffer.
+TEST(Regress2060b) {
+ FLAG_always_compact = true;
+#ifdef DEBUG
+ FLAG_verify_heap = true;
+#endif
+ LocalContext context;
+ v8::HandleScope scope;
+ Handle<JSFunction> function =
+ FACTORY->NewFunction(FACTORY->function_symbol(), FACTORY->null_value());
+
+ // Start second old-space page so that keys land on evacuation candidate.
+ Page* first_page = HEAP->old_pointer_space()->anchor()->next_page();
+ FACTORY->NewFixedArray(900 * KB / kPointerSize, TENURED);
+
+ // Fill up weak map with keys on an evacuation candidate.
+ Handle<JSObject> keys[32];
+ for (int i = 0; i < 32; i++) {
+ keys[i] = FACTORY->NewJSObject(function, TENURED);
+ CHECK(!HEAP->InNewSpace(keys[i]->address()));
+ CHECK(!first_page->Contains(keys[i]->address()));
+ }
+ Handle<JSWeakMap> weakmap = AllocateJSWeakMap();
+ for (int i = 0; i < 32; i++) {
+ PutIntoWeakMap(weakmap, keys[i], Handle<Smi>(Smi::FromInt(i)));
+ }
+
+ // Force compacting garbage collection. The subsequent collections are used
+ // to verify that key references were actually updated.
+ CHECK(FLAG_always_compact);
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+}
diff --git a/deps/v8/test/message/message.status b/deps/v8/test/message/message.status
index fc2896b1c9..441f8edd0d 100644
--- a/deps/v8/test/message/message.status
+++ b/deps/v8/test/message/message.status
@@ -28,4 +28,4 @@
prefix message
# All tests in the bug directory are expected to fail.
-bugs: FAIL
+bugs/*: FAIL
diff --git a/deps/v8/test/mjsunit/accessor-map-sharing.js b/deps/v8/test/mjsunit/accessor-map-sharing.js
new file mode 100644
index 0000000000..ab45afab05
--- /dev/null
+++ b/deps/v8/test/mjsunit/accessor-map-sharing.js
@@ -0,0 +1,176 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// Handy abbreviations.
+var dp = Object.defineProperty;
+var gop = Object.getOwnPropertyDescriptor;
+
+function getter() { return 111; }
+function setter(x) { print(222); }
+function anotherGetter() { return 333; }
+function anotherSetter(x) { print(444); }
+var obj1, obj2;
+
+// Two objects with the same getter.
+obj1 = {};
+dp(obj1, "alpha", { get: getter });
+obj2 = {};
+dp(obj2, "alpha", { get: getter });
+assertTrue(%HaveSameMap(obj1, obj2));
+
+// Two objects with the same getter, oldskool.
+obj1 = {};
+obj1.__defineGetter__("bravo", getter);
+assertEquals(getter, obj1.__lookupGetter__("bravo"));
+obj2 = {};
+obj2.__defineGetter__("bravo", getter);
+assertEquals(getter, obj2.__lookupGetter__("bravo"));
+assertTrue(%HaveSameMap(obj1, obj2));
+
+// Two objects with the same setter.
+obj1 = {};
+dp(obj1, "charlie", { set: setter });
+obj2 = {};
+dp(obj2, "charlie", { set: setter });
+assertTrue(%HaveSameMap(obj1, obj2));
+
+// Two objects with the same setter, oldskool.
+obj1 = {};
+obj1.__defineSetter__("delta", setter);
+assertEquals(setter, obj1.__lookupSetter__("delta"));
+obj2 = {};
+obj2.__defineSetter__("delta", setter);
+assertEquals(setter, obj2.__lookupSetter__("delta"));
+assertTrue(%HaveSameMap(obj1, obj2));
+
+// Two objects with the same getter and setter.
+obj1 = {};
+dp(obj1, "foxtrot", { get: getter, set: setter });
+obj2 = {};
+dp(obj2, "foxtrot", { get: getter, set: setter });
+assertTrue(%HaveSameMap(obj1, obj2));
+
+// Two objects with the same getter and setter, set separately.
+obj1 = {};
+dp(obj1, "golf", { get: getter, configurable: true });
+dp(obj1, "golf", { set: setter, configurable: true });
+obj2 = {};
+dp(obj2, "golf", { get: getter, configurable: true });
+dp(obj2, "golf", { set: setter, configurable: true });
+assertTrue(%HaveSameMap(obj1, obj2));
+
+// Two objects with the same getter and setter, set separately, oldskool.
+obj1 = {};
+obj1.__defineGetter__("hotel", getter);
+obj1.__defineSetter__("hotel", setter);
+obj2 = {};
+obj2.__defineGetter__("hotel", getter);
+obj2.__defineSetter__("hotel", setter);
+assertTrue(%HaveSameMap(obj1, obj2));
+
+// Attribute-only change, shouldn't affect previous descriptor properties.
+obj1 = {};
+dp(obj1, "india", { get: getter, configurable: true, enumerable: true });
+assertEquals(getter, gop(obj1, "india").get);
+assertTrue(gop(obj1, "india").configurable);
+assertTrue(gop(obj1, "india").enumerable);
+dp(obj1, "india", { enumerable: false });
+assertEquals(getter, gop(obj1, "india").get);
+assertTrue(gop(obj1, "india").configurable);
+assertFalse(gop(obj1, "india").enumerable);
+
+// Attribute-only change, shouldn't affect objects with previously shared maps.
+obj1 = {};
+dp(obj1, "juliet", { set: setter, configurable: true, enumerable: false });
+assertEquals(setter, gop(obj1, "juliet").set);
+assertTrue(gop(obj1, "juliet").configurable);
+assertFalse(gop(obj1, "juliet").enumerable);
+obj2 = {};
+dp(obj2, "juliet", { set: setter, configurable: true, enumerable: false });
+assertEquals(setter, gop(obj2, "juliet").set);
+assertTrue(gop(obj2, "juliet").configurable);
+assertFalse(gop(obj2, "juliet").enumerable);
+dp(obj1, "juliet", { set: setter, configurable: false, enumerable: true });
+assertEquals(setter, gop(obj1, "juliet").set);
+assertFalse(gop(obj1, "juliet").configurable);
+assertTrue(gop(obj1, "juliet").enumerable);
+assertEquals(setter, gop(obj2, "juliet").set);
+assertTrue(gop(obj2, "juliet").configurable);
+assertFalse(gop(obj2, "juliet").enumerable);
+
+// Two objects with the different getters.
+obj1 = {};
+dp(obj1, "kilo", { get: getter });
+obj2 = {};
+dp(obj2, "kilo", { get: anotherGetter });
+assertEquals(getter, gop(obj1, "kilo").get);
+assertEquals(anotherGetter, gop(obj2, "kilo").get);
+assertFalse(%HaveSameMap(obj1, obj2));
+
+// Two objects with the same getters and different setters.
+obj1 = {};
+dp(obj1, "lima", { get: getter, set: setter });
+obj2 = {};
+dp(obj2, "lima", { get: getter, set: anotherSetter });
+assertEquals(setter, gop(obj1, "lima").set);
+assertEquals(anotherSetter, gop(obj2, "lima").set);
+assertFalse(%HaveSameMap(obj1, obj2));
+
+// Even 'undefined' is a kind of getter.
+obj1 = {};
+dp(obj1, "mike", { get: undefined });
+assertTrue("mike" in obj1);
+assertEquals(undefined, gop(obj1, "mike").get);
+assertEquals(undefined, obj1.__lookupGetter__("mike"));
+assertEquals(undefined, gop(obj1, "mike").set);
+assertEquals(undefined, obj1.__lookupSetter__("mike"));
+
+// Even 'undefined' is a kind of setter.
+obj1 = {};
+dp(obj1, "november", { set: undefined });
+assertTrue("november" in obj1);
+assertEquals(undefined, gop(obj1, "november").get);
+assertEquals(undefined, obj1.__lookupGetter__("november"));
+assertEquals(undefined, gop(obj1, "november").set);
+assertEquals(undefined, obj1.__lookupSetter__("november"));
+
+// Redefining a data property.
+obj1 = {};
+obj1.oscar = 12345;
+dp(obj1, "oscar", { set: setter });
+assertEquals(setter, gop(obj1, "oscar").set);
+
+// Re-adding the same getter/attributes pair.
+obj1 = {};
+dp(obj1, "papa", { get: getter, configurable: true });
+dp(obj1, "papa", { get: getter, set: setter, configurable: true });
+assertEquals(getter, gop(obj1, "papa").get);
+assertEquals(setter, gop(obj1, "papa").set);
+assertTrue(gop(obj1, "papa").configurable);
+assertFalse(gop(obj1, "papa").enumerable);
diff --git a/deps/v8/test/mjsunit/array-bounds-check-removal.js b/deps/v8/test/mjsunit/array-bounds-check-removal.js
new file mode 100644
index 0000000000..81064aa237
--- /dev/null
+++ b/deps/v8/test/mjsunit/array-bounds-check-removal.js
@@ -0,0 +1,145 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --expose-gc
+
+var a = new Int32Array(1024);
+
+function test_base(base,cond) {
+ a[base + 1] = 1;
+ a[base + 4] = 2;
+ a[base + 3] = 3;
+ a[base + 2] = 4;
+ a[base + 4] = base + 4;
+ if (cond) {
+ a[base + 1] = 1;
+ a[base + 2] = 2;
+ a[base + 2] = 3;
+ a[base + 2] = 4;
+ a[base + 4] = base + 4;
+ } else {
+ a[base + 6] = 1;
+ a[base + 4] = 2;
+ a[base + 3] = 3;
+ a[base + 2] = 4;
+ a[base + 4] = base - 4;
+ }
+}
+
+function check_test_base(base,cond) {
+ if (cond) {
+ assertEquals(1, a[base + 1]);
+ assertEquals(4, a[base + 2]);
+ assertEquals(base + 4, a[base + 4]);
+ } else {
+ assertEquals(1, a[base + 6]);
+ assertEquals(3, a[base + 3]);
+ assertEquals(4, a[base + 2]);
+ assertEquals(base - 4, a[base + 4]);
+ }
+}
+
+
+function test_minus(base,cond) {
+ a[base - 1] = 1;
+ a[base - 2] = 2;
+ a[base + 4] = 3;
+ a[base] = 4;
+ a[base + 4] = base + 4;
+ if (cond) {
+ a[base - 4] = 1;
+ a[base + 5] = 2;
+ a[base + 3] = 3;
+ a[base + 2] = 4;
+ a[base + 4] = base + 4;
+ } else {
+ a[base + 6] = 1;
+ a[base + 4] = 2;
+ a[base + 3] = 3;
+ a[base + 2] = 4;
+ a[base + 4] = base - 4;
+ }
+}
+
+function check_test_minus(base,cond) {
+ if (cond) {
+ assertEquals(2, a[base + 5]);
+ assertEquals(3, a[base + 3]);
+ assertEquals(4, a[base + 2]);
+ assertEquals(base + 4, a[base + 4]);
+ } else {
+ assertEquals(1, a[base + 6]);
+ assertEquals(3, a[base + 3]);
+ assertEquals(4, a[base + 2]);
+ assertEquals(base - 4, a[base + 4]);
+ }
+}
+
+test_base(1,true);
+test_base(2,true);
+test_base(1,false);
+test_base(2,false);
+%OptimizeFunctionOnNextCall(test_base);
+test_base(3,true);
+check_test_base(3,true);
+test_base(3,false);
+check_test_base(3,false);
+
+test_minus(5,true);
+test_minus(6,true);
+%OptimizeFunctionOnNextCall(test_minus);
+test_minus(7,true);
+check_test_minus(7,true);
+test_minus(7,false);
+check_test_minus(7,false);
+
+// Optimization status:
+// YES: 1
+// NO: 2
+// ALWAYS: 3
+// NEVER: 4
+
+if (false) {
+test_base(5,true);
+test_base(6,true);
+test_base(5,false);
+test_base(6,false);
+%OptimizeFunctionOnNextCall(test_base);
+test_base(-2,true);
+assertTrue(%GetOptimizationStatus(test_base) != 1);
+
+test_base(5,true);
+test_base(6,true);
+test_base(5,false);
+test_base(6,false);
+%OptimizeFunctionOnNextCall(test_base);
+test_base(2048,true);
+assertTrue(%GetOptimizationStatus(test_base) != 1);
+}
+
+gc();
+
diff --git a/deps/v8/test/mjsunit/array-construct-transition.js b/deps/v8/test/mjsunit/array-construct-transition.js
index 577e321a55..f8d7c830e5 100644
--- a/deps/v8/test/mjsunit/array-construct-transition.js
+++ b/deps/v8/test/mjsunit/array-construct-transition.js
@@ -27,13 +27,13 @@
// Flags: --allow-natives-syntax --smi-only-arrays
-support_smi_only_arrays = %HasFastSmiOnlyElements(new Array(1,2,3,4,5,6,7,8));
+support_smi_only_arrays = %HasFastSmiElements(new Array(1,2,3,4,5,6));
if (support_smi_only_arrays) {
var a = new Array(0, 1, 2);
- assertTrue(%HasFastSmiOnlyElements(a));
+ assertTrue(%HasFastSmiElements(a));
var b = new Array(0.5, 1.2, 2.3);
assertTrue(%HasFastDoubleElements(b));
var c = new Array(0.5, 1.2, new Object());
- assertTrue(%HasFastElements(c));
+ assertTrue(%HasFastObjectElements(c));
}
diff --git a/deps/v8/test/mjsunit/array-literal-transitions.js b/deps/v8/test/mjsunit/array-literal-transitions.js
index f657525eb6..a96719d448 100644
--- a/deps/v8/test/mjsunit/array-literal-transitions.js
+++ b/deps/v8/test/mjsunit/array-literal-transitions.js
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -33,7 +33,7 @@
// in this test case. Depending on whether smi-only arrays are actually
// enabled, this test takes the appropriate code path to check smi-only arrays.
-support_smi_only_arrays = %HasFastSmiOnlyElements([1,2,3,4,5,6,7,8,9,10]);
+support_smi_only_arrays = %HasFastSmiElements([1,2,3,4,5,6,7,8,9,10]);
if (support_smi_only_arrays) {
print("Tests include smi-only arrays.");
@@ -46,14 +46,14 @@ function get(foo) { return foo; } // Used to generate dynamic values.
function array_literal_test() {
var a0 = [1, 2, 3];
- assertTrue(%HasFastSmiOnlyElements(a0));
+ assertTrue(%HasFastSmiElements(a0));
var a1 = [get(1), get(2), get(3)];
- assertTrue(%HasFastSmiOnlyElements(a1));
+ assertTrue(%HasFastSmiElements(a1));
var b0 = [1, 2, get("three")];
- assertTrue(%HasFastElements(b0));
+ assertTrue(%HasFastObjectElements(b0));
var b1 = [get(1), get(2), get("three")];
- assertTrue(%HasFastElements(b1));
+ assertTrue(%HasFastObjectElements(b1));
var c0 = [1, 2, get(3.5)];
assertTrue(%HasFastDoubleElements(c0));
@@ -75,7 +75,7 @@ function array_literal_test() {
var object = new Object();
var d0 = [1, 2, object];
- assertTrue(%HasFastElements(d0));
+ assertTrue(%HasFastObjectElements(d0));
assertEquals(object, d0[2]);
assertEquals(2, d0[1]);
assertEquals(1, d0[0]);
@@ -87,7 +87,7 @@ function array_literal_test() {
assertEquals(1, e0[0]);
var f0 = [1, 2, [1, 2]];
- assertTrue(%HasFastElements(f0));
+ assertTrue(%HasFastObjectElements(f0));
assertEquals([1,2], f0[2]);
assertEquals(2, f0[1]);
assertEquals(1, f0[0]);
@@ -115,9 +115,9 @@ if (support_smi_only_arrays) {
large =
[ 0, 1, 2, 3, 4, 5, d(), d(), d(), d(), d(), d(), o(), o(), o(), o() ];
assertFalse(%HasDictionaryElements(large));
- assertFalse(%HasFastSmiOnlyElements(large));
+ assertFalse(%HasFastSmiElements(large));
assertFalse(%HasFastDoubleElements(large));
- assertTrue(%HasFastElements(large));
+ assertTrue(%HasFastObjectElements(large));
assertEquals(large,
[0, 1, 2, 3, 4, 5, 2.5, 2.5, 2.5, 2.5, 2.5, 2.5,
new Object(), new Object(), new Object(), new Object()]);
diff --git a/deps/v8/test/mjsunit/compiler/alloc-object-huge.js b/deps/v8/test/mjsunit/compiler/alloc-object-huge.js
index d6d9f1b721..0b202f7580 100644
--- a/deps/v8/test/mjsunit/compiler/alloc-object-huge.js
+++ b/deps/v8/test/mjsunit/compiler/alloc-object-huge.js
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --allow-natives-syntax --inline-construct --nolimit-inlining
+// Flags: --allow-natives-syntax --inline-construct --max-inlined-source-size=999999 --max-inlined-nodes=999999 --max-inlined-nodes-cumulative=999999
// Test that huge constructors (more than 256 this assignments) are
// handled correctly.
diff --git a/deps/v8/test/mjsunit/compiler/inline-arguments.js b/deps/v8/test/mjsunit/compiler/inline-arguments.js
index b6adf7f6cc..f8a247608b 100644
--- a/deps/v8/test/mjsunit/compiler/inline-arguments.js
+++ b/deps/v8/test/mjsunit/compiler/inline-arguments.js
@@ -113,3 +113,70 @@ F4(1);
%OptimizeFunctionOnNextCall(test_adaptation);
test_adaptation();
})();
+
+// Test arguments access from the inlined function.
+function uninlinable(v) {
+ assertEquals(0, v);
+ try { } catch (e) { }
+ return 0;
+}
+
+function toarr_inner() {
+ var a = arguments;
+ var marker = a[0];
+ uninlinable(uninlinable(0, 0), marker.x);
+
+ var r = new Array();
+ for (var i = a.length - 1; i >= 1; i--) {
+ r.push(a[i]);
+ }
+
+ return r;
+}
+
+function toarr1(marker, a, b, c) {
+ return toarr_inner(marker, a / 2, b / 2, c / 2);
+}
+
+function toarr2(marker, a, b, c) {
+ var x = 0;
+ return uninlinable(uninlinable(0, 0),
+ x = toarr_inner(marker, a / 2, b / 2, c / 2)), x;
+}
+
+function test_toarr(toarr) {
+ var marker = { x: 0 };
+ assertArrayEquals([3, 2, 1], toarr(marker, 2, 4, 6));
+ assertArrayEquals([3, 2, 1], toarr(marker, 2, 4, 6));
+ %OptimizeFunctionOnNextCall(toarr);
+ assertArrayEquals([3, 2, 1], toarr(marker, 2, 4, 6));
+ delete marker.x;
+ assertArrayEquals([3, 2, 1], toarr(marker, 2, 4, 6));
+}
+
+test_toarr(toarr1);
+test_toarr(toarr2);
+
+// Test that arguments access from inlined function uses correct values.
+(function () {
+ function inner(x, y) {
+ "use strict";
+ x = 10;
+ y = 20;
+ for (var i = 0; i < 1; i++) {
+ for (var j = 1; j <= arguments.length; j++) {
+ return arguments[arguments.length - j];
+ }
+ }
+ }
+
+ function outer(x, y) {
+ return inner(x, y);
+ }
+
+ assertEquals(2, outer(1, 2));
+ assertEquals(2, outer(1, 2));
+ assertEquals(2, outer(1, 2));
+ %OptimizeFunctionOnNextCall(outer);
+ assertEquals(2, outer(1, 2));
+})();
diff --git a/deps/v8/test/mjsunit/compiler/inline-construct.js b/deps/v8/test/mjsunit/compiler/inline-construct.js
index af9e69c940..7a3f1e44bd 100644
--- a/deps/v8/test/mjsunit/compiler/inline-construct.js
+++ b/deps/v8/test/mjsunit/compiler/inline-construct.js
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --allow-natives-syntax --expose-gc --inline-construct
+// Flags: --allow-natives-syntax --inline-construct
// Test inlining of constructor calls.
@@ -68,7 +68,9 @@ function TestInAllContexts(constructor) {
%DeoptimizeFunction(value_context);
%DeoptimizeFunction(test_context);
%DeoptimizeFunction(effect_context);
- gc(); // Makes V8 forget about type information for *_context.
+ %ClearFunctionTypeFeedback(value_context);
+ %ClearFunctionTypeFeedback(test_context);
+ %ClearFunctionTypeFeedback(effect_context);
}
diff --git a/deps/v8/test/mjsunit/compiler/literals.js b/deps/v8/test/mjsunit/compiler/literals.js
index e910bb3c6a..8607cd9595 100644
--- a/deps/v8/test/mjsunit/compiler/literals.js
+++ b/deps/v8/test/mjsunit/compiler/literals.js
@@ -36,38 +36,38 @@ assertEquals(8, eval("6;'abc';8"));
// Characters just outside the ranges of hex-escapes.
// "/" comes just before "0".
-assertEquals("x1/", "\x1/");
-assertEquals("u111/", "\u111/");
+assertThrows('"\\x1/"');
+assertThrows('"\\u111/"');
assertEquals("\\x1/", RegExp("\\x1/").source);
assertEquals("\\u111/", RegExp("\\u111/").source);
// ":" comes just after "9".
-assertEquals("x1:", "\x1:");
-assertEquals("u111:", "\u111:");
+assertThrows('"\\x1:"');
+assertThrows('"\\u111:"');
assertEquals("\\x1:", /\x1:/.source);
assertEquals("\\u111:", /\u111:/.source);
// "`" comes just before "a".
-assertEquals("x1`", "\x1`");
-assertEquals("u111`", "\u111`");
+assertThrows('"\\x1`"');
+assertThrows('"\\u111`"');
assertEquals("\\x1`", /\x1`/.source);
assertEquals("\\u111`", /\u111`/.source);
// "g" comes just before "f".
-assertEquals("x1g", "\x1g");
-assertEquals("u111g", "\u111g");
+assertThrows('"\\x1g"');
+assertThrows('"\\u111g"');
assertEquals("\\x1g", /\x1g/.source);
assertEquals("\\u111g", /\u111g/.source);
// "@" comes just before "A".
-assertEquals("x1@", "\x1@");
-assertEquals("u111@", "\u111@");
+assertThrows('"\\x1@"');
+assertThrows('"\\u111@"');
assertEquals("\\x1@", /\x1@/.source);
assertEquals("\\u111@", /\u111@/.source);
// "G" comes just after "F".
-assertEquals("x1G", "\x1G");
-assertEquals("u111G", "\u111G");
+assertThrows('"\\x1G"');
+assertThrows('"\\u111G"');
assertEquals("\\x1G", /\x1G/.source);
assertEquals("\\u111G", /\u111G/.source);
diff --git a/deps/v8/test/mjsunit/compiler/optimize-bitnot.js b/deps/v8/test/mjsunit/compiler/optimize-bitnot.js
new file mode 100644
index 0000000000..28315a4fe2
--- /dev/null
+++ b/deps/v8/test/mjsunit/compiler/optimize-bitnot.js
@@ -0,0 +1,42 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+function f(x) {
+ return ~~x;
+}
+
+f(42);
+f(42);
+%OptimizeFunctionOnNextCall(f);
+assertEquals(42, f(42));
+assertEquals(42, f(42.5));
+assertEquals(1/0, 1/f(-0));
+assertEquals(-1, f(0xffffffff));
+assertEquals(0, f(undefined));
+assertEquals(0, f("abc"));
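The expectations in optimize-bitnot.js above all follow from ~~x reducing to ToInt32(x): fractions are truncated (42.5 becomes 42), -0 becomes +0, values that convert to NaN (undefined, 'abc') become 0, and 0xffffffff is reduced modulo 2^32 into the signed 32-bit range. A sketch of that last case in plain integer arithmetic:

int64_t v = 4294967295LL;                        // 0xffffffff as an exact integer
int32_t r = static_cast<int32_t>(uint32_t(v));   // reinterpreted as signed: -1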
diff --git a/deps/v8/test/mjsunit/date-parse.js b/deps/v8/test/mjsunit/date-parse.js
index b46e39ab61..cb4a951c7a 100644
--- a/deps/v8/test/mjsunit/date-parse.js
+++ b/deps/v8/test/mjsunit/date-parse.js
@@ -287,6 +287,9 @@ for (var i = 0; i < 24 * 365 * 100; i += 150) {
var testCasesNegative = [
'May 25 2008 1:30 (PM)) UTC', // Bad unmatched ')' after number.
'May 25 2008 1:30( )AM (PM)', //
+ 'a1', // Issue 126448, 53209.
+ 'nasfdjklsfjoaifg1',
+ 'x_2',
'May 25 2008 AAA (GMT)']; // Unknown word after number.
testCasesNegative.forEach(function (s) {
diff --git a/deps/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js b/deps/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js
index cf25c0c095..efbb2cc8ca 100644
--- a/deps/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js
+++ b/deps/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js
@@ -148,20 +148,9 @@ function listener(event, exec_state, event_data, data) {
assertFalse(frame.isConstructCall());
}
- // When function f is optimized (1 means YES, see runtime.cc) we
- // expect an optimized frame for f with g1, g2 and g3 inlined.
- if (%GetOptimizationStatus(f) == 1) {
- if (i == 1 || i == 2 || i == 3) {
- assertTrue(frame.isOptimizedFrame());
- assertTrue(frame.isInlinedFrame());
- assertEquals(4 - i, frame.inlinedFrameIndex());
- } else if (i == 4) {
- assertTrue(frame.isOptimizedFrame());
- assertFalse(frame.isInlinedFrame());
- } else {
- assertFalse(frame.isOptimizedFrame());
- assertFalse(frame.isInlinedFrame());
- }
+ if (i > 4) {
+ assertFalse(frame.isOptimizedFrame());
+ assertFalse(frame.isInlinedFrame());
}
}
diff --git a/deps/v8/test/mjsunit/debug-evaluate-locals-optimized.js b/deps/v8/test/mjsunit/debug-evaluate-locals-optimized.js
index c88a683a8c..9c56a12be2 100644
--- a/deps/v8/test/mjsunit/debug-evaluate-locals-optimized.js
+++ b/deps/v8/test/mjsunit/debug-evaluate-locals-optimized.js
@@ -138,20 +138,9 @@ function listener(event, exec_state, event_data, data) {
assertFalse(frame.isConstructCall());
}
- // When function f is optimized (1 means YES, see runtime.cc) we
- // expect an optimized frame for f with g1, g2 and g3 inlined.
- if (%GetOptimizationStatus(f) == 1) {
- if (i == 1 || i == 2 || i == 3) {
- assertTrue(frame.isOptimizedFrame());
- assertTrue(frame.isInlinedFrame());
- assertEquals(4 - i, frame.inlinedFrameIndex());
- } else if (i == 4) {
- assertTrue(frame.isOptimizedFrame());
- assertFalse(frame.isInlinedFrame());
- } else {
- assertFalse(frame.isOptimizedFrame());
- assertFalse(frame.isInlinedFrame());
- }
+ if (i > 4) {
+ assertFalse(frame.isOptimizedFrame());
+ assertFalse(frame.isInlinedFrame());
}
}
diff --git a/deps/v8/test/mjsunit/debug-function-scopes.js b/deps/v8/test/mjsunit/debug-function-scopes.js
new file mode 100644
index 0000000000..4262b950da
--- /dev/null
+++ b/deps/v8/test/mjsunit/debug-function-scopes.js
@@ -0,0 +1,162 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+
+// Get the Debug object exposed from the debug context global object.
+var Debug = debug.Debug;
+
+function CheckScope(scope_mirror, scope_expectations, expected_scope_type) {
+ assertEquals(expected_scope_type, scope_mirror.scopeType());
+
+ var scope_object = scope_mirror.scopeObject().value();
+
+ for (var name in scope_expectations) {
+ var actual = scope_object[name];
+ var expected = scope_expectations[name];
+ assertEquals(expected, actual);
+ }
+}
+
+// A copy of the scope types from mirror-debugger.js.
+var ScopeType = { Global: 0,
+ Local: 1,
+ With: 2,
+ Closure: 3,
+ Catch: 4,
+ Block: 5 };
+
+var f1 = (function F1(x) {
+ function F2(y) {
+ var z = x + y;
+ with ({w: 5, v: "Capybara"}) {
+ var F3 = function(a, b) {
+ function F4(p) {
+ return p + a + b + z + w + v.length;
+ }
+ return F4;
+ }
+ return F3(4, 5);
+ }
+ }
+ return F2(17);
+})(5);
+
+var mirror = Debug.MakeMirror(f1);
+
+assertEquals(5, mirror.scopeCount());
+
+CheckScope(mirror.scope(0), { a: 4, b: 5 }, ScopeType.Closure);
+CheckScope(mirror.scope(1), { w: 5, v: "Capybara" }, ScopeType.With);
+CheckScope(mirror.scope(2), { y: 17, z: 22 }, ScopeType.Closure);
+CheckScope(mirror.scope(3), { x: 5 }, ScopeType.Closure);
+CheckScope(mirror.scope(4), {}, ScopeType.Global);
+
+var f2 = function() { return 5; }
+
+var mirror = Debug.MakeMirror(f2);
+
+assertEquals(1, mirror.scopeCount());
+
+CheckScope(mirror.scope(0), {}, ScopeType.Global);
+
+var f3 = (function F1(invisible_parameter) {
+ var invisible1 = 1;
+ var visible1 = 10;
+ return (function F2() {
+ var invisible2 = 2;
+ return (function F3() {
+ var visible2 = 20;
+ var invisible2 = 3;
+ return (function () {return visible1 + visible2 + visible1a;});
+ })();
+ })();
+})(5);
+
+var mirror = Debug.MakeMirror(f3);
+
+assertEquals(3, mirror.scopeCount());
+
+CheckScope(mirror.scope(0), { visible2: 20 }, ScopeType.Closure);
+CheckScope(mirror.scope(1), { visible1: 10 }, ScopeType.Closure);
+CheckScope(mirror.scope(2), {}, ScopeType.Global);
+
+
+var f4 = (function One() {
+ try {
+ throw "I'm error 1";
+ } catch (e1) {
+ try {
+ throw "I'm error 2";
+ } catch (e2) {
+ return function GetError() {
+ return e1 + e2;
+ };
+ }
+ }
+})();
+
+var mirror = Debug.MakeMirror(f4);
+
+assertEquals(3, mirror.scopeCount());
+
+CheckScope(mirror.scope(0), { e2: "I'm error 2" }, ScopeType.Catch);
+CheckScope(mirror.scope(1), { e1: "I'm error 1" }, ScopeType.Catch);
+CheckScope(mirror.scope(2), {}, ScopeType.Global);
+
+
+var f5 = (function Raz(p1, p2) {
+ var p3 = p1 + p2;
+ return (function() {
+ var p4 = 20;
+ var p5 = 21;
+ var p6 = 22;
+ return eval("(function(p7){return p1 + p4 + p6 + p7})");
+ })();
+})(1,2);
+
+var mirror = Debug.MakeMirror(f5);
+
+assertEquals(3, mirror.scopeCount());
+
+CheckScope(mirror.scope(0), { p4: 20, p6: 22 }, ScopeType.Closure);
+CheckScope(mirror.scope(1), { p1: 1 }, ScopeType.Closure);
+CheckScope(mirror.scope(2), {}, ScopeType.Global);
+
+
+function CheckNoScopeVisible(f) {
+ var mirror = Debug.MakeMirror(f);
+ assertEquals(0, mirror.scopeCount());
+}
+
+CheckNoScopeVisible(Number);
+
+CheckNoScopeVisible(Function.toString);
+
+// This getter is known to be implemented as a closure.
+CheckNoScopeVisible(new Error().__lookupGetter__("stack"));
+
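The expectations above are easier to follow with the whole mirror chain in view. A minimal sketch that dumps every scope of a function mirror, assuming the same --expose-debug-as debug API the test relies on (Debug.MakeMirror, scopeCount, scope, scopeType, scopeObject):

// Dump each scope's type and captured variable names for a closure.
function DumpScopes(fn) {
  var mirror = Debug.MakeMirror(fn);
  for (var i = 0; i < mirror.scopeCount(); i++) {
    var scope = mirror.scope(i);
    var vars = Object.keys(scope.scopeObject().value()).join(", ");
    print(i + ": type=" + scope.scopeType() + " [" + vars + "]");
  }
}

// For f1 above this walks five scopes: the F3 closure (a, b), the
// with-scope (w, v), the F2 closure (y, z, where z = x + y = 5 + 17 = 22),
// the F1 closure (x), and the global scope.
DumpScopes(f1);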
diff --git a/deps/v8/test/mjsunit/debug-liveedit-stack-padding.js b/deps/v8/test/mjsunit/debug-liveedit-stack-padding.js
new file mode 100644
index 0000000000..36de356973
--- /dev/null
+++ b/deps/v8/test/mjsunit/debug-liveedit-stack-padding.js
@@ -0,0 +1,88 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+// Get the Debug object exposed from the debug context global object.
+
+Debug = debug.Debug;
+
+SlimFunction = eval(
+ "(function() {\n " +
+ " return 'Cat';\n" +
+ "})\n"
+);
+
+var script = Debug.findScript(SlimFunction);
+
+Debug.setScriptBreakPointById(script.id, 1, 0);
+
+var orig_animal = "'Cat'";
+var patch_pos = script.source.indexOf(orig_animal);
+var new_animal_patch = "'Capybara'";
+
+debugger_handler = (function() {
+ var already_called = false;
+ return function() {
+ if (already_called) {
+ return;
+ }
+ already_called = true;
+
+ var change_log = new Array();
+ try {
+ Debug.LiveEdit.TestApi.ApplySingleChunkPatch(script, patch_pos,
+ orig_animal.length, new_animal_patch, change_log);
+ } finally {
+ print("Change log: " + JSON.stringify(change_log) + "\n");
+ }
+ };
+})();
+
+var saved_exception = null;
+
+function listener(event, exec_state, event_data, data) {
+ if (event == Debug.DebugEvent.Break) {
+ try {
+ debugger_handler();
+ } catch (e) {
+ saved_exception = e;
+ }
+ } else {
+ print("Other: " + event);
+ }
+}
+
+Debug.setListener(listener);
+
+var animal = SlimFunction();
+
+if (saved_exception) {
+ print("Exception: " + saved_exception);
+ assertUnreachable();
+}
+
+assertEquals("Capybara", animal);
diff --git a/deps/v8/test/mjsunit/debug-scripts-request.js b/deps/v8/test/mjsunit/debug-scripts-request.js
index faa732e141..e027563b9b 100644
--- a/deps/v8/test/mjsunit/debug-scripts-request.js
+++ b/deps/v8/test/mjsunit/debug-scripts-request.js
@@ -78,8 +78,10 @@ function listener(event, exec_state, event_data, data) {
var response = safeEval(dcp.processDebugJSONRequest(request));
assertTrue(response.success);
- // Test filtering by id.
- assertEquals(2, response.body.length);
+ // Test filtering by id. We have to get at least one script back, but
+ // the exact number depends on the timing of GC.
+ assertTrue(response.body.length >= 1);
+
var script = response.body[0];
var request = '{' + base_request + ',"arguments":{"ids":[' +
script.id + ']}}';
diff --git a/deps/v8/test/mjsunit/debug-stepin-builtin-callback.js b/deps/v8/test/mjsunit/debug-stepin-builtin-callback.js
new file mode 100644
index 0000000000..223159d4f5
--- /dev/null
+++ b/deps/v8/test/mjsunit/debug-stepin-builtin-callback.js
@@ -0,0 +1,157 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+
+// Test stepping into callbacks passed to builtin functions.
+
+Debug = debug.Debug
+
+var exception = false;
+
+function array_listener(event, exec_state, event_data, data) {
+ try {
+ if (event == Debug.DebugEvent.Break) {
+ if (breaks == 0) {
+ exec_state.prepareStep(Debug.StepAction.StepIn, 2);
+ breaks = 1;
+ } else if (breaks <= 3) {
+ breaks++;
+ // Check whether we break at the expected line.
+ print(event_data.sourceLineText());
+ assertTrue(event_data.sourceLineText().indexOf("Expected to step") > 0);
+ exec_state.prepareStep(Debug.StepAction.StepIn, 3);
+ }
+ }
+ } catch (e) {
+ exception = true;
+ }
+};
+
+function cb_false(num) {
+ print("element " + num); // Expected to step to this point.
+ return false;
+}
+
+function cb_true(num) {
+ print("element " + num); // Expected to step to this point.
+ return true;
+}
+
+function cb_reduce(a, b) {
+ print("elements " + a + " and " + b); // Expected to step to this point.
+ return a + b;
+}
+
+var a = [1, 2, 3, 4];
+
+Debug.setListener(array_listener);
+
+var breaks = 0;
+debugger;
+a.forEach(cb_true);
+assertFalse(exception);
+assertEquals(4, breaks);
+
+breaks = 0;
+debugger;
+a.some(cb_false);
+assertFalse(exception);
+assertEquals(4, breaks);
+
+breaks = 0;
+debugger;
+a.every(cb_true);
+assertEquals(4, breaks);
+assertFalse(exception);
+
+breaks = 0;
+debugger;
+a.map(cb_true);
+assertFalse(exception);
+assertEquals(4, breaks);
+
+breaks = 0;
+debugger;
+a.filter(cb_true);
+assertFalse(exception);
+assertEquals(4, breaks);
+
+breaks = 0;
+debugger;
+a.reduce(cb_reduce);
+assertFalse(exception);
+assertEquals(4, breaks);
+
+breaks = 0;
+debugger;
+a.reduceRight(cb_reduce);
+assertFalse(exception);
+assertEquals(4, breaks);
+
+Debug.setListener(null);
+
+
+// Test two levels of builtin callbacks:
+// Array.forEach calls a callback function, which by itself uses
+// Array.forEach with another callback function.
+
+function second_level_listener(event, exec_state, event_data, data) {
+ try {
+ if (event == Debug.DebugEvent.Break) {
+ if (breaks == 0) {
+ exec_state.prepareStep(Debug.StepAction.StepIn, 3);
+ breaks = 1;
+ } else if (breaks <= 16) {
+ breaks++;
+ // Check whether we break at the expected line.
+ assertTrue(event_data.sourceLineText().indexOf("Expected to step") > 0);
+ // Step two steps further every four breaks to skip the
+ // forEach call in the first level of recursion.
+ var step = (breaks % 4 == 1) ? 6 : 3;
+ exec_state.prepareStep(Debug.StepAction.StepIn, step);
+ }
+ }
+ } catch (e) {
+ exception = true;
+ }
+};
+
+function cb_foreach(num) {
+ a.forEach(cb_true);
+ print("back to the first level of recursion.");
+}
+
+Debug.setListener(second_level_listener);
+
+breaks = 0;
+debugger;
+a.forEach(cb_foreach);
+assertFalse(exception);
+assertEquals(17, breaks);
+
+Debug.setListener(null);
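The step counts passed to prepareStep are what land the break inside the builtin's callback rather than on the call itself, and the 17 breaks expected above work out to the initial debugger break plus 4 outer elements times 4 inner steps. A stripped-down sketch of the listener shape, assuming the same debug API and a d8-style print; it only counts breaks and asserts nothing:

// Break on `debugger`, then keep stepping into the callback a few times.
var sketch_breaks = 0;

function sketch_listener(event, exec_state, event_data, data) {
  if (event == Debug.DebugEvent.Break && sketch_breaks < 5) {
    sketch_breaks++;
    // A StepIn count > 1 performs that many steps before breaking again.
    exec_state.prepareStep(Debug.StepAction.StepIn, 2);
  }
}

Debug.setListener(sketch_listener);
debugger;
[1, 2, 3].forEach(function (x) { print("stepped into element " + x); });
Debug.setListener(null);
print("breaks seen: " + sketch_breaks);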
diff --git a/deps/v8/test/mjsunit/declare-locally.js b/deps/v8/test/mjsunit/declare-locally.js
index 93fcb85f3c..20bfe6da1f 100644
--- a/deps/v8/test/mjsunit/declare-locally.js
+++ b/deps/v8/test/mjsunit/declare-locally.js
@@ -33,11 +33,13 @@
// This exercises the code in runtime.cc in
// DeclareGlobal...Locally().
+// Flags: --es52_globals
+
this.__proto__.foo = 42;
this.__proto__.bar = 87;
-eval("assertEquals(42, foo); var foo = 87;");
+eval("assertEquals(undefined, foo); var foo = 87;");
assertEquals(87, foo);
-eval("assertEquals(87, bar); const bar = 42;");
+eval("assertEquals(undefined, bar); const bar = 42;");
assertEquals(42, bar);
diff --git a/deps/v8/test/mjsunit/elements-kind.js b/deps/v8/test/mjsunit/elements-kind.js
index 4aa79de659..508a6b3cee 100644
--- a/deps/v8/test/mjsunit/elements-kind.js
+++ b/deps/v8/test/mjsunit/elements-kind.js
@@ -34,7 +34,7 @@
// in this test case. Depending on whether smi-only arrays are actually
// enabled, this test takes the appropriate code path to check smi-only arrays.
-support_smi_only_arrays = %HasFastSmiOnlyElements(new Array(1,2,3,4,5,6,7,8));
+support_smi_only_arrays = %HasFastSmiElements(new Array(1,2,3,4,5,6,7,8));
if (support_smi_only_arrays) {
print("Tests include smi-only arrays.");
@@ -59,8 +59,8 @@ var elements_kind = {
}
function getKind(obj) {
- if (%HasFastSmiOnlyElements(obj)) return elements_kind.fast_smi_only;
- if (%HasFastElements(obj)) return elements_kind.fast;
+ if (%HasFastSmiElements(obj)) return elements_kind.fast_smi_only;
+ if (%HasFastObjectElements(obj)) return elements_kind.fast;
if (%HasFastDoubleElements(obj)) return elements_kind.fast_double;
if (%HasDictionaryElements(obj)) return elements_kind.dictionary;
// Every external kind is also an external array.
@@ -116,7 +116,7 @@ if (support_smi_only_arrays) {
assertKind(elements_kind.fast_smi_only, too);
}
-// Make sure the element kind transitions from smionly when a non-smi is stored.
+// Make sure the element kind transitions from smi when a non-smi is stored.
var you = new Array();
assertKind(elements_kind.fast_smi_only, you);
for (var i = 0; i < 1337; i++) {
@@ -224,9 +224,11 @@ if (support_smi_only_arrays) {
for (var i = 0; i < 3; i++) {
convert_mixed(doubles, "three", elements_kind.fast);
}
+ convert_mixed(construct_smis(), "three", elements_kind.fast);
+ convert_mixed(construct_doubles(), "three", elements_kind.fast);
+ %OptimizeFunctionOnNextCall(convert_mixed);
smis = construct_smis();
doubles = construct_doubles();
- %OptimizeFunctionOnNextCall(convert_mixed);
convert_mixed(smis, 1, elements_kind.fast);
convert_mixed(doubles, 1, elements_kind.fast);
assertTrue(%HaveSameMap(smis, doubles));
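With the renamed natives it is easy to watch the one-way transition chain this test exercises in isolation. A minimal sketch, assuming d8 with --allow-natives-syntax, the mjsunit asserts, and smi-only arrays enabled (the same guard the file checks at the top):

// Elements kinds only widen: smi -> double -> object, never back.
var chain = [1, 2, 3];                  // array literal of small integers
assertTrue(%HasFastSmiElements(chain));
chain[0] = 1.5;                         // storing a heap number widens to double
assertTrue(%HasFastDoubleElements(chain));
chain[0] = "not a number";              // storing a non-number widens to object
assertTrue(%HasFastObjectElements(chain));
assertFalse(%HasFastSmiElements(chain));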
diff --git a/deps/v8/test/mjsunit/elements-transition-hoisting.js b/deps/v8/test/mjsunit/elements-transition-hoisting.js
index 5e78f10a0b..9ffb67ecf0 100644
--- a/deps/v8/test/mjsunit/elements-transition-hoisting.js
+++ b/deps/v8/test/mjsunit/elements-transition-hoisting.js
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -31,7 +31,7 @@
// not hoisted) correctly, don't change the semantics of programs and don't trigger
// deopt through hoisting in important situations.
-support_smi_only_arrays = %HasFastSmiOnlyElements(new Array(1,2,3,4,5,6));
+support_smi_only_arrays = %HasFastSmiElements(new Array(1,2,3,4,5,6));
if (support_smi_only_arrays) {
print("Tests include smi-only arrays.");
@@ -58,6 +58,9 @@ if (support_smi_only_arrays) {
}
testDoubleConversion4(new Array(5));
+ testDoubleConversion4(new Array(5)); // Call twice to make sure that second
+ // store is a transition and not
+ // optimistically MONOMORPHIC
%OptimizeFunctionOnNextCall(testDoubleConversion4);
testDoubleConversion4(new Array(5));
testDoubleConversion4(new Array(5));
@@ -73,13 +76,16 @@ if (support_smi_only_arrays) {
a[1] = 1;
var count = 3;
do {
- a.foo = object; // This map check should be hoistable
+ a.foo = object; // This map check should be hoistable
a[1] = object;
result = a.foo == object && a[1] == object;
} while (--count > 0);
}
testExactMapHoisting(new Array(5));
+ testExactMapHoisting(new Array(5)); // Call twice to make sure that second
+ // store is a transition and not
+ // optimistically MONOMORPHIC
%OptimizeFunctionOnNextCall(testExactMapHoisting);
testExactMapHoisting(new Array(5));
testExactMapHoisting(new Array(5));
@@ -98,19 +104,23 @@ if (support_smi_only_arrays) {
if (a.bar === undefined) {
a[1] = 2.5;
}
- a.foo = object; // This map check should NOT be hoistable because it
- // includes a check for the FAST_ELEMENTS map as well as
- // the FAST_DOUBLE_ELEMENTS map, which depends on the
- // double transition above in the if, which cannot be
- // hoisted.
+ a.foo = object; // This map check should NOT be hoistable because it
+ // includes a check for the FAST_ELEMENTS map as well as
+ // the FAST_DOUBLE_ELEMENTS map, which depends on the
+ // double transition above in the if, which cannot be
+ // hoisted.
} while (--count > 0);
}
testExactMapHoisting2(new Array(5));
+ testExactMapHoisting2(new Array(5)); // Call twice to make sure that second
+ // store is a transition and not
+ // optimistically MONOMORPHIC
%OptimizeFunctionOnNextCall(testExactMapHoisting2);
testExactMapHoisting2(new Array(5));
testExactMapHoisting2(new Array(5));
- assertTrue(2 != %GetOptimizationStatus(testExactMapHoisting2));
+ // Temporarily disabled - see bug 2176.
+ // assertTrue(2 != %GetOptimizationStatus(testExactMapHoisting2));
// Make sure that non-element related map checks do get hoisted if they use
// the transitioned map for the check and all transitions that they depend
@@ -123,15 +133,18 @@ if (support_smi_only_arrays) {
var count = 3;
do {
a[1] = 2.5;
- a.foo = object; // This map check should be hoistable because all elements
- // transitions in the loop can also be hoisted.
+ a.foo = object; // This map check should be hoistable because all elements
+ // transitions in the loop can also be hoisted.
} while (--count > 0);
}
var add_transition = new Array(5);
add_transition.foo = 0;
- add_transition[0] = new Object(); // For FAST_ELEMENT transition to be created
+ add_transition[0] = new Object(); // For FAST_ELEMENT transition to be created
testExactMapHoisting3(new Array(5));
+ testExactMapHoisting3(new Array(5)); // Call twice to make sure that second
+ // store is a transition and not
+ // optimistically MONOMORPHIC
%OptimizeFunctionOnNextCall(testExactMapHoisting3);
testExactMapHoisting3(new Array(5));
testExactMapHoisting3(new Array(5));
@@ -150,6 +163,10 @@ if (support_smi_only_arrays) {
}
testDominatingTransitionHoisting1(new Array(5));
+ testDominatingTransitionHoisting1(new Array(5)); // Call twice to make sure
+ // that second store is a
+ // transition and not
+ // optimistically MONOMORPHIC
%OptimizeFunctionOnNextCall(testDominatingTransitionHoisting1);
testDominatingTransitionHoisting1(new Array(5));
testDominatingTransitionHoisting1(new Array(5));
@@ -166,6 +183,9 @@ if (support_smi_only_arrays) {
}
testHoistingWithSideEffect(new Array(5));
+ testHoistingWithSideEffect(new Array(5)); // Call twice to make sure that
+ // second store is a transition and
+ // not optimistically MONOMORPHIC
%OptimizeFunctionOnNextCall(testHoistingWithSideEffect);
testHoistingWithSideEffect(new Array(5));
testHoistingWithSideEffect(new Array(5));
@@ -179,7 +199,7 @@ if (support_smi_only_arrays) {
a[1] = c;
a[2] = d;
assertTrue(true);
- a[3] = e; // TransitionElementsKind should be eliminated despite call.
+ a[3] = e; // TransitionElementsKind should be eliminated despite call.
a[4] = f;
} while (--count > 3);
}
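The repeated "call twice" additions all follow the same warm-up recipe: run the function on fresh arrays until the store site has actually seen the elements transition, and only then force optimization, so the optimizing compiler sees the transition instead of an optimistically monomorphic store. A sketch of the bare pattern, assuming --allow-natives-syntax:

// Warm up twice so the second store is recorded as a transition,
// then optimize and run once more on optimized code.
function storeDouble(a) {
  a[0] = 1.5;                           // smi elements -> double elements
}
storeDouble(new Array(5));              // first call: store site initialized
storeDouble(new Array(5));              // second call: transition recorded
%OptimizeFunctionOnNextCall(storeDouble);
storeDouble(new Array(5));              // optimized code keeps the transition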
diff --git a/deps/v8/test/mjsunit/elements-transition.js b/deps/v8/test/mjsunit/elements-transition.js
index 60e051b3fa..0dffd3723e 100644
--- a/deps/v8/test/mjsunit/elements-transition.js
+++ b/deps/v8/test/mjsunit/elements-transition.js
@@ -27,7 +27,7 @@
// Flags: --allow-natives-syntax --smi-only-arrays
-support_smi_only_arrays = %HasFastSmiOnlyElements(new Array(1,2,3,4,5,6,7,8));
+support_smi_only_arrays = %HasFastSmiElements(new Array(1,2,3,4,5,6,7,8));
if (support_smi_only_arrays) {
print("Tests include smi-only arrays.");
@@ -44,8 +44,8 @@ if (support_smi_only_arrays) {
var array_1 = new Array(length);
var array_2 = new Array(length);
- assertTrue(%HasFastSmiOnlyElements(array_1));
- assertTrue(%HasFastSmiOnlyElements(array_2));
+ assertTrue(%HasFastSmiElements(array_1));
+ assertTrue(%HasFastSmiElements(array_2));
for (var i = 0; i < length; i++) {
if (i == length - 5 && test_double) {
// Trigger conversion to fast double elements at length-5.
@@ -57,8 +57,8 @@ if (support_smi_only_arrays) {
// Trigger conversion to fast object elements at length-3.
set(array_1, i, 'object');
set(array_2, i, 'object');
- assertTrue(%HasFastElements(array_1));
- assertTrue(%HasFastElements(array_2));
+ assertTrue(%HasFastObjectElements(array_1));
+ assertTrue(%HasFastObjectElements(array_2));
} else if (i != length - 7) {
// Set the element to an integer but leave a hole at length-7.
set(array_1, i, 2*i+1);
diff --git a/deps/v8/test/mjsunit/error-constructors.js b/deps/v8/test/mjsunit/error-constructors.js
index 966a1629d7..107164df56 100644
--- a/deps/v8/test/mjsunit/error-constructors.js
+++ b/deps/v8/test/mjsunit/error-constructors.js
@@ -25,39 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-var e = new Error();
-assertFalse(e.hasOwnProperty('message'));
-Error.prototype.toString = Object.prototype.toString;
-assertEquals("[object Error]", Error.prototype.toString());
-assertEquals(Object.prototype, Error.prototype.__proto__);
-
-// Check that error construction does not call setters for the
-// properties on error objects in prototypes.
-function fail() { assertTrue(false); };
-ReferenceError.prototype.__defineSetter__('stack', fail);
-ReferenceError.prototype.__defineSetter__('message', fail);
-ReferenceError.prototype.__defineSetter__('type', fail);
-ReferenceError.prototype.__defineSetter__('arguments', fail);
-var e0 = new ReferenceError();
-var e1 = new ReferenceError('123');
-assertTrue(e1.hasOwnProperty('message'));
-assertTrue(e0.hasOwnProperty('stack'));
-assertTrue(e1.hasOwnProperty('stack'));
-assertTrue(e0.hasOwnProperty('type'));
-assertTrue(e1.hasOwnProperty('type'));
-assertTrue(e0.hasOwnProperty('arguments'));
-assertTrue(e1.hasOwnProperty('arguments'));
-
-// Check that the name property on error prototypes is read-only and
-// dont-delete. This is not specified, but allowing overwriting the
-// name property with a getter can leaks error objects from different
-// script tags in the same context in a browser setting. We therefore
-// disallow changes to the name property on error objects.
-assertEquals("ReferenceError", ReferenceError.prototype.name);
-delete ReferenceError.prototype.name;
-assertEquals("ReferenceError", ReferenceError.prototype.name);
-ReferenceError.prototype.name = "not a reference error";
-assertEquals("ReferenceError", ReferenceError.prototype.name);
+// Flags: --allow-natives-syntax
// Check that message and name are not enumerable on Error objects.
var desc = Object.getOwnPropertyDescriptor(Error.prototype, 'name');
@@ -75,8 +43,75 @@ assertFalse(desc['enumerable']);
desc = Object.getOwnPropertyDescriptor(e, 'stack');
assertFalse(desc['enumerable']);
+var e = new Error();
+assertFalse(e.hasOwnProperty('message'));
+
// name is not tested above, but in addition we should have no enumerable
// properties, so we simply assert that.
for (var v in e) {
assertUnreachable();
}
+
+// Check that error construction does not call setters for the
+// properties on error objects in prototypes.
+function fail() { assertUnreachable(); };
+ReferenceError.prototype.__defineSetter__('name', fail);
+ReferenceError.prototype.__defineSetter__('message', fail);
+ReferenceError.prototype.__defineSetter__('type', fail);
+ReferenceError.prototype.__defineSetter__('arguments', fail);
+ReferenceError.prototype.__defineSetter__('stack', fail);
+
+var e = new ReferenceError();
+assertTrue(e.hasOwnProperty('stack'));
+assertTrue(e.hasOwnProperty('type'));
+assertTrue(e.hasOwnProperty('arguments'));
+
+var e = new ReferenceError('123');
+assertTrue(e.hasOwnProperty('message'));
+assertTrue(e.hasOwnProperty('stack'));
+assertTrue(e.hasOwnProperty('type'));
+assertTrue(e.hasOwnProperty('arguments'));
+
+var e = %MakeReferenceError("my_test_error", [0, 1]);
+assertTrue(e.hasOwnProperty('stack'));
+assertTrue(e.hasOwnProperty('type'));
+assertTrue(e.hasOwnProperty('arguments'));
+assertEquals("my_test_error", e.type)
+
+// Check that intercepting property access from toString is prevented for
+// compiler errors. This is not specified, but allowing interception
+// through a getter can leak error objects from different
+// script tags in the same context in a browser setting.
+var errors = [SyntaxError, ReferenceError, TypeError];
+for (var i in errors) {
+ var name = errors[i].prototype.toString();
+ // Monkey-patch prototype.
+ var props = ["name", "message", "type", "arguments", "stack"];
+ for (var j in props) {
+ errors[i].prototype.__defineGetter__(props[j], fail);
+ }
+ // String conversion should not invoke monkey-patched getters on prototype.
+ var e = new errors[i];
+ assertEquals(name, e.toString());
+ // Custom getters in actual objects are welcome.
+ e.__defineGetter__("name", function() { return "mine"; });
+ assertEquals("mine", e.toString());
+}
+
+// Monkey-patching non-static errors should still be observable.
+function MyError() {}
+MyError.prototype = new Error;
+var errors = [Error, RangeError, EvalError, URIError, MyError];
+for (var i in errors) {
+ errors[i].prototype.__defineGetter__("name", function() { return "my"; });
+ errors[i].prototype.__defineGetter__("message", function() { return "moo"; });
+ var e = new errors[i];
+ assertEquals("my: moo", e.toString());
+}
+
+
+Error.prototype.toString = Object.prototype.toString;
+assertEquals("[object Error]", Error.prototype.toString());
+assertEquals(Object.prototype, Error.prototype.__proto__);
+var e = new Error("foo");
+assertEquals("[object Error]", e.toString());
diff --git a/deps/v8/test/mjsunit/external-array.js b/deps/v8/test/mjsunit/external-array.js
index 32f78a72d4..d02922006a 100644
--- a/deps/v8/test/mjsunit/external-array.js
+++ b/deps/v8/test/mjsunit/external-array.js
@@ -52,13 +52,53 @@ assertThrows(abfunc1);
// Test derivation from an ArrayBuffer
var ab = new ArrayBuffer(12);
var derived_uint8 = new Uint8Array(ab);
+assertSame(ab, derived_uint8.buffer);
assertEquals(12, derived_uint8.length);
+assertEquals(12, derived_uint8.byteLength);
+assertEquals(0, derived_uint8.byteOffset);
+assertEquals(1, derived_uint8.BYTES_PER_ELEMENT);
+var derived_uint8_2 = new Uint8Array(ab,7);
+assertSame(ab, derived_uint8_2.buffer);
+assertEquals(5, derived_uint8_2.length);
+assertEquals(5, derived_uint8_2.byteLength);
+assertEquals(7, derived_uint8_2.byteOffset);
+assertEquals(1, derived_uint8_2.BYTES_PER_ELEMENT);
+var derived_int16 = new Int16Array(ab);
+assertSame(ab, derived_int16.buffer);
+assertEquals(6, derived_int16.length);
+assertEquals(12, derived_int16.byteLength);
+assertEquals(0, derived_int16.byteOffset);
+assertEquals(2, derived_int16.BYTES_PER_ELEMENT);
+var derived_int16_2 = new Int16Array(ab,6);
+assertSame(ab, derived_int16_2.buffer);
+assertEquals(3, derived_int16_2.length);
+assertEquals(6, derived_int16_2.byteLength);
+assertEquals(6, derived_int16_2.byteOffset);
+assertEquals(2, derived_int16_2.BYTES_PER_ELEMENT);
var derived_uint32 = new Uint32Array(ab);
+assertSame(ab, derived_uint32.buffer);
assertEquals(3, derived_uint32.length);
+assertEquals(12, derived_uint32.byteLength);
+assertEquals(0, derived_uint32.byteOffset);
+assertEquals(4, derived_uint32.BYTES_PER_ELEMENT);
var derived_uint32_2 = new Uint32Array(ab,4);
+assertSame(ab, derived_uint32_2.buffer);
assertEquals(2, derived_uint32_2.length);
+assertEquals(8, derived_uint32_2.byteLength);
+assertEquals(4, derived_uint32_2.byteOffset);
+assertEquals(4, derived_uint32_2.BYTES_PER_ELEMENT);
var derived_uint32_3 = new Uint32Array(ab,4,1);
+assertSame(ab, derived_uint32_3.buffer);
assertEquals(1, derived_uint32_3.length);
+assertEquals(4, derived_uint32_3.byteLength);
+assertEquals(4, derived_uint32_3.byteOffset);
+assertEquals(4, derived_uint32_3.BYTES_PER_ELEMENT);
+var derived_float64 = new Float64Array(ab,0,1);
+assertSame(ab, derived_float64.buffer);
+assertEquals(1, derived_float64.length);
+assertEquals(8, derived_float64.byteLength);
+assertEquals(0, derived_float64.byteOffset);
+assertEquals(8, derived_float64.BYTES_PER_ELEMENT);
// If a given byteOffset and length references an area beyond the end of the
// ArrayBuffer an exception is raised.
@@ -87,6 +127,24 @@ function abfunc6() {
}
assertThrows(abfunc6);
+// Test that an array constructed without an array buffer creates one properly.
+a = new Uint8Array(31);
+assertEquals(a.byteLength, a.buffer.byteLength);
+assertEquals(a.length, a.buffer.byteLength);
+assertEquals(a.length * a.BYTES_PER_ELEMENT, a.buffer.byteLength);
+a = new Int16Array(5);
+assertEquals(a.byteLength, a.buffer.byteLength);
+assertEquals(a.length * a.BYTES_PER_ELEMENT, a.buffer.byteLength);
+a = new Float64Array(7);
+assertEquals(a.byteLength, a.buffer.byteLength);
+assertEquals(a.length * a.BYTES_PER_ELEMENT, a.buffer.byteLength);
+
+// Test that an implicitly created buffer is a valid buffer.
+a = new Float64Array(7);
+assertSame(a.buffer, (new Uint16Array(a.buffer)).buffer);
+assertSame(a.buffer, (new Float32Array(a.buffer,4)).buffer);
+assertSame(a.buffer, (new Int8Array(a.buffer,3,51)).buffer);
+
// Test the correct behavior of the |BYTES_PER_ELEMENT| property (which is
// "constant", but not read-only).
a = new Int32Array(2);
@@ -351,3 +409,25 @@ assertTrue(isNaN(float64_array[0]));
%OptimizeFunctionOnNextCall(store_float64_undefined);
store_float64_undefined(float64_array);
assertTrue(isNaN(float64_array[0]));
+
+
+// Check handling of 0-sized buffers and arrays.
+
+ab = new ArrayBuffer(0);
+assertEquals(0, ab.byteLength);
+a = new Int8Array(ab);
+assertEquals(0, a.byteLength);
+assertEquals(0, a.length);
+a[0] = 1;
+assertEquals(undefined, a[0])
+ab = new ArrayBuffer(16);
+a = new Float32Array(ab,4,0);
+assertEquals(0, a.byteLength);
+assertEquals(0, a.length);
+a[0] = 1;
+assertEquals(undefined, a[0])
+a = new Uint16Array(0);
+assertEquals(0, a.byteLength);
+assertEquals(0, a.length);
+a[0] = 1;
+assertEquals(undefined, a[0])
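The view geometry asserted above reduces to one formula, length = (buffer.byteLength - byteOffset) / BYTES_PER_ELEMENT, and all views over one buffer alias the same bytes. A small self-contained sketch:

// Three views over one 12-byte buffer, mirroring the assertions above.
var buf = new ArrayBuffer(12);
var u8  = new Uint8Array(buf);          // (12 - 0) / 1 = 12 elements
var i16 = new Int16Array(buf, 6);       // (12 - 6) / 2 = 3 elements
var u32 = new Uint32Array(buf, 4, 1);   // explicit length: 1 element of 4 bytes
// The views share storage: filling bytes 4..7 through the byte view makes
// the single uint32 element read 0xffffffff regardless of byte order.
u8[4] = 0xff; u8[5] = 0xff; u8[6] = 0xff; u8[7] = 0xff;
// u32[0] === 4294967295 and i16[0] (bytes 6..7) === -1 here.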
diff --git a/deps/v8/test/mjsunit/fast-array-length.js b/deps/v8/test/mjsunit/fast-array-length.js
new file mode 100644
index 0000000000..42f2c38f49
--- /dev/null
+++ b/deps/v8/test/mjsunit/fast-array-length.js
@@ -0,0 +1,37 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// This is a regression test for overlapping key and value registers.
+
+
+var a = [0, 1, 2, 3, 4, 5];
+assertTrue(%HasFastSmiElements(a));
+a.length = (1 << 30);
+assertFalse(%HasFastSmiElements(a));
+
diff --git a/deps/v8/test/mjsunit/fast-non-keyed.js b/deps/v8/test/mjsunit/fast-non-keyed.js
new file mode 100644
index 0000000000..c2f7fc7f96
--- /dev/null
+++ b/deps/v8/test/mjsunit/fast-non-keyed.js
@@ -0,0 +1,113 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// Check that keyed stores make things go dict mode faster than non-keyed
+// stores.
+
+function AddProps(obj) {
+ for (var i = 0; i < 26; i++) {
+ obj["x" + i] = 0;
+ }
+}
+
+
+function AddPropsNonKeyed(obj) {
+ obj.x0 = 0;
+ obj.x1 = 0;
+ obj.x2 = 0;
+ obj.x3 = 0;
+ obj.x4 = 0;
+ obj.x5 = 0;
+ obj.x6 = 0;
+ obj.x7 = 0;
+ obj.x8 = 0;
+ obj.x9 = 0;
+ obj.x10 = 0;
+ obj.x11 = 0;
+ obj.x12 = 0;
+ obj.x13 = 0;
+ obj.x14 = 0;
+ obj.x15 = 0;
+ obj.x16 = 0;
+ obj.x17 = 0;
+ obj.x18 = 0;
+ obj.x19 = 0;
+ obj.x20 = 0;
+ obj.x21 = 0;
+ obj.x22 = 0;
+ obj.x23 = 0;
+ obj.x24 = 0;
+ obj.x25 = 0;
+}
+
+function AddProps3(obj) {
+ obj["x0"] = 0;
+ obj["x1"] = 0;
+ obj["x2"] = 0;
+ obj["x3"] = 0;
+ obj["x4"] = 0;
+ obj["x5"] = 0;
+ obj["x6"] = 0;
+ obj["x7"] = 0;
+ obj["x8"] = 0;
+ obj["x9"] = 0;
+ obj["x10"] = 0;
+ obj["x11"] = 0;
+ obj["x12"] = 0;
+ obj["x13"] = 0;
+ obj["x14"] = 0;
+ obj["x15"] = 0;
+ obj["x16"] = 0;
+ obj["x17"] = 0;
+ obj["x18"] = 0;
+ obj["x19"] = 0;
+ obj["x20"] = 0;
+ obj["x21"] = 0;
+ obj["x22"] = 0;
+ obj["x23"] = 0;
+ obj["x24"] = 0;
+ obj["x25"] = 0;
+}
+
+
+var keyed = {};
+AddProps(keyed);
+assertFalse(%HasFastProperties(keyed));
+
+var non_keyed = {};
+AddPropsNonKeyed(non_keyed);
+assertTrue(%HasFastProperties(non_keyed));
+
+var obj3 = {};
+AddProps3(obj3);
+assertTrue(%HasFastProperties(obj3));
+
+var bad_name = {};
+bad_name[".foo"] = 0;
+assertFalse(%HasFastProperties(bad_name));
diff --git a/deps/v8/test/mjsunit/fast-prototype.js b/deps/v8/test/mjsunit/fast-prototype.js
new file mode 100644
index 0000000000..f2fc20228b
--- /dev/null
+++ b/deps/v8/test/mjsunit/fast-prototype.js
@@ -0,0 +1,113 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// Check that objects that are used for prototypes are in the fast mode.
+
+function Super() {
+}
+
+
+function Sub() {
+}
+
+
+function AddProps(obj) {
+ for (var i = 0; i < 26; i++) {
+ obj["x" + i] = 0;
+ }
+}
+
+
+function DoProtoMagic(proto, set__proto__) {
+ if (set__proto__) {
+ (new Sub()).__proto__ = proto;
+ } else {
+ Sub.prototype = proto;
+ }
+}
+
+
+function test(use_new, add_first, set__proto__, same_map_as) {
+ var proto = use_new ? new Super() : {};
+
+ // New object is fast.
+ assertTrue(%HasFastProperties(proto));
+
+ if (add_first) {
+ AddProps(proto);
+ // Adding this many properties makes it slow.
+ assertFalse(%HasFastProperties(proto));
+ DoProtoMagic(proto, set__proto__);
+ // Making it a prototype makes it fast again.
+ assertTrue(%HasFastProperties(proto));
+ } else {
+ DoProtoMagic(proto, set__proto__);
+ // Still fast
+ assertTrue(%HasFastProperties(proto));
+ AddProps(proto);
+ // Setting the bit means it is still fast with all these properties.
+ assertTrue(%HasFastProperties(proto));
+ }
+ if (same_map_as && !add_first) {
+ assertTrue(%HaveSameMap(same_map_as, proto));
+ }
+ return proto;
+}
+
+
+for (var i = 0; i < 4; i++) {
+ var set__proto__ = ((i & 1) != 0);
+ var use_new = ((i & 2) != 0);
+
+ test(use_new, true, set__proto__);
+
+ var last = test(use_new, false, set__proto__);
+ test(use_new, false, set__proto__, last);
+}
+
+
+var x = {a: 1, b: 2, c: 3};
+var o = { __proto__: x };
+assertTrue(%HasFastProperties(x));
+for (key in x) {
+ assertTrue(key == 'a');
+ break;
+}
+delete x.b;
+for (key in x) {
+ assertTrue(key == 'a');
+ break;
+}
+assertFalse(%HasFastProperties(x));
+x.d = 4;
+assertFalse(%HasFastProperties(x));
+for (key in x) {
+ assertTrue(key == 'a');
+ break;
+}
diff --git a/deps/v8/test/mjsunit/harmony/debug-function-scopes.js b/deps/v8/test/mjsunit/harmony/debug-function-scopes.js
new file mode 100644
index 0000000000..0113be672b
--- /dev/null
+++ b/deps/v8/test/mjsunit/harmony/debug-function-scopes.js
@@ -0,0 +1,115 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug --harmony-scoping
+
+"use strict";
+
+// Get the Debug object exposed from the debug context global object.
+var Debug = debug.Debug;
+
+function CheckScope(scope_mirror, scope_expectations, expected_scope_type) {
+ assertEquals(expected_scope_type, scope_mirror.scopeType());
+
+ var scope_object = scope_mirror.scopeObject().value();
+
+ for (let name in scope_expectations) {
+ let actual = scope_object[name];
+ let expected = scope_expectations[name];
+ assertEquals(expected, actual);
+ }
+}
+
+// A copy of the scope types from mirror-debugger.js.
+var ScopeType = { Global: 0,
+ Local: 1,
+ With: 2,
+ Closure: 3,
+ Catch: 4,
+ Block: 5 };
+
+var f1 = (function F1(x) {
+ function F2(y) {
+ var z = x + y;
+ {
+ var w = 5;
+ var v = "Capybara";
+ var F3 = function(a, b) {
+ function F4(p) {
+ return p + a + b + z + w + v.length;
+ }
+ return F4;
+ }
+ return F3(4, 5);
+ }
+ }
+ return F2(17);
+})(5);
+
+var mirror = Debug.MakeMirror(f1);
+
+assertEquals(4, mirror.scopeCount());
+
+CheckScope(mirror.scope(0), { a: 4, b: 5 }, ScopeType.Closure);
+CheckScope(mirror.scope(1), { z: 22, w: 5, v: "Capybara" }, ScopeType.Closure);
+CheckScope(mirror.scope(2), { x: 5 }, ScopeType.Closure);
+CheckScope(mirror.scope(3), {}, ScopeType.Global);
+
+var f2 = (function() {
+ var v1 = 3;
+ var v2 = 4;
+ let l0 = 0;
+ {
+ var v3 = 5;
+ let l1 = 6;
+ let l2 = 7;
+ {
+ var v4 = 8;
+ let l3 = 9;
+ {
+ var v5 = "Cat";
+ let l4 = 11;
+ var v6 = l4;
+ return function() {
+ return l0 + v1 + v3 + l2 + l3 + v6;
+ };
+ }
+ }
+ }
+})();
+
+var mirror = Debug.MakeMirror(f2);
+
+assertEquals(5, mirror.scopeCount());
+
+// Implementation artifact: l4 isn't used in the closure, but it is still saved.
+CheckScope(mirror.scope(0), { l4: 11 }, ScopeType.Block);
+
+CheckScope(mirror.scope(1), { l3: 9 }, ScopeType.Block);
+CheckScope(mirror.scope(2), { l1: 6, l2: 7 }, ScopeType.Block);
+CheckScope(mirror.scope(3), { v1:3, l0: 0, v3: 5, v6: 11 }, ScopeType.Closure);
+CheckScope(mirror.scope(4), {}, ScopeType.Global);
diff --git a/deps/v8/test/mjsunit/harmony/module-linking.js b/deps/v8/test/mjsunit/harmony/module-linking.js
new file mode 100644
index 0000000000..13ca6f782f
--- /dev/null
+++ b/deps/v8/test/mjsunit/harmony/module-linking.js
@@ -0,0 +1,121 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-modules --harmony-scoping
+
+// Test basic module linking.
+
+"use strict";
+
+let log = "";
+
+export let x = (log += "1");
+
+export module B = A.B
+
+export module A {
+ export let x = (log += "2");
+ let y = (log += "3");
+ export function f() { log += "5" };
+ export module B {
+ module BB = B;
+ export BB, x;
+ let x = (log += "4");
+ f();
+ let y = (log += "6");
+ }
+ export let z = (log += "7");
+ export module C {
+ export let z = (log += "8");
+ export module D = B
+ export module C = A.C
+ }
+ module D {}
+}
+
+export module M1 {
+ export module A2 = M2;
+ export let x = (log += "9");
+}
+export module M2 {
+ export module A1 = M1;
+ export let x = (log += "0");
+}
+
+assertEquals("object", typeof A);
+assertTrue('x' in A);
+assertFalse('y' in A);
+assertTrue('f' in A);
+assertTrue('B' in A);
+assertTrue('z' in A);
+assertTrue('C' in A);
+assertFalse('D' in A);
+
+assertEquals("object", typeof B);
+assertTrue('BB' in B);
+assertTrue('x' in B);
+assertFalse('y' in B);
+
+assertEquals("object", typeof A.B);
+assertTrue('BB' in A.B);
+assertTrue('x' in A.B);
+assertFalse('y' in A.B);
+
+assertEquals("object", typeof A.B.BB);
+assertTrue('BB' in A.B.BB);
+assertTrue('x' in A.B.BB);
+assertFalse('y' in A.B.BB);
+
+assertEquals("object", typeof A.C);
+assertTrue('z' in A.C);
+assertTrue('D' in A.C);
+assertTrue('C' in A.C);
+
+assertEquals("object", typeof M1);
+assertEquals("object", typeof M2);
+assertTrue('A2' in M1);
+assertTrue('A1' in M2);
+assertEquals("object", typeof M1.A2);
+assertEquals("object", typeof M2.A1);
+assertTrue('A1' in M1.A2);
+assertTrue('A2' in M2.A1);
+assertEquals("object", typeof M1.A2.A1);
+assertEquals("object", typeof M2.A1.A2);
+
+assertSame(B, A.B);
+assertSame(B, B.BB);
+assertSame(B, A.C.D);
+assertSame(A.C, A.C.C);
+assertFalse(A.D === A.C.D);
+
+assertSame(M1, M2.A1);
+assertSame(M2, M1.A2);
+assertSame(M1, M1.A2.A1);
+assertSame(M2, M2.A1.A2);
+
+// TODO(rossberg): inner declarations are not executed yet.
+// assertEquals("1234567890", log);
diff --git a/deps/v8/test/mjsunit/harmony/module-parsing.js b/deps/v8/test/mjsunit/harmony/module-parsing.js
index 93e69e3ad9..cdd0a2e00d 100644
--- a/deps/v8/test/mjsunit/harmony/module-parsing.js
+++ b/deps/v8/test/mjsunit/harmony/module-parsing.js
@@ -70,7 +70,7 @@ module B {
import i0 from I
import i1, i2, i3, M from I
- import i4, i5 from "http://where"
+ //import i4, i5 from "http://where"
}
module I {
@@ -85,7 +85,7 @@ module D3 = D2
module E1 at "http://where"
module E2 at "http://where";
-module E3 = E1.F
+module E3 = E1
// Check that ASI does not interfere.
@@ -103,11 +103,11 @@ at
"file://local"
import
-x
+vx
,
-y
+vy
from
-"file://local"
+B
module Wrap {
diff --git a/deps/v8/test/mjsunit/harmony/module-resolution.js b/deps/v8/test/mjsunit/harmony/module-resolution.js
index f9f492cffc..a1b991749c 100644
--- a/deps/v8/test/mjsunit/harmony/module-resolution.js
+++ b/deps/v8/test/mjsunit/harmony/module-resolution.js
@@ -129,7 +129,7 @@ export module M2 {
export module External at "external.js"
export module External1 = External
-export module ExternalA = External.A
+//export module ExternalA = External.A
export module InnerExternal {
export module E at "external.js"
}
diff --git a/deps/v8/test/mjsunit/harmony/proxies.js b/deps/v8/test/mjsunit/harmony/proxies.js
index 8d8f83996e..7170ffd9c7 100644
--- a/deps/v8/test/mjsunit/harmony/proxies.js
+++ b/deps/v8/test/mjsunit/harmony/proxies.js
@@ -572,15 +572,16 @@ TestSetThrow(Proxy.create({
}))
+var rec
var key
var val
-function TestSetForDerived(handler) {
- TestWithProxies(TestSetForDerived2, handler)
+function TestSetForDerived(trap) {
+ TestWithProxies(TestSetForDerived2, trap)
}
-function TestSetForDerived2(create, handler) {
- var p = create(handler)
+function TestSetForDerived2(create, trap) {
+ var p = create({getPropertyDescriptor: trap, getOwnPropertyDescriptor: trap})
var o = Object.create(p, {x: {value: 88, writable: true},
'1': {value: 89, writable: true}})
@@ -607,10 +608,16 @@ function TestSetForDerived2(create, handler) {
assertEquals(45, o.p_nonwritable = 45)
assertEquals("p_nonwritable", key)
- assertEquals(45, o.p_nonwritable)
+ assertFalse(Object.prototype.hasOwnProperty.call(o, "p_nonwritable"))
+
+ assertThrows(function(){ "use strict"; o.p_nonwritable = 45 }, TypeError)
+ assertEquals("p_nonwritable", key)
+ assertFalse(Object.prototype.hasOwnProperty.call(o, "p_nonwritable"))
+ val = ""
assertEquals(46, o.p_setter = 46)
assertEquals("p_setter", key)
+ assertSame(o, rec)
assertEquals(46, val) // written to parent
assertFalse(Object.prototype.hasOwnProperty.call(o, "p_setter"))
@@ -624,32 +631,43 @@ function TestSetForDerived2(create, handler) {
assertThrows(function(){ "use strict"; o.p_nosetter = 50 }, TypeError)
assertEquals("p_nosetter", key)
assertEquals("", val) // not written at all
+ assertFalse(Object.prototype.hasOwnProperty.call(o, "p_nosetter"));
assertThrows(function(){ o.p_nonconf = 53 }, TypeError)
assertEquals("p_nonconf", key)
+ assertFalse(Object.prototype.hasOwnProperty.call(o, "p_nonconf"));
assertThrows(function(){ o.p_throw = 51 }, "myexn")
assertEquals("p_throw", key)
+ assertFalse(Object.prototype.hasOwnProperty.call(o, "p_throw"));
assertThrows(function(){ o.p_setterthrow = 52 }, "myexn")
assertEquals("p_setterthrow", key)
+ assertFalse(Object.prototype.hasOwnProperty.call(o, "p_setterthrow"));
}
-TestSetForDerived({
- getPropertyDescriptor: function(k) {
+
+TestSetForDerived(
+ function(k) {
key = k;
switch (k) {
case "p_writable": return {writable: true, configurable: true}
case "p_nonwritable": return {writable: false, configurable: true}
- case "p_setter":return {set: function(x) { val = x }, configurable: true}
- case "p_nosetter": return {get: function() { return 1 }, configurable: true}
- case "p_nonconf":return {}
+ case "p_setter": return {
+ set: function(x) { rec = this; val = x },
+ configurable: true
+ }
+ case "p_nosetter": return {
+ get: function() { return 1 },
+ configurable: true
+ }
+ case "p_nonconf": return {}
case "p_throw": throw "myexn"
case "p_setterthrow": return {set: function(x) { throw "myexn" }}
default: return undefined
}
}
-})
+)
// Evil proxy-induced side-effects shouldn't crash.
@@ -1630,8 +1648,8 @@ TestPropertyNames([], {
getOwnPropertyNames: function() { return [] }
})
-TestPropertyNames(["a", "zz", " ", "0"], {
- getOwnPropertyNames: function() { return ["a", "zz", " ", 0] }
+TestPropertyNames(["a", "zz", " ", "0", "toString"], {
+ getOwnPropertyNames: function() { return ["a", "zz", " ", 0, "toString"] }
})
TestPropertyNames(["throw", "function "], {
@@ -1678,8 +1696,8 @@ TestKeys([], {
keys: function() { return [] }
})
-TestKeys(["a", "zz", " ", "0"], {
- keys: function() { return ["a", "zz", " ", 0] }
+TestKeys(["a", "zz", " ", "0", "toString"], {
+ keys: function() { return ["a", "zz", " ", 0, "toString"] }
})
TestKeys(["throw", "function "], {
diff --git a/deps/v8/test/mjsunit/math-floor-of-div.js b/deps/v8/test/mjsunit/math-floor-of-div.js
new file mode 100644
index 0000000000..e917182c71
--- /dev/null
+++ b/deps/v8/test/mjsunit/math-floor-of-div.js
@@ -0,0 +1,216 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --nouse_inlining
+
+// Use this function as reference. Make sure it is not inlined.
+function div(a, b) {
+ return a / b;
+}
+
+var limit = 0x1000000;
+var exhaustive_limit = 100;
+var step = 10;
+var values = [0x10000001,
+ 0x12345678,
+ -0x789abcdf, // 0x87654321
+ 0x01234567,
+ 0x76543210,
+ -0x80000000, // 0x80000000
+ 0x7fffffff,
+ -0x0fffffff, // 0xf0000001
+ 0x00000010,
+ -0x01000000 // 0xff000000
+ ];
+
+function test_div() {
+ var c = 0;
+ for (var k = 0; k <= limit; k++) {
+ if (k > exhaustive_limit) { c += step; k += c; }
+ assertEquals(Math.floor(div(k, 1)), Math.floor(k / 1));
+ assertEquals(Math.floor(div(k, -1)), Math.floor(k / -1));
+ assertEquals(Math.floor(div(k, 2)), Math.floor(k / 2));
+ assertEquals(Math.floor(div(k, -2)), Math.floor(k / -2));
+ assertEquals(Math.floor(div(k, 3)), Math.floor(k / 3));
+ assertEquals(Math.floor(div(k, -3)), Math.floor(k / -3));
+ assertEquals(Math.floor(div(k, 4)), Math.floor(k / 4));
+ assertEquals(Math.floor(div(k, -4)), Math.floor(k / -4));
+ assertEquals(Math.floor(div(k, 5)), Math.floor(k / 5));
+ assertEquals(Math.floor(div(k, -5)), Math.floor(k / -5));
+ assertEquals(Math.floor(div(k, 6)), Math.floor(k / 6));
+ assertEquals(Math.floor(div(k, -6)), Math.floor(k / -6));
+ assertEquals(Math.floor(div(k, 7)), Math.floor(k / 7));
+ assertEquals(Math.floor(div(k, -7)), Math.floor(k / -7));
+ assertEquals(Math.floor(div(k, 8)), Math.floor(k / 8));
+ assertEquals(Math.floor(div(k, -8)), Math.floor(k / -8));
+ assertEquals(Math.floor(div(k, 9)), Math.floor(k / 9));
+ assertEquals(Math.floor(div(k, -9)), Math.floor(k / -9));
+ assertEquals(Math.floor(div(k, 10)), Math.floor(k / 10));
+ assertEquals(Math.floor(div(k, -10)), Math.floor(k / -10));
+ assertEquals(Math.floor(div(k, 11)), Math.floor(k / 11));
+ assertEquals(Math.floor(div(k, -11)), Math.floor(k / -11));
+ assertEquals(Math.floor(div(k, 12)), Math.floor(k / 12));
+ assertEquals(Math.floor(div(k, -12)), Math.floor(k / -12));
+ assertEquals(Math.floor(div(k, 13)), Math.floor(k / 13));
+ assertEquals(Math.floor(div(k, -13)), Math.floor(k / -13));
+ assertEquals(Math.floor(div(k, 14)), Math.floor(k / 14));
+ assertEquals(Math.floor(div(k, -14)), Math.floor(k / -14));
+ assertEquals(Math.floor(div(k, 15)), Math.floor(k / 15));
+ assertEquals(Math.floor(div(k, -15)), Math.floor(k / -15));
+ assertEquals(Math.floor(div(k, 16)), Math.floor(k / 16));
+ assertEquals(Math.floor(div(k, -16)), Math.floor(k / -16));
+ assertEquals(Math.floor(div(k, 17)), Math.floor(k / 17));
+ assertEquals(Math.floor(div(k, -17)), Math.floor(k / -17));
+ assertEquals(Math.floor(div(k, 18)), Math.floor(k / 18));
+ assertEquals(Math.floor(div(k, -18)), Math.floor(k / -18));
+ assertEquals(Math.floor(div(k, 19)), Math.floor(k / 19));
+ assertEquals(Math.floor(div(k, -19)), Math.floor(k / -19));
+ assertEquals(Math.floor(div(k, 20)), Math.floor(k / 20));
+ assertEquals(Math.floor(div(k, -20)), Math.floor(k / -20));
+ assertEquals(Math.floor(div(k, 21)), Math.floor(k / 21));
+ assertEquals(Math.floor(div(k, -21)), Math.floor(k / -21));
+ assertEquals(Math.floor(div(k, 22)), Math.floor(k / 22));
+ assertEquals(Math.floor(div(k, -22)), Math.floor(k / -22));
+ assertEquals(Math.floor(div(k, 23)), Math.floor(k / 23));
+ assertEquals(Math.floor(div(k, -23)), Math.floor(k / -23));
+ assertEquals(Math.floor(div(k, 24)), Math.floor(k / 24));
+ assertEquals(Math.floor(div(k, -24)), Math.floor(k / -24));
+ assertEquals(Math.floor(div(k, 25)), Math.floor(k / 25));
+ assertEquals(Math.floor(div(k, -25)), Math.floor(k / -25));
+ assertEquals(Math.floor(div(k, 125)), Math.floor(k / 125));
+ assertEquals(Math.floor(div(k, -125)), Math.floor(k / -125));
+ assertEquals(Math.floor(div(k, 625)), Math.floor(k / 625));
+ assertEquals(Math.floor(div(k, -625)), Math.floor(k / -625));
+ }
+ c = 0;
+ for (var k = 0; k <= limit; k++) {
+ if (k > exhaustive_limit) { c += step; k += c; }
+ assertEquals(Math.floor(div(-k, 1)), Math.floor(-k / 1));
+ assertEquals(Math.floor(div(-k, -1)), Math.floor(-k / -1));
+ assertEquals(Math.floor(div(-k, 2)), Math.floor(-k / 2));
+ assertEquals(Math.floor(div(-k, -2)), Math.floor(-k / -2));
+ assertEquals(Math.floor(div(-k, 3)), Math.floor(-k / 3));
+ assertEquals(Math.floor(div(-k, -3)), Math.floor(-k / -3));
+ assertEquals(Math.floor(div(-k, 4)), Math.floor(-k / 4));
+ assertEquals(Math.floor(div(-k, -4)), Math.floor(-k / -4));
+ assertEquals(Math.floor(div(-k, 5)), Math.floor(-k / 5));
+ assertEquals(Math.floor(div(-k, -5)), Math.floor(-k / -5));
+ assertEquals(Math.floor(div(-k, 6)), Math.floor(-k / 6));
+ assertEquals(Math.floor(div(-k, -6)), Math.floor(-k / -6));
+ assertEquals(Math.floor(div(-k, 7)), Math.floor(-k / 7));
+ assertEquals(Math.floor(div(-k, -7)), Math.floor(-k / -7));
+ assertEquals(Math.floor(div(-k, 8)), Math.floor(-k / 8));
+ assertEquals(Math.floor(div(-k, -8)), Math.floor(-k / -8));
+ assertEquals(Math.floor(div(-k, 9)), Math.floor(-k / 9));
+ assertEquals(Math.floor(div(-k, -9)), Math.floor(-k / -9));
+ assertEquals(Math.floor(div(-k, 10)), Math.floor(-k / 10));
+ assertEquals(Math.floor(div(-k, -10)), Math.floor(-k / -10));
+ assertEquals(Math.floor(div(-k, 11)), Math.floor(-k / 11));
+ assertEquals(Math.floor(div(-k, -11)), Math.floor(-k / -11));
+ assertEquals(Math.floor(div(-k, 12)), Math.floor(-k / 12));
+ assertEquals(Math.floor(div(-k, -12)), Math.floor(-k / -12));
+ assertEquals(Math.floor(div(-k, 13)), Math.floor(-k / 13));
+ assertEquals(Math.floor(div(-k, -13)), Math.floor(-k / -13));
+ assertEquals(Math.floor(div(-k, 14)), Math.floor(-k / 14));
+ assertEquals(Math.floor(div(-k, -14)), Math.floor(-k / -14));
+ assertEquals(Math.floor(div(-k, 15)), Math.floor(-k / 15));
+ assertEquals(Math.floor(div(-k, -15)), Math.floor(-k / -15));
+ assertEquals(Math.floor(div(-k, 16)), Math.floor(-k / 16));
+ assertEquals(Math.floor(div(-k, -16)), Math.floor(-k / -16));
+ assertEquals(Math.floor(div(-k, 17)), Math.floor(-k / 17));
+ assertEquals(Math.floor(div(-k, -17)), Math.floor(-k / -17));
+ assertEquals(Math.floor(div(-k, 18)), Math.floor(-k / 18));
+ assertEquals(Math.floor(div(-k, -18)), Math.floor(-k / -18));
+ assertEquals(Math.floor(div(-k, 19)), Math.floor(-k / 19));
+ assertEquals(Math.floor(div(-k, -19)), Math.floor(-k / -19));
+ assertEquals(Math.floor(div(-k, 20)), Math.floor(-k / 20));
+ assertEquals(Math.floor(div(-k, -20)), Math.floor(-k / -20));
+ assertEquals(Math.floor(div(-k, 21)), Math.floor(-k / 21));
+ assertEquals(Math.floor(div(-k, -21)), Math.floor(-k / -21));
+ assertEquals(Math.floor(div(-k, 22)), Math.floor(-k / 22));
+ assertEquals(Math.floor(div(-k, -22)), Math.floor(-k / -22));
+ assertEquals(Math.floor(div(-k, 23)), Math.floor(-k / 23));
+ assertEquals(Math.floor(div(-k, -23)), Math.floor(-k / -23));
+ assertEquals(Math.floor(div(-k, 24)), Math.floor(-k / 24));
+ assertEquals(Math.floor(div(-k, -24)), Math.floor(-k / -24));
+ assertEquals(Math.floor(div(-k, 25)), Math.floor(-k / 25));
+ assertEquals(Math.floor(div(-k, -25)), Math.floor(-k / -25));
+ assertEquals(Math.floor(div(-k, 125)), Math.floor(-k / 125));
+ assertEquals(Math.floor(div(-k, -125)), Math.floor(-k / -125));
+ assertEquals(Math.floor(div(-k, 625)), Math.floor(-k / 625));
+ assertEquals(Math.floor(div(-k, -625)), Math.floor(-k / -625));
+ }
+ // Test for edge cases.
+  // Use (values[i] | 0) and (values[j] | 0) to force the int32 representation.
+ for (var i = 0; i < values.length; i++) {
+ for (var j = 0; j < values.length; j++) {
+ assertEquals(Math.floor(div((values[i] | 0), (values[j] | 0))),
+ Math.floor((values[i] | 0) / (values[j] | 0)));
+ assertEquals(Math.floor(div(-(values[i] | 0), (values[j] | 0))),
+ Math.floor(-(values[i] | 0) / (values[j] | 0)));
+ assertEquals(Math.floor(div((values[i] | 0), -(values[j] | 0))),
+ Math.floor((values[i] | 0) / -(values[j] | 0)));
+ assertEquals(Math.floor(div(-(values[i] | 0), -(values[j] | 0))),
+ Math.floor(-(values[i] | 0) / -(values[j] | 0)));
+ }
+ }
+}
+
+test_div();
+%OptimizeFunctionOnNextCall(test_div);
+test_div();
+
+// Test for negative zero and overflow.
+// Separate the tests to prevent deoptimizations from making the other optimized
+// test unreachable.
+
+function IsNegativeZero(x) {
+ assertTrue(x == 0); // Is 0 or -0.
+ var y = 1 / x;
+ assertFalse(isFinite(y));
+ return y < 0;
+}
+
+function test_div_deopt_minus_zero() {
+ var zero_in_array = [0];
+ assertTrue(IsNegativeZero(Math.floor((zero_in_array[0] | 0) / -1)));
+}
+
+function test_div_deopt_overflow() {
+ // We box the value in an array to avoid constant propagation.
+ var min_int_in_array = [-2147483648];
+ // We use '| 0' to force the representation to int32.
+ assertEquals(-min_int_in_array[0],
+ Math.floor((min_int_in_array[0] | 0) / -1));
+}
+
+test_div_deopt_minus_zero();
+test_div_deopt_overflow();
+%OptimizeFunctionOnNextCall(test_div_deopt_minus_zero);
+%OptimizeFunctionOnNextCall(test_div_deopt_overflow);
+test_div_deopt_minus_zero();
+test_div_deopt_overflow();
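
Editor's note, not part of the patch: a minimal standalone sketch of the two corner cases the deopt tests above guard against -- detecting -0 through the sign of 1/x, and the int32 overflow of -2147483648 / -1. Plain JavaScript, no test harness or natives syntax assumed; console.log is used for illustration only.

// -0 is only observable through operations such as 1 / x.
var zero = Math.floor(0 / -1);            // evaluates to -0
console.log(zero === 0);                  // true: -0 compares equal to 0
console.log(1 / zero === -Infinity);      // true: the sign survives division

// -2147483648 / -1 does not fit in an int32, so an optimized integer
// division has to deopt and produce the double value 2147483648.
var minInt = -2147483648;
console.log(Math.floor((minInt | 0) / -1) === 2147483648);  // true
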
diff --git a/deps/v8/test/mjsunit/mjsunit.js b/deps/v8/test/mjsunit/mjsunit.js
index 033c78f4b0..65fb301b44 100644
--- a/deps/v8/test/mjsunit/mjsunit.js
+++ b/deps/v8/test/mjsunit/mjsunit.js
@@ -75,7 +75,7 @@ var assertTrue;
// Checks that the found value is false.
var assertFalse;
-// Checks that the found value is null. Kept for historical compatability,
+// Checks that the found value is null. Kept for historical compatibility,
// please just use assertEquals(null, expected).
var assertNull;
diff --git a/deps/v8/test/mjsunit/mjsunit.status b/deps/v8/test/mjsunit/mjsunit.status
index a1b927097a..e311ffbcba 100644
--- a/deps/v8/test/mjsunit/mjsunit.status
+++ b/deps/v8/test/mjsunit/mjsunit.status
@@ -28,16 +28,20 @@
prefix mjsunit
# All tests in the bug directory are expected to fail.
-bugs: FAIL
+bugs/*: FAIL
##############################################################################
# Fails.
regress/regress-1119: FAIL
-##############################################################################
+# Issue 2177: Debugger on ARM broken due to variable literal pool size.
+debug-liveedit-breakpoints: PASS, SKIP if ($arch == arm)
-# NewGC: BUG(1719) slow to collect arrays over several contexts.
+# Issue 1719: Slow to collect arrays over several contexts.
regress/regress-524: SKIP
+# When that bug is fixed, revert the expectation to:
+# Skip long running test in debug and allow it to timeout in release mode.
+# regress/regress-524: (PASS || TIMEOUT), SKIP if $mode == debug
##############################################################################
# Too slow in debug mode with --stress-opt
@@ -58,12 +62,10 @@ array-constructor: PASS || TIMEOUT
# Very slow on ARM and MIPS, contains no architecture dependent code.
unicode-case-overoptimization: PASS, TIMEOUT if ($arch == arm || $arch == mips)
-# Skip long running test in debug and allow it to timeout in release mode.
-regress/regress-524: (PASS || TIMEOUT), SKIP if $mode == debug
-
# Stack manipulations in LiveEdit are buggy - see bug 915
debug-liveedit-check-stack: SKIP
debug-liveedit-patch-positions-replace: SKIP
+debug-liveedit-stack-padding: SKIP
# Test Crankshaft compilation time. Expected to take too long in debug mode.
regress/regress-1969: PASS, SKIP if $mode == debug
diff --git a/deps/v8/test/mjsunit/override-read-only-property.js b/deps/v8/test/mjsunit/override-read-only-property.js
index b8fa5015a3..2876ae1f84 100644
--- a/deps/v8/test/mjsunit/override-read-only-property.js
+++ b/deps/v8/test/mjsunit/override-read-only-property.js
@@ -25,6 +25,8 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+// Flags: --es5_readonly
+
// According to ECMA-262, sections 8.6.2.2 and 8.6.2.3 you're not
// allowed to override read-only properties, not even if the read-only
// property is in the prototype chain.
@@ -38,19 +40,19 @@ F.prototype = Number;
var original_number_max = Number.MAX_VALUE;
// Assignment to a property which does not exist on the object itself,
-// but is read-only in a prototype takes effect.
+// but is read-only in a prototype does not take effect.
var f = new F();
assertEquals(original_number_max, f.MAX_VALUE);
f.MAX_VALUE = 42;
-assertEquals(42, f.MAX_VALUE);
+assertEquals(original_number_max, f.MAX_VALUE);
// Assignment to a property which does not exist on the object itself,
-// but is read-only in a prototype takes effect.
+// but is read-only in a prototype does not take effect.
f = new F();
with (f) {
MAX_VALUE = 42;
}
-assertEquals(42, f.MAX_VALUE);
+assertEquals(original_number_max, f.MAX_VALUE);
// Assignment to read-only property on the object itself is ignored.
Number.MAX_VALUE = 42;
diff --git a/deps/v8/test/mjsunit/packed-elements.js b/deps/v8/test/mjsunit/packed-elements.js
new file mode 100644
index 0000000000..7f333e56e5
--- /dev/null
+++ b/deps/v8/test/mjsunit/packed-elements.js
@@ -0,0 +1,112 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --smi-only-arrays --packed-arrays
+
+var has_packed_elements = !%HasFastHoleyElements(Array());
+
+function test1() {
+ var a = Array(8);
+ assertTrue(%HasFastSmiOrObjectElements(a));
+ assertTrue(%HasFastHoleyElements(a));
+}
+
+function test2() {
+ var a = Array();
+ assertTrue(%HasFastSmiOrObjectElements(a));
+ assertFalse(%HasFastHoleyElements(a));
+}
+
+function test3() {
+ var a = Array(1,2,3,4,5,6,7);
+ assertTrue(%HasFastSmiOrObjectElements(a));
+ assertFalse(%HasFastHoleyElements(a));
+}
+
+function test4() {
+ var a = [1, 2, 3, 4];
+ assertTrue(%HasFastSmiElements(a));
+ assertFalse(%HasFastHoleyElements(a));
+ var b = [1, 2,, 4];
+ assertTrue(%HasFastSmiElements(b));
+ assertTrue(%HasFastHoleyElements(b));
+}
+
+function test5() {
+ var a = [1, 2, 3, 4.5];
+ assertTrue(%HasFastDoubleElements(a));
+ assertFalse(%HasFastHoleyElements(a));
+ var b = [1,, 3.5, 4];
+ assertTrue(%HasFastDoubleElements(b));
+ assertTrue(%HasFastHoleyElements(b));
+ var c = [1, 3.5,, 4];
+ assertTrue(%HasFastDoubleElements(c));
+ assertTrue(%HasFastHoleyElements(c));
+}
+
+function test6() {
+ var x = new Object();
+ var a = [1, 2, 3.5, x];
+ assertTrue(%HasFastObjectElements(a));
+ assertFalse(%HasFastHoleyElements(a));
+ assertEquals(1, a[0]);
+ assertEquals(2, a[1]);
+ assertEquals(3.5, a[2]);
+ assertEquals(x, a[3]);
+ var b = [1,, 3.5, x];
+ assertTrue(%HasFastObjectElements(b));
+ assertTrue(%HasFastHoleyElements(b));
+ assertEquals(1, b[0]);
+ assertEquals(undefined, b[1]);
+ assertEquals(3.5, b[2]);
+ assertEquals(x, b[3]);
+ var c = [1, 3.5, x,,];
+ assertTrue(%HasFastObjectElements(c));
+ assertTrue(%HasFastHoleyElements(c));
+ assertEquals(1, c[0]);
+ assertEquals(3.5, c[1]);
+ assertEquals(x, c[2]);
+ assertEquals(undefined, c[3]);
+}
+
+function test_with_optimization(f) {
+  // Run the test in a loop to make sure that the inlined Array() constructor
+  // runs out of new space memory and has to fall back on the runtime
+  // implementation.
+ for (i = 0; i < 250000; ++i) f();
+ %OptimizeFunctionOnNextCall(f);
+ for (i = 0; i < 250000; ++i) f(); // Make sure GC happens
+}
+
+if (has_packed_elements) {
+ test_with_optimization(test1);
+ test_with_optimization(test2);
+ test_with_optimization(test3);
+ test_with_optimization(test4);
+ test_with_optimization(test5);
+ test_with_optimization(test6);
+}
+
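Editor's note, not part of the patch: a short illustration, without natives syntax, of the packed-vs-holey distinction that packed-elements.js checks with %HasFastHoleyElements. A hole is an absent property, so reads at that index fall through to the prototype chain.

var packed = [1, 2, 3];
var holey  = [1, , 3];                   // index 1 is a hole
console.log(1 in packed);                // true
console.log(1 in holey);                 // false: the element is missing
Array.prototype[1] = "from prototype";
console.log(holey[1]);                   // "from prototype": holes read through
console.log(packed[1]);                  // 2: the own element wins
delete Array.prototype[1];               // clean up
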
diff --git a/deps/v8/test/mjsunit/readonly.js b/deps/v8/test/mjsunit/readonly.js
new file mode 100644
index 0000000000..4d06b7cf43
--- /dev/null
+++ b/deps/v8/test/mjsunit/readonly.js
@@ -0,0 +1,228 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --harmony-proxies --es5_readonly
+
+// Different ways to create an object.
+
+function CreateFromLiteral() {
+ return {};
+}
+
+function CreateFromObject() {
+ return new Object;
+}
+
+function CreateDefault() {
+ return Object.create(Object.prototype);
+}
+
+function CreateFromConstructor(proto) {
+ function C() {}
+ (new C).b = 9; // Make sure that we can have an in-object property.
+ C.prototype = proto;
+ return function() { return new C; }
+}
+
+function CreateFromApi(proto) {
+ return function() { return Object.create(proto); }
+}
+
+function CreateWithProperty(proto) {
+ function C() { this.a = -100; }
+ C.prototype = proto;
+ return function() { return new C; }
+}
+
+var bases = [CreateFromLiteral, CreateFromObject, CreateDefault];
+var inherits = [CreateFromConstructor, CreateFromApi, CreateWithProperty];
+var constructs = [CreateFromConstructor, CreateFromApi];
+
+function TestAllCreates(f) {
+  // The depth of the prototype chain to walk up.
+ for (var depth = 0; depth < 3; ++depth) {
+ // Introduce readonly-ness this far up the chain.
+ for (var up = 0; up <= depth; ++up) {
+ // Try different construction methods.
+ for (var k = 0; k < constructs.length; ++k) {
+ // Construct a fresh prototype chain from above functions.
+ for (var i = 0; i < bases.length; ++i) {
+ var p = bases[i]();
+ // There may be a preexisting property under the insertion point...
+ for (var j = 0; j < depth - up; ++j) {
+ p = inherits[Math.floor(inherits.length * Math.random())](p)();
+ }
+ // ...but not above it.
+ for (var j = 0; j < up; ++j) {
+ p = constructs[Math.floor(constructs.length * Math.random())](p)();
+ }
+ // Create a fresh constructor.
+ var c = constructs[k](p);
+ f(function() {
+ var o = c();
+ o.up = o;
+ for (var j = 0; j < up; ++j) o.up = Object.getPrototypeOf(o.up);
+ return o;
+ })
+ }
+ }
+ }
+ }
+}
+
+
+// Different ways to make a property read-only.
+
+function ReadonlyByNonwritableDataProperty(o, name) {
+ Object.defineProperty(o, name, {value: -41, writable: false});
+}
+
+function ReadonlyByAccessorPropertyWithoutSetter(o, name) {
+ Object.defineProperty(o, name, {get: function() { return -42; }});
+}
+
+function ReadonlyByGetter(o, name) {
+ o.__defineGetter__("a", function() { return -43; });
+}
+
+function ReadonlyByFreeze(o, name) {
+ o[name] = -44;
+ Object.freeze(o);
+}
+
+function ReadonlyByProto(o, name) {
+ var p = Object.create(o.__proto__);
+ Object.defineProperty(p, name, {value: -45, writable: false});
+ o.__proto__ = p;
+}
+
+function ReadonlyByProxy(o, name) {
+ var p = Proxy.create({
+ getPropertyDescriptor: function() {
+ return {value: -46, writable: false, configurable: true};
+ }
+ });
+ o.__proto__ = p;
+}
+
+var readonlys = [
+ ReadonlyByNonwritableDataProperty, ReadonlyByAccessorPropertyWithoutSetter,
+ ReadonlyByGetter, ReadonlyByFreeze, ReadonlyByProto, ReadonlyByProxy
+]
+
+function TestAllReadonlys(f) {
+  // Provide the various ways of making a property read-only.
+ for (var i = 0; i < readonlys.length; ++i) {
+ print(" readonly =", i)
+ f(readonlys[i]);
+ }
+}
+
+
+// Different use scenarios.
+
+function Assign(o, x) {
+ o.a = x;
+}
+
+function AssignStrict(o, x) {
+ "use strict";
+ o.a = x;
+}
+
+function TestAllModes(f) {
+ for (var strict = 0; strict < 2; ++strict) {
+ print(" strict =", strict);
+ f(strict);
+ }
+}
+
+function TestAllScenarios(f) {
+ for (var t = 0; t < 100; t = 2*t + 1) {
+ print("t =", t)
+ f(function(strict, create, readonly) {
+ // Make sure that the assignments are monomorphic.
+ %DeoptimizeFunction(Assign);
+ %DeoptimizeFunction(AssignStrict);
+ %ClearFunctionTypeFeedback(Assign);
+ %ClearFunctionTypeFeedback(AssignStrict);
+ for (var i = 0; i < t; ++i) {
+ var o = create();
+ assertFalse("a" in o && !("a" in o.__proto__));
+ if (strict === 0)
+ Assign(o, i);
+ else
+ AssignStrict(o, i);
+ assertEquals(i, o.a);
+ }
+ %OptimizeFunctionOnNextCall(Assign);
+ %OptimizeFunctionOnNextCall(AssignStrict);
+ var o = create();
+ assertFalse("a" in o && !("a" in o.__proto__));
+ readonly(o.up, "a");
+ assertTrue("a" in o);
+ if (strict === 0)
+ Assign(o, t + 1);
+ else
+ assertThrows(function() { AssignStrict(o, t + 1) }, TypeError);
+ assertTrue(o.a < 0);
+ });
+ }
+}
+
+
+// Runner.
+
+TestAllScenarios(function(scenario) {
+ TestAllModes(function(strict) {
+ TestAllReadonlys(function(readonly) {
+ TestAllCreates(function(create) {
+ scenario(strict, create, readonly);
+ });
+ });
+ });
+});
+
+
+// Extra test forcing bailout.
+
+function Assign2(o, x) { o.a = x }
+
+(function() {
+ var p = CreateFromConstructor(Object.prototype)();
+ var c = CreateFromConstructor(p);
+ for (var i = 0; i < 3; ++i) {
+ var o = c();
+ Assign2(o, i);
+ assertEquals(i, o.a);
+ }
+ %OptimizeFunctionOnNextCall(Assign2);
+ ReadonlyByNonwritableDataProperty(p, "a");
+ var o = c();
+ Assign2(o, 0);
+ assertTrue(o.a < 0);
+})();
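
Editor's note, not part of the patch: a minimal sketch of the ES5 semantics that --es5_readonly enables and that readonly.js exercises. Assigning over a non-writable property found on the prototype is silently ignored in sloppy mode and throws a TypeError in strict mode. Plain JavaScript, no harness assumed.

var proto = {};
Object.defineProperty(proto, "a", { value: -1, writable: false });
var o = Object.create(proto);

o.a = 42;                                // sloppy mode: silently ignored
console.log(o.a);                        // -1

(function() {
  "use strict";
  try {
    o.a = 42;                            // strict mode: throws
  } catch (e) {
    console.log(e instanceof TypeError); // true
  }
})();
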
diff --git a/deps/v8/test/mjsunit/regexp-capture-3.js b/deps/v8/test/mjsunit/regexp-capture-3.js
index 50e423ff30..b676f01c2c 100644
--- a/deps/v8/test/mjsunit/regexp-capture-3.js
+++ b/deps/v8/test/mjsunit/regexp-capture-3.js
@@ -25,6 +25,195 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-"abcd".replace(/b/g, function() { });
+function oneMatch(re) {
+ "abcd".replace(re, function() { });
+ assertEquals("abcd", RegExp.input);
+ assertEquals("a", RegExp.leftContext);
+ assertEquals("b", RegExp.lastMatch);
+ assertEquals("", RegExp.lastParen);
+ assertEquals(undefined, RegExp.lastIndex);
+ assertEquals(undefined, RegExp.index);
+ assertEquals("cd", RegExp.rightContext);
+ for (var i = 1; i < 10; i++) {
+ assertEquals("", RegExp['$' + i]);
+ }
+}
+oneMatch(/b/);
+oneMatch(/b/g);
+
+"abcdabcd".replace(/b/g, function() { });
+assertEquals("abcdabcd", RegExp.input);
+assertEquals("abcda", RegExp.leftContext);
+assertEquals("b", RegExp.lastMatch);
+assertEquals("", RegExp.lastParen);
+assertEquals(undefined, RegExp.lastIndex);
+assertEquals(undefined, RegExp.index);
assertEquals("cd", RegExp.rightContext);
+for (var i = 1; i < 10; i++) {
+ assertEquals("", RegExp['$' + i]);
+}
+
+function captureMatch(re) {
+ "abcd".replace(re, function() { });
+ assertEquals("abcd", RegExp.input);
+ assertEquals("a", RegExp.leftContext);
+ assertEquals("bc", RegExp.lastMatch);
+ assertEquals("c", RegExp.lastParen);
+ assertEquals(undefined, RegExp.lastIndex);
+ assertEquals(undefined, RegExp.index);
+ assertEquals("d", RegExp.rightContext);
+ assertEquals('b', RegExp.$1);
+ assertEquals('c', RegExp.$2);
+ for (var i = 3; i < 10; i++) {
+ assertEquals("", RegExp['$' + i]);
+ }
+}
+
+captureMatch(/(b)(c)/);
+captureMatch(/(b)(c)/g);
+
+"abcdabcd".replace(/(b)(c)/g, function() { });
+assertEquals("abcdabcd", RegExp.input);
+assertEquals("abcda", RegExp.leftContext);
+assertEquals("bc", RegExp.lastMatch);
+assertEquals("c", RegExp.lastParen);
+assertEquals(undefined, RegExp.lastIndex);
+assertEquals(undefined, RegExp.index);
+assertEquals("d", RegExp.rightContext);
+assertEquals('b', RegExp.$1);
+assertEquals('c', RegExp.$2);
+for (var i = 3; i < 10; i++) {
+ assertEquals("", RegExp['$' + i]);
+}
+
+
+function Override() {
+ // Set the internal lastMatchInfoOverride. After calling this we do a normal
+ // match and verify the override was cleared and that we record the new
+ // captures.
+ "abcdabcd".replace(/(b)(c)/g, function() { });
+}
+
+
+function TestOverride(input, expect, property, re_src) {
+ var re = new RegExp(re_src);
+ var re_g = new RegExp(re_src, "g");
+
+ function OverrideCase(fn) {
+ Override();
+ fn();
+ assertEquals(expect, RegExp[property]);
+ }
+
+ OverrideCase(function() { return input.replace(re, "x"); });
+ OverrideCase(function() { return input.replace(re_g, "x"); });
+ OverrideCase(function() { return input.replace(re, ""); });
+ OverrideCase(function() { return input.replace(re_g, ""); });
+ OverrideCase(function() { return input.match(re); });
+ OverrideCase(function() { return input.match(re_g); });
+ OverrideCase(function() { return re.test(input); });
+ OverrideCase(function() { return re_g.test(input); });
+}
+
+var input = "bar.foo baz......";
+var re_str = "(ba.).*?f";
+TestOverride(input, "bar", "$1", re_str);
+
+input = "foo bar baz";
+var re_str = "bar";
+TestOverride(input, "bar", "$&", re_str);
+
+
+function no_last_match(fn) {
+ fn();
+ assertEquals("hestfisk", RegExp.$1);
+}
+
+/(hestfisk)/.test("There's no such thing as a hestfisk!");
+
+no_last_match(function() { "foo".replace("f", ""); });
+no_last_match(function() { "foo".replace("f", "f"); });
+no_last_match(function() { "foo".split("o"); });
+
+var base = "In the music. In the music. ";
+var cons = base + base + base + base;
+no_last_match(function() { cons.replace("x", "y"); });
+no_last_match(function() { cons.replace("e", "E"); });
+
+
+// Here's one that matches once, then tries to match again, but fails.
+// Verify that the last match info is from the last match, not from the
+// failure that came after.
+"bar.foo baz......".replace(/(ba.).*?f/g, function() { return "x";});
+assertEquals("bar", RegExp.$1);
+
+
+// A test that initially does a zero width match, but later does a non-zero
+// width match.
+var a = "foo bar baz".replace(/^|bar/g, "");
+assertEquals("foo baz", a);
+
+a = "foo bar baz".replace(/^|bar/g, "*");
+assertEquals("*foo * baz", a);
+
+// We test FilterASCII using regexps that will backtrack forever. Since
+// a regexp with a non-ASCII character in it can never match an ASCII
+// string we can test that the relevant node is removed by verifying that
+// there is no hang.
+function NoHang(re) {
+ "This is an ASCII string that could take forever".match(re);
+}
+
+
+NoHang(/(((.*)*)*x)å/); // Continuation after loop is filtered, so is loop.
+NoHang(/(((.*)*)*å)foo/); // Body of loop filtered.
+NoHang(/å(((.*)*)*x)/); // Everything after a filtered character is filtered.
+NoHang(/(((.*)*)*x)å/); // Everything before a filtered character is filtered.
+NoHang(/[æøå](((.*)*)*x)/); // Everything after a filtered class is filtered.
+NoHang(/(((.*)*)*x)[æøå]/); // Everything before a filtered class is filtered.
+NoHang(/[^\x00-\x7f](((.*)*)*x)/); // After negated class.
+NoHang(/(((.*)*)*x)[^\x00-\x7f]/); // Before negated class.
+NoHang(/(?!(((.*)*)*x)å)foo/); // Negative lookahead is filtered.
+NoHang(/(?!(((.*)*)*x))å/); // Continuation branch of negative lookahead.
+NoHang(/(?=(((.*)*)*x)å)foo/); // Positive lookahead is filtered.
+NoHang(/(?=(((.*)*)*x))å/); // Continuation branch of positive lookahead.
+NoHang(/(?=å)(((.*)*)*x)/); // Positive lookahead also prunes continuation.
+NoHang(/(æ|ø|å)(((.*)*)*x)/); // All branches of alternation are filtered.
+NoHang(/(a|b|(((.*)*)*x))å/); // 1 out of 3 branches pruned.
+NoHang(/(a|(((.*)*)*x)ø|(((.*)*)*x)å)/); // 2 out of 3 branches pruned.
+
+var s = "Don't prune based on a repetition of length 0";
+assertEquals(null, s.match(/å{1,1}prune/));
+assertEquals("prune", (s.match(/å{0,0}prune/)[0]));
+
+// Some very deep regexps where FilterASCII gives up in order not to make the
+// stack overflow.
+var regex6 = /a*\u0100*\w/;
+var input0 = "a";
+regex6.exec(input0);
+
+var re = "\u0100*\\w";
+
+for (var i = 0; i < 200; i++) re = "a*" + re;
+
+var regex7 = new RegExp(re);
+regex7.exec(input0);
+
+var regex8 = new RegExp(re, "i");
+regex8.exec(input0);
+
+re = "[\u0100]*\\w";
+for (var i = 0; i < 200; i++) re = "a*" + re;
+
+var regex9 = new RegExp(re);
+regex9.exec(input0);
+
+var regex10 = new RegExp(re, "i");
+regex10.exec(input0);
+
+var regex11 = /^(?:[^\u0000-\u0080]|[0-9a-z?,.!&\s#()])+$/i;
+regex11.exec(input0);
+
+var regex12 = /u(\xf0{8}?\D*?|( ? !)$h??(|)*?(||)+?\6((?:\W\B|--\d-*-|)?$){0, }?|^Y( ? !1)\d+)+a/;
+regex12.exec("");
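
Editor's note, not part of the patch: the properties read above (RegExp.$1, lastMatch, leftContext, and so on) are the legacy, non-standard RegExp statics. They reflect the most recent successful match performed by RegExp or String matching methods, which is exactly what these tests probe; a quick illustration:

/(b)(c)/.exec("abcd");
console.log(RegExp.lastMatch);       // "bc"
console.log(RegExp.leftContext);     // "a"
console.log(RegExp.rightContext);    // "d"
console.log(RegExp.$1, RegExp.$2);   // "b" "c"
console.log(RegExp.input);           // "abcd"
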
diff --git a/deps/v8/test/mjsunit/regexp-capture.js b/deps/v8/test/mjsunit/regexp-capture.js
index 8aae71795a..307309482a 100755
--- a/deps/v8/test/mjsunit/regexp-capture.js
+++ b/deps/v8/test/mjsunit/regexp-capture.js
@@ -56,3 +56,5 @@ assertEquals(["bbc", "b"], /^(b+|a){1,2}?bc/.exec("bbc"));
assertEquals(["bbaa", "a", "", "a"],
/((\3|b)\2(a)){2,}/.exec("bbaababbabaaaaabbaaaabba"));
+// From crbug.com/128821 - don't hang:
+"".match(/((a|i|A|I|u|o|U|O)(s|c|b|c|d|f|g|h|j|k|l|m|n|p|q|r|s|t|v|w|x|y|z|B|C|D|F|G|H|J|K|L|M|N|P|Q|R|S|T|V|W|X|Y|Z)*) de\/da([.,!?\s]|$)/);
diff --git a/deps/v8/test/mjsunit/regexp-global.js b/deps/v8/test/mjsunit/regexp-global.js
new file mode 100644
index 0000000000..cc360d3ce0
--- /dev/null
+++ b/deps/v8/test/mjsunit/regexp-global.js
@@ -0,0 +1,141 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+// Test that an optional capture is cleared between two matches.
+var str = "ABX X";
+str = str.replace(/(\w)?X/g, function(match, capture) {
+ assertTrue(match.indexOf(capture) >= 0 ||
+ capture === undefined);
+ return capture ? capture.toLowerCase() : "-";
+ });
+assertEquals("Ab -", str);
+
+// Test zero-length matches.
+str = "Als Gregor Samsa eines Morgens";
+str = str.replace(/\b/g, function(match, capture) {
+ return "/";
+ });
+assertEquals("/Als/ /Gregor/ /Samsa/ /eines/ /Morgens/", str);
+
+// Test zero-length matches that have non-zero-length sub-captures.
+str = "It was a pleasure to burn.";
+str = str.replace(/(?=(\w+))\b/g, function(match, capture) {
+ return capture.length;
+ });
+assertEquals("2It 3was 1a 8pleasure 2to 4burn.", str);
+
+// Test multiple captures.
+str = "Try not. Do, or do not. There is no try.";
+str = str.replace(/(not?)|(do)|(try)/gi,
+ function(match, c1, c2, c3) {
+ assertTrue((c1 === undefined && c2 === undefined) ||
+ (c2 === undefined && c3 === undefined) ||
+ (c1 === undefined && c3 === undefined));
+ if (c1) return "-";
+ if (c2) return "+";
+ if (c3) return "="
+ });
+assertEquals("= -. +, or + -. There is - =.", str);
+
+// Test multiple alternate captures.
+str = "FOUR LEGS GOOD, TWO LEGS BAD!";
+str = str.replace(/(FOUR|TWO) LEGS (GOOD|BAD)/g,
+ function(match, num_legs, likeability) {
+ assertTrue(num_legs !== undefined);
+ assertTrue(likeability !== undefined);
+ if (num_legs == "FOUR") assertTrue(likeability == "GOOD");
+ if (num_legs == "TWO") assertTrue(likeability == "BAD");
+ return match.length - 10;
+ });
+assertEquals("4, 2!", str);
+
+
+// The same tests with UC16.
+
+// Test that an optional capture is cleared between two matches.
+str = "AB\u1234 \u1234";
+str = str.replace(/(\w)?\u1234/g,
+ function(match, capture) {
+ assertTrue(match.indexOf(capture) >= 0 ||
+ capture === undefined);
+ return capture ? capture.toLowerCase() : "-";
+ });
+assertEquals("Ab -", str);
+
+// Test zero-length matches.
+str = "Als \u2623\u2642 eines Morgens";
+str = str.replace(/\b/g, function(match, capture) {
+ return "/";
+ });
+assertEquals("/Als/ \u2623\u2642 /eines/ /Morgens/", str);
+
+// Test zero-length matches that have non-zero-length sub-captures.
+str = "It was a pleasure to \u70e7.";
+str = str.replace(/(?=(\w+))\b/g, function(match, capture) {
+ return capture.length;
+ });
+assertEquals("2It 3was 1a 8pleasure 2to \u70e7.", str);
+
+// Test multiple captures.
+str = "Try not. D\u26aa, or d\u26aa not. There is no try.";
+str = str.replace(/(not?)|(d\u26aa)|(try)/gi,
+ function(match, c1, c2, c3) {
+ assertTrue((c1 === undefined && c2 === undefined) ||
+ (c2 === undefined && c3 === undefined) ||
+ (c1 === undefined && c3 === undefined));
+ if (c1) return "-";
+ if (c2) return "+";
+ if (c3) return "="
+ });
+assertEquals("= -. +, or + -. There is - =.", str);
+
+// Test multiple alternate captures.
+str = "FOUR \u817f GOOD, TWO \u817f BAD!";
+str = str.replace(/(FOUR|TWO) \u817f (GOOD|BAD)/g,
+ function(match, num_legs, likeability) {
+ assertTrue(num_legs !== undefined);
+ assertTrue(likeability !== undefined);
+ if (num_legs == "FOUR") assertTrue(likeability == "GOOD");
+ if (num_legs == "TWO") assertTrue(likeability == "BAD");
+ return match.length - 7;
+ });
+assertEquals("4, 2!", str);
+
+// Test capture that is a real substring.
+var str = "Beasts of England, beasts of Ireland";
+str = str.replace(/(.*)/g, function(match) { return '~'; });
+assertEquals("~~", str);
+
+// Test zero-length matches that have non-zero-length sub-captures that do not
+// start at the match start position.
+str = "up up up up";
+str = str.replace(/\b(?=u(p))/g, function(match, capture) {
+ return capture.length;
+ });
+
+assertEquals("1up 1up 1up 1up", str);
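
Editor's note, not part of the patch: a reminder of the String.prototype.replace callback signature these tests rely on -- fn(match, capture1, ..., offset, string) -- invoked once per match when the regexp has the /g flag. Plain JavaScript, illustrative names only.

var out = "a1 b2".replace(/([a-z])(\d)/g, function(match, letter, digit, offset, str) {
  return letter.toUpperCase() + "[" + digit + "@" + offset + "]";
});
console.log(out);  // "A[1@0] B[2@3]"
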
diff --git a/deps/v8/test/mjsunit/regexp.js b/deps/v8/test/mjsunit/regexp.js
index ec82c96e09..c2d92823bc 100644
--- a/deps/v8/test/mjsunit/regexp.js
+++ b/deps/v8/test/mjsunit/regexp.js
@@ -705,3 +705,14 @@ assertThrows("RegExp('(?!*)')");
// Test trimmed regular expression for RegExp.test().
assertTrue(/.*abc/.test("abc"));
assertFalse(/.*\d+/.test("q"));
+
+// Test that RegExp.prototype.toString() throws TypeError for
+// incompatible receivers (ES5 section 15.10.6 and 15.10.6.4).
+assertThrows("RegExp.prototype.toString.call(null)", TypeError);
+assertThrows("RegExp.prototype.toString.call(0)", TypeError);
+assertThrows("RegExp.prototype.toString.call('')", TypeError);
+assertThrows("RegExp.prototype.toString.call(false)", TypeError);
+assertThrows("RegExp.prototype.toString.call(true)", TypeError);
+assertThrows("RegExp.prototype.toString.call([])", TypeError);
+assertThrows("RegExp.prototype.toString.call({})", TypeError);
+assertThrows("RegExp.prototype.toString.call(function(){})", TypeError);
diff --git a/deps/v8/test/mjsunit/regress/regress-1119.js b/deps/v8/test/mjsunit/regress/regress-1119.js
index 16b2e4f935..5fd8f369b1 100644
--- a/deps/v8/test/mjsunit/regress/regress-1119.js
+++ b/deps/v8/test/mjsunit/regress/regress-1119.js
@@ -28,17 +28,19 @@
// Test runtime declaration of properties with var which are intercepted
// by JS accessors.
-__proto__.__defineSetter__("x", function() { hasBeenInvoked = true; });
-__proto__.__defineSetter__("y", function() { throw 'exception'; });
+// Flags: --es52_globals
+
+this.__defineSetter__("x", function() { hasBeenInvoked = true; });
+this.__defineSetter__("y", function() { throw 'exception'; });
var hasBeenInvoked = false;
eval("try { } catch (e) { var x = false; }");
assertTrue(hasBeenInvoked);
-var exception;
+// This has to run in global scope, so cannot use assertThrows...
try {
eval("try { } catch (e) { var y = false; }");
+ assertUnreachable();
} catch (e) {
- exception = e;
+ assertEquals('exception', e);
}
-assertEquals('exception', exception);
diff --git a/deps/v8/test/mjsunit/regress/regress-115452.js b/deps/v8/test/mjsunit/regress/regress-115452.js
index 7e424ed88b..dc711581e9 100644
--- a/deps/v8/test/mjsunit/regress/regress-115452.js
+++ b/deps/v8/test/mjsunit/regress/regress-115452.js
@@ -27,22 +27,21 @@
// Test that a function declaration cannot overwrite a read-only property.
-print(0)
+// Flags: --es52_globals
+
function foobl() {}
assertTrue(typeof this.foobl == "function");
assertTrue(Object.getOwnPropertyDescriptor(this, "foobl").writable);
-print(1)
Object.defineProperty(this, "foobl", {value: 1, writable: false});
assertSame(1, this.foobl);
assertFalse(Object.getOwnPropertyDescriptor(this, "foobl").writable);
-print(2)
-eval("function foobl() {}");
+// This has to run in global scope, so cannot use assertThrows...
+try {
+ eval("function foobl() {}"); // Should throw.
+ assertUnreachable();
+} catch (e) {
+ assertInstanceof(e, TypeError);
+}
assertSame(1, this.foobl);
-assertFalse(Object.getOwnPropertyDescriptor(this, "foobl").writable);
-
-print(3)
-eval("function foobl() {}");
-assertSame(1, this.foobl);
-assertFalse(Object.getOwnPropertyDescriptor(this, "foobl").writable);
diff --git a/deps/v8/test/mjsunit/regress/regress-1170.js b/deps/v8/test/mjsunit/regress/regress-1170.js
index 66ed9f29e2..8c5f6f8ab4 100644
--- a/deps/v8/test/mjsunit/regress/regress-1170.js
+++ b/deps/v8/test/mjsunit/regress/regress-1170.js
@@ -25,48 +25,74 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+// Flags: --es52_globals
+
var setter_value = 0;
-__proto__.__defineSetter__("a", function(v) { setter_value = v; });
+this.__defineSetter__("a", function(v) { setter_value = v; });
eval("var a = 1");
assertEquals(1, setter_value);
-assertFalse(this.hasOwnProperty("a"));
+assertFalse("value" in Object.getOwnPropertyDescriptor(this, "a"));
eval("with({}) { eval('var a = 2') }");
assertEquals(2, setter_value);
-assertFalse(this.hasOwnProperty("a"));
+assertFalse("value" in Object.getOwnPropertyDescriptor(this, "a"));
// Function declarations are treated specially to match Safari. We do
// not call setters for them.
+this.__defineSetter__("a", function(v) { assertUnreachable(); });
eval("function a() {}");
-assertTrue(this.hasOwnProperty("a"));
+assertTrue("value" in Object.getOwnPropertyDescriptor(this, "a"));
-__proto__.__defineSetter__("b", function(v) { assertUnreachable(); });
-var exception = false;
+this.__defineSetter__("b", function(v) { setter_value = v; });
try {
- eval("const b = 23");
+ eval("const b = 3");
} catch(e) {
- exception = true;
- assertTrue(/TypeError/.test(e));
+ assertUnreachable();
}
-assertFalse(exception);
+assertEquals(3, setter_value);
-exception = false;
try {
eval("with({}) { eval('const b = 23') }");
} catch(e) {
- exception = true;
- assertTrue(/TypeError/.test(e));
+ assertInstanceof(e, TypeError);
}
-assertTrue(exception);
-__proto__.__defineSetter__("c", function(v) { throw 42; });
-exception = false;
+this.__defineSetter__("c", function(v) { throw 42; });
try {
eval("var c = 1");
+ assertUnreachable();
} catch(e) {
- exception = true;
assertEquals(42, e);
- assertFalse(this.hasOwnProperty("c"));
+ assertFalse("value" in Object.getOwnPropertyDescriptor(this, "c"));
+}
+
+
+
+
+__proto__.__defineSetter__("aa", function(v) { assertUnreachable(); });
+eval("var aa = 1");
+assertTrue(this.hasOwnProperty("aa"));
+
+__proto__.__defineSetter__("bb", function(v) { assertUnreachable(); });
+eval("with({}) { eval('var bb = 2') }");
+assertTrue(this.hasOwnProperty("bb"));
+
+// Function declarations are treated specially to match Safari. We do
+// not call setters for them.
+__proto__.__defineSetter__("cc", function(v) { assertUnreachable(); });
+eval("function cc() {}");
+assertTrue(this.hasOwnProperty("cc"));
+
+__proto__.__defineSetter__("dd", function(v) { assertUnreachable(); });
+try {
+ eval("const dd = 23");
+} catch(e) {
+ assertUnreachable();
+}
+
+try {
+ eval("with({}) { eval('const dd = 23') }");
+} catch(e) {
+ assertInstanceof(e, TypeError);
}
-assertTrue(exception);
diff --git a/deps/v8/test/mjsunit/regress/regress-117409.js b/deps/v8/test/mjsunit/regress/regress-117409.js
index 9222191ae6..98aab5ac2d 100644
--- a/deps/v8/test/mjsunit/regress/regress-117409.js
+++ b/deps/v8/test/mjsunit/regress/regress-117409.js
@@ -36,7 +36,7 @@ var literal = [1.2];
KeyedStoreIC(literal);
KeyedStoreIC(literal);
-// Trruncate array to 0 elements, at which point backing store will be replaced
+// Truncate array to 0 elements, at which point backing store will be replaced
// with empty fixed array.
literal.length = 0;
diff --git a/deps/v8/test/mjsunit/regress/regress-119609.js b/deps/v8/test/mjsunit/regress/regress-119609.js
new file mode 100644
index 0000000000..99041adaf4
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-119609.js
@@ -0,0 +1,71 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+
+Debug = debug.Debug;
+
+var exception = false;
+
+function listener(event, exec_state, event_data, data) {
+ try {
+ if (event == Debug.DebugEvent.Break) {
+ function lookup(name) {
+ return exec_state.frame(0).evaluate(name).value();
+ }
+
+ assertEquals(3, lookup("e"));
+ assertEquals(4, lookup("f"));
+ assertEquals(1, lookup("a"));
+
+ try {
+ assertEquals(2, lookup("b"));
+ } catch (e) {
+ assertEquals("ReferenceError: b is not defined", e.toString());
+ }
+ }
+ } catch (e) {
+ exception = e.toString() + e.stack;
+ }
+}
+
+Debug.setListener(listener);
+
+function f(a, b) {
+ var c = 3;
+ function d(e, f) {
+ var g = a;
+ var h = c;
+ debugger;
+ }
+
+ return d;
+}
+
+f(1, 2)(3, 4);
+
+assertFalse(exception);
diff --git a/deps/v8/test/mjsunit/regress/regress-1199637.js b/deps/v8/test/mjsunit/regress/regress-1199637.js
index 9c560a951a..8b02a6559c 100644
--- a/deps/v8/test/mjsunit/regress/regress-1199637.js
+++ b/deps/v8/test/mjsunit/regress/regress-1199637.js
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --allow-natives-syntax
+// Flags: --allow-natives-syntax --es52_globals
// Make sure that we can introduce global variables (using
// both var and const) that shadow even READ_ONLY variables
@@ -74,5 +74,3 @@ assertEquals(5678, z);
assertEquals(1234, w);
eval("with({}) { const w = 5678; }");
assertEquals(5678, w);
-
-
diff --git a/deps/v8/test/mjsunit/regress/regress-120099.js b/deps/v8/test/mjsunit/regress/regress-120099.js
new file mode 100644
index 0000000000..3b06f4da2c
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-120099.js
@@ -0,0 +1,40 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+'use strict';
+
+var a = Object.create(Object.prototype);
+var b = Object.create(Object.prototype);
+assertFalse(a === b);
+
+Object.defineProperty(a, 'x', { value: 1 });
+assertTrue(a.x === 1);
+assertTrue(b.x === undefined);
+
+b.x = 2;
+assertTrue(a.x === 1);
+assertTrue(b.x === 2);
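
Editor's note, not part of the patch: a small sketch of the Object.defineProperty defaults the descriptor form above relies on -- attributes omitted from the descriptor default to false, so { value: 1 } creates a non-writable, non-enumerable, non-configurable own property on that object only.

var a = {};
Object.defineProperty(a, "x", { value: 1 });
var d = Object.getOwnPropertyDescriptor(a, "x");
console.log(d.writable, d.enumerable, d.configurable);  // false false false
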
diff --git a/deps/v8/test/mjsunit/regress/regress-1217.js b/deps/v8/test/mjsunit/regress/regress-1217.js
index 6530549864..e00d5371ad 100644
--- a/deps/v8/test/mjsunit/regress/regress-1217.js
+++ b/deps/v8/test/mjsunit/regress/regress-1217.js
@@ -30,7 +30,7 @@
var proto = RegExp.prototype;
assertEquals("[object RegExp]", Object.prototype.toString.call(proto));
-assertEquals("", proto.source);
+assertEquals("(?:)", proto.source);
assertEquals(false, proto.global);
assertEquals(false, proto.multiline);
assertEquals(false, proto.ignoreCase);
diff --git a/deps/v8/test/mjsunit/regress/regress-123512.js b/deps/v8/test/mjsunit/regress/regress-123512.js
new file mode 100644
index 0000000000..8a747bc5f7
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-123512.js
@@ -0,0 +1,78 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// Test that boilerplate objects for array literals with non-constant
+// elements (which will contain the hole at non-constant positions) will
+// not cause prototype chain lookups when generating optimized code.
+
+function f(x) {
+ return [x][0];
+}
+
+// Test data element on prototype.
+Object.prototype[0] = 23;
+assertSame(1, f(1));
+assertSame(2, f(2));
+%OptimizeFunctionOnNextCall(f);
+assertSame(3, f(3));
+%DeoptimizeFunction(f);
+
+// Test accessor element on prototype.
+Object.prototype.__defineGetter__(0, function() { throw Error(); });
+assertSame(4, f(4));
+assertSame(5, f(5));
+%OptimizeFunctionOnNextCall(f);
+assertSame(6, f(6));
+%DeoptimizeFunction(f);
+
+// Test the same on boilerplate objects for object literals that contain
+// both non-constant properties and non-constant elements.
+
+function g(x, y) {
+ var o = { foo:x, 0:y };
+ return o.foo + o[0];
+}
+
+// Test data property and element on prototype.
+Object.prototype[0] = 23;
+Object.prototype.foo = 42;
+assertSame(3, g(1, 2));
+assertSame(5, g(2, 3));
+%OptimizeFunctionOnNextCall(g);
+assertSame(7, g(3, 4));
+%DeoptimizeFunction(g);
+
+// Test accessor property and element on prototype.
+Object.prototype.__defineGetter__(0, function() { throw Error(); });
+Object.prototype.__defineGetter__('foo', function() { throw Error(); });
+assertSame(3, g(1, 2));
+assertSame(5, g(2, 3));
+%OptimizeFunctionOnNextCall(g);
+assertSame(7, g(3, 4));
+%DeoptimizeFunction(g);
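
Editor's note, not part of the patch: a small sketch, without natives syntax, of why holes in an array-literal boilerplate matter -- a hole at an index falls back to the prototype chain, so [x][0] must read the stored element rather than anything installed on the prototype.

Object.prototype[0] = 23;
console.log([,][0]);          // 23: a real hole reads through to Object.prototype
console.log([7][0]);          // 7: a stored element shadows the prototype entry
delete Object.prototype[0];   // clean up
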
diff --git a/deps/v8/test/mjsunit/regress/regress-123919.js b/deps/v8/test/mjsunit/regress/regress-123919.js
new file mode 100644
index 0000000000..be3460815b
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-123919.js
@@ -0,0 +1,47 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --gc-global
+
+function g(max,val) {
+ this.x = 0;
+ for (var i = 0; i < max; i++) {
+ this.x = i/100;
+ }
+ this.val = val;
+}
+
+function f(max) {
+ var val = 0.5;
+ var obj = new g(max,val);
+ assertSame(val, obj.val);
+}
+
+f(1);
+f(1);
+%OptimizeFunctionOnNextCall(f);
+f(200000);
diff --git a/deps/v8/test/mjsunit/regress/regress-126412.js b/deps/v8/test/mjsunit/regress/regress-126412.js
new file mode 100644
index 0000000000..0677f70913
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-126412.js
@@ -0,0 +1,33 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"".match(/(A{9999999999}B|C*)*D/);
+"C".match(/(A{9999999999}B|C*)*D/);
+"".match(/(A{9999999999}B|C*)*/ );
+"C".match(/(A{9999999999}B|C*)*/ );
+"".match(/(9u|(2\`shj{2147483649,}\r|3|f|y|3*)+8\B)\W93+/);
+"9u8 ".match(/(9u|(2\`shj{2147483649,}\r|3|f|y|3*)+8\B)\W93+/);
diff --git a/deps/v8/test/mjsunit/regress/regress-128146.js b/deps/v8/test/mjsunit/regress/regress-128146.js
new file mode 100644
index 0000000000..730dd91065
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-128146.js
@@ -0,0 +1,33 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Define accessor properties, resulting in an AccessorPair with 2 transitions.
+Object.defineProperty({},"foo",{set:function(){},configurable:false});
+Object.defineProperty({},"foo",{get:function(){},configurable:false});
+
+// Define a data property under the same name.
+Object.defineProperty({},"foo",{});
diff --git a/deps/v8/test/mjsunit/regress/regress-131923.js b/deps/v8/test/mjsunit/regress/regress-131923.js
new file mode 100644
index 0000000000..58da07cb91
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-131923.js
@@ -0,0 +1,30 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+assertFalse(/\u9999{4}/.test(""));
+assertTrue(/\u9999{0,4}/.test(""));
+assertFalse(/\u9999{4,}/.test(""));
diff --git a/deps/v8/test/mjsunit/regress/regress-1639-2.js b/deps/v8/test/mjsunit/regress/regress-1639-2.js
index c439dd8fff..01f0dc2048 100644
--- a/deps/v8/test/mjsunit/regress/regress-1639-2.js
+++ b/deps/v8/test/mjsunit/regress/regress-1639-2.js
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -28,6 +28,7 @@
// Flags: --expose-debug-as debug
// Get the Debug object exposed from the debug context global object.
Debug = debug.Debug
+var exception = false;
function sendCommand(state, cmd) {
// Get the debug command processor in paused state.
@@ -79,6 +80,7 @@ function listener(event, exec_state, event_data, data) {
}
} catch (e) {
print(e);
+ exception = true;
}
}
@@ -91,3 +93,4 @@ function a() {
// Set a break point and call to invoke the debug event listener.
Debug.setBreakPoint(a, 0, 0);
a();
+assertFalse(exception);
diff --git a/deps/v8/test/mjsunit/regress/regress-1639.js b/deps/v8/test/mjsunit/regress/regress-1639.js
index ed68c97df8..47cdbc43c1 100644
--- a/deps/v8/test/mjsunit/regress/regress-1639.js
+++ b/deps/v8/test/mjsunit/regress/regress-1639.js
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -29,6 +29,7 @@
// Get the Debug object exposed from the debug context global object.
Debug = debug.Debug
var breaks = 0;
+var exception = false;
function sendCommand(state, cmd) {
// Get the debug command processor in paused state.
@@ -47,15 +48,18 @@ function listener(event, exec_state, event_data, data) {
"should not break on unexpected lines")
assertEquals('BREAK ' + breaks, line.substr(-7));
breaks++;
- sendCommand(exec_state, {
- seq: 0,
- type: "request",
- command: "continue",
- arguments: { stepaction: "next" }
- });
+ if (breaks < 4) {
+ sendCommand(exec_state, {
+ seq: 0,
+ type: "request",
+ command: "continue",
+ arguments: { stepaction: "next" }
+ });
+ }
}
} catch (e) {
print(e);
+ exception = true;
}
}
@@ -82,4 +86,6 @@ function c() {
// Set a break point and call to invoke the debug event listener.
Debug.setBreakPoint(b, 0, 0);
a(b);
-// BREAK 3
+a(); // BREAK 3
+
+assertFalse(exception);
diff --git a/deps/v8/test/mjsunit/regress/regress-1849.js b/deps/v8/test/mjsunit/regress/regress-1849.js
index 176f918b93..5b8fc50f31 100644
--- a/deps/v8/test/mjsunit/regress/regress-1849.js
+++ b/deps/v8/test/mjsunit/regress/regress-1849.js
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// See: http://code.google.com/p/v8/issues/detail?id=1878
+// See: http://code.google.com/p/v8/issues/detail?id=1849
// Flags: --allow-natives-syntax
@@ -36,4 +36,4 @@ for (var i = 0; i < count; i++) {
arr[i] = 0;
}
assertFalse(%HasFastDoubleElements(arr));
-assertTrue(%HasFastSmiOnlyElements(arr));
+assertTrue(%HasFastSmiElements(arr));
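The %HasFastSmiOnlyElements/%HasFastElements assertions rewritten in these regression tests follow this upgrade's split of the old predicates into %HasFastSmiElements and %HasFastObjectElements (see the new elements-kind.h/elements-kind.cc in the diffstat). As an editor-added aside, not part of the commit, the sketch below shows the elements-kind transitions those predicates observe; it assumes the mjsunit harness (for assertTrue) and d8 run with --allow-natives-syntax.

// Editor's sketch: keyed stores walk the SMI -> DOUBLE -> OBJECT elements lattice.
var sample = [1, 2, 3];                      // small-integer literal: fast SMI elements
assertTrue(%HasFastSmiElements(sample));
sample[3] = 1.5;                             // storing a double transitions to double elements
assertTrue(%HasFastDoubleElements(sample));
sample[4] = {};                              // storing a heap object transitions to object elements
assertTrue(%HasFastObjectElements(sample));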
diff --git a/deps/v8/test/mjsunit/regress/regress-1878.js b/deps/v8/test/mjsunit/regress/regress-1878.js
index a1648b1217..fbc47bdd14 100644
--- a/deps/v8/test/mjsunit/regress/regress-1878.js
+++ b/deps/v8/test/mjsunit/regress/regress-1878.js
@@ -34,11 +34,11 @@ var a = Array();
for (var i = 0; i < 1000; i++) {
var ai = natives.InternalArray(10000);
assertFalse(%HaveSameMap(ai, a));
- assertTrue(%HasFastElements(ai));
+ assertTrue(%HasFastObjectElements(ai));
}
for (var i = 0; i < 1000; i++) {
var ai = new natives.InternalArray(10000);
assertFalse(%HaveSameMap(ai, a));
- assertTrue(%HasFastElements(ai));
+ assertTrue(%HasFastObjectElements(ai));
}
diff --git a/deps/v8/test/mjsunit/regress/regress-2030.js b/deps/v8/test/mjsunit/regress/regress-2030.js
new file mode 100644
index 0000000000..fb5a3d0c46
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-2030.js
@@ -0,0 +1,53 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+function a() {
+ this.x = 1;
+}
+var aa = new a();
+%DebugPrint(aa);
+
+function b() {
+ this.z = 23;
+ this.x = 2;
+}
+var bb = new b();
+%DebugPrint(bb);
+
+function f(o) {
+ return o.x;
+}
+
+assertSame(1, f(aa));
+assertSame(1, f(aa));
+assertSame(2, f(bb));
+assertSame(2, f(bb));
+%OptimizeFunctionOnNextCall(f);
+assertSame(1, f(aa));
+assertSame(2, f(bb));
diff --git a/deps/v8/test/mjsunit/regress/regress-2032.js b/deps/v8/test/mjsunit/regress/regress-2032.js
new file mode 100644
index 0000000000..ad6408d3d6
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-2032.js
@@ -0,0 +1,64 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// See: http://code.google.com/p/v8/issues/detail?id=2032
+
+// Case independent regexp that ends on the first character in a block.
+assertTrue(/[@-A]/i.test("a"));
+assertTrue(/[@-A]/i.test("A"));
+assertTrue(/[@-A]/i.test("@"));
+
+assertFalse(/[@-A]/.test("a"));
+assertTrue(/[@-A]/.test("A"));
+assertTrue(/[@-A]/.test("@"));
+
+assertFalse(/[¿-À]/i.test('¾'));
+assertTrue(/[¿-À]/i.test('¿'));
+assertTrue(/[¿-À]/i.test('À'));
+assertTrue(/[¿-À]/i.test('à'));
+assertFalse(/[¿-À]/i.test('á'));
+assertFalse(/[¿-À]/i.test('Á'));
+
+assertFalse(/[¿-À]/.test('¾'));
+assertTrue(/[¿-À]/.test('¿'));
+assertTrue(/[¿-À]/.test('À'));
+assertFalse(/[¿-À]/.test('à'));
+assertFalse(/[¿-À]/.test('á'));
+assertFalse(/[¿-À]/.test('á'));
+assertFalse(/[¿-À]/i.test('Á'));
+
+assertFalse(/[Ö-×]/i.test('Õ'));
+assertTrue(/[Ö-×]/i.test('Ö'));
+assertTrue(/[Ö-×]/i.test('ö'));
+assertTrue(/[Ö-×]/i.test('×'));
+assertFalse(/[Ö-×]/i.test('Ø'));
+
+assertFalse(/[Ö-×]/.test('Õ'));
+assertTrue(/[Ö-×]/.test('Ö'));
+assertFalse(/[Ö-×]/.test('ö'));
+assertTrue(/[Ö-×]/.test('×'));
+assertFalse(/[Ö-×]/.test('Ø'));
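The /i cases above turn on the canonicalization step of character-class matching: with the /i flag the input character is upper-cased before the range check, so 'a' is tested as 'A' and falls inside [@-A] even though the raw character does not. An editor-added check of that rule (assuming the mjsunit harness), not part of the commit:

// Editor's sketch: /i compares the canonical (upper-cased) form against the class range.
var ch = "a";
var canon = ch.toUpperCase();               // "A"
assertTrue(/[@-A]/i.test(ch));              // matches through the canonical form
assertTrue(canon >= "@" && canon <= "A");   // the same range check done by hand
assertFalse(/[@-A]/.test(ch));              // without /i the raw "a" is outside the range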
diff --git a/deps/v8/test/mjsunit/regress/regress-2034.js b/deps/v8/test/mjsunit/regress/regress-2034.js
new file mode 100644
index 0000000000..c510f97fc3
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-2034.js
@@ -0,0 +1,46 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-collections
+
+var key = {};
+var map = new WeakMap;
+Object.preventExtensions(key);
+
+// Try querying using frozen key.
+assertFalse(map.has(key));
+assertSame(undefined, map.get(key));
+
+// Try adding using frozen key.
+map.set(key, 1);
+assertTrue(map.has(key));
+assertSame(1, map.get(key));
+
+// Try deleting using frozen key.
+map.delete(key, 1);
+assertFalse(map.has(key));
+assertSame(undefined, map.get(key));
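What the test above exercises is that a WeakMap keeps the key-to-value association inside the collection rather than as a property on the key object, so a non-extensible (or frozen) key behaves like any other. A minimal editor-added sketch, assuming the mjsunit harness and an engine with WeakMap enabled (the bundled V8 still gates it behind --harmony-collections):

var frozenKey = Object.freeze({});
var wm = new WeakMap();
wm.set(frozenKey, "value");                      // no property is added to frozenKey itself
assertTrue(wm.has(frozenKey));
assertEquals("value", wm.get(frozenKey));
assertEquals(0, Object.keys(frozenKey).length);  // the key object stays untouched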
diff --git a/deps/v8/test/mjsunit/regress/regress-2054.js b/deps/v8/test/mjsunit/regress/regress-2054.js
new file mode 100644
index 0000000000..97b989c944
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-2054.js
@@ -0,0 +1,34 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that we can correctly optimize top level code that contains a
+// throw (or return) as its last statement.
+
+var N = 1e5; // Number of iterations that trigger optimization.
+for (var i = 0; i < N; i++) {
+ if (i > N) throw new Error;
+}
diff --git a/deps/v8/test/mjsunit/regress/regress-2055.js b/deps/v8/test/mjsunit/regress/regress-2055.js
new file mode 100644
index 0000000000..1eaf62c7da
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-2055.js
@@ -0,0 +1,48 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that array literal boilerplate objects can be transitioned while
+// existing un-transitioned clones are still being populated.
+
+function test1(depth) {
+ if (--depth < 0) {
+ return [];
+ } else {
+ return [ 0, test1(depth) ];
+ }
+}
+assertEquals([0,[0,[]]], test1(2));
+
+function test2(depth) {
+ if (--depth < 0) {
+ return [];
+ } else {
+ var o = [ 0, test2(depth) ];
+ return (depth == 0) ? 0.5 : o;
+ }
+}
+assertEquals([0,0.5], test2(2));
diff --git a/deps/v8/test/mjsunit/regress/regress-2058.js b/deps/v8/test/mjsunit/regress/regress-2058.js
new file mode 100644
index 0000000000..9a69ea1621
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-2058.js
@@ -0,0 +1,37 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+// See http://code.google.com/p/v8/issues/detail?id=2058
+
+// A match after a replace with a function argument needs to reset
+// the flag that determines whether we are using indices or substrings
+// to indicate the last match.
+"Now is the".replace(/Now (\w+) the/g, function() {
+ "foo bar".match(/( )/);
+ assertEquals(RegExp.$1, " ");
+})
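The fix above concerns the legacy RegExp static accessors (RegExp.$1 and friends), which always reflect the most recent successful match, including one performed inside a replace callback. An editor-added usage note (assuming the mjsunit harness), not part of the commit:

"foo bar".match(/( )/);           // the last successful match captured the space
assertEquals(" ", RegExp.$1);     // the static $1 reflects that capture
"foo bar".match(/(bar)/);         // a newer match overwrites the statics
assertEquals("bar", RegExp.$1);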
diff --git a/deps/v8/test/mjsunit/regress/regress-2110.js b/deps/v8/test/mjsunit/regress/regress-2110.js
new file mode 100644
index 0000000000..d7f78d26a7
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-2110.js
@@ -0,0 +1,53 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+var uint8 = new Uint8Array(1);
+
+function test() {
+ uint8[0] = 0x800000aa;
+ assertEquals(0xaa, uint8[0]);
+}
+
+test();
+test();
+test();
+%OptimizeFunctionOnNextCall(test);
+test();
+
+var uint32 = new Uint32Array(1);
+
+function test2() {
+ uint32[0] = 0x80123456789abcde;
+ assertEquals(0x789ac000, uint32[0]);
+}
+
+test2();
+test2();
+%OptimizeFunctionOnNextCall(test2);
+test2();
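The expected values above follow from the typed-array store conversion: the assigned number is converted to an integer and truncated to the element width, so only the low bits survive (0x800000aa keeps its low byte 0xaa in a Uint8Array). A small editor-added check of that rule for the 8-bit case, assuming the mjsunit harness:

var value = 0x800000aa;
assertEquals(0xaa, value % 0x100);   // low 8 bits, which is all a Uint8Array element keeps
var u8 = new Uint8Array(1);
u8[0] = value;
assertEquals(0xaa, u8[0]);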
diff --git a/deps/v8/test/mjsunit/regress/regress-2153.js b/deps/v8/test/mjsunit/regress/regress-2153.js
new file mode 100644
index 0000000000..3170042bed
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-2153.js
@@ -0,0 +1,32 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var o = {};
+o.__defineGetter__('foo', function () { return null; });
+var o = {};
+o.foo = 42;
+assertEquals(42, o.foo);
diff --git a/deps/v8/test/mjsunit/regress/regress-2163.js b/deps/v8/test/mjsunit/regress/regress-2163.js
new file mode 100644
index 0000000000..bfce9ff462
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-2163.js
@@ -0,0 +1,70 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-gc
+
+// Handy abbreviation.
+var dp = Object.defineProperty;
+
+function getter() { return 111; }
+function setter(x) { print(222); }
+function anotherGetter() { return 333; }
+function anotherSetter(x) { print(444); }
+var obj1, obj2;
+
+// obj1 and obj2 share the getter accessor.
+obj1 = {};
+dp(obj1, "alpha", { get: getter, set: setter });
+obj2 = {}
+dp(obj2, "alpha", { get: getter });
+obj1 = {};
+assertEquals(111, obj2.alpha);
+gc();
+assertEquals(111, obj2.alpha);
+
+// obj1, obj2, and obj3 share the getter accessor.
+obj1 = {};
+dp(obj1, "alpha", { get: getter, set: setter });
+obj2 = {}
+dp(obj2, "alpha", { get: getter });
+obj1 = {};
+gc();
+obj3 = {}
+dp(obj3, "alpha", { get: getter });
+
+
+// obj1 and obj2 share the getter and setter accessor.
+obj1 = {};
+dp(obj1, "alpha", { get: getter, set: setter });
+obj1.beta = 10;
+obj2 = {}
+dp(obj2, "alpha", { get: getter, set: setter });
+obj1 = {};
+assertEquals(111, obj2.alpha);
+gc();
+obj2.alpha = 100
+assertEquals(111, obj2.alpha);
diff --git a/deps/v8/test/mjsunit/regress/regress-2170.js b/deps/v8/test/mjsunit/regress/regress-2170.js
new file mode 100644
index 0000000000..01cb1eaf8f
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-2170.js
@@ -0,0 +1,58 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+function array_fun() {
+ for (var i = 0; i < 2; i++) {
+ var a = [1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8];
+ var x = new Array();
+ x.fixed$length = true;
+ for (var j = 0; j < a.length; j++) {
+ x.push(a[j]);
+ }
+ for(var j = 0; j < x.length; j++) {
+ if (typeof x[j] != 'number') {
+ throw "foo";
+ }
+ x[j] = x[j];
+ }
+ }
+}
+
+try {
+ for (var i = 0; i < 10; ++i) {
+ array_fun();
+ }
+ %OptimizeFunctionOnNextCall(array_fun);
+ for (var i = 0; i < 10; ++i) {
+ array_fun();
+ }
+} catch (e) {
+ assertUnreachable();
+}
+
diff --git a/deps/v8/test/mjsunit/regress/regress-334.js b/deps/v8/test/mjsunit/regress/regress-334.js
index 024fc9e856..37dd299cf5 100644
--- a/deps/v8/test/mjsunit/regress/regress-334.js
+++ b/deps/v8/test/mjsunit/regress/regress-334.js
@@ -40,7 +40,7 @@ var object = {__proto__:{}};
%SetProperty(object, "foo", func1, DONT_ENUM | DONT_DELETE);
%SetProperty(object, "bar", func1, DONT_ENUM | READ_ONLY);
%SetProperty(object, "baz", func1, DONT_DELETE | READ_ONLY);
-%SetProperty(object.__proto__, "bif", func1, DONT_ENUM | DONT_DELETE | READ_ONLY);
+%SetProperty(object.__proto__, "bif", func1, DONT_ENUM | DONT_DELETE);
object.bif = func2;
function enumerable(obj) {
diff --git a/deps/v8/test/mjsunit/regress/regress-crbug-122271.js b/deps/v8/test/mjsunit/regress/regress-crbug-122271.js
index 3a99a7fa58..8ae91e857a 100644
--- a/deps/v8/test/mjsunit/regress/regress-crbug-122271.js
+++ b/deps/v8/test/mjsunit/regress/regress-crbug-122271.js
@@ -39,11 +39,11 @@ function foo(array) {
array.foo = "bar";
}
-assertTrue(%HasFastSmiOnlyElements(a));
-assertTrue(%HasFastElements(b));
+assertTrue(%HasFastSmiElements(a));
+assertTrue(%HasFastObjectElements(b));
foo(a);
foo(b);
-assertTrue(%HasFastSmiOnlyElements(a));
-assertTrue(%HasFastElements(b));
+assertTrue(%HasFastSmiElements(a));
+assertTrue(%HasFastObjectElements(b));
diff --git a/deps/v8/test/mjsunit/regress/regress-deep-proto.js b/deps/v8/test/mjsunit/regress/regress-deep-proto.js
new file mode 100644
index 0000000000..5d2758cdd6
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-deep-proto.js
@@ -0,0 +1,45 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+function poly(x) {
+ return x.foo;
+}
+
+var one = {foo: 0};
+var two = {foo: 0, bar: 1};
+var three = {bar: 0};
+three.__proto__ = {};
+three.__proto__.__proto__ = {};
+three.__proto__.__proto__.__proto__ = {};
+three.__proto__.__proto__.__proto__.__proto__ = {};
+three.__proto__.__proto__.__proto__.__proto__.__proto__ = {};
+
+for (var i = 0; i < 1e6; i++) {
+ poly(one);
+ poly(two);
+ poly(three);
+}
diff --git a/deps/v8/test/mjsunit/regress/regress-fast-literal-transition.js b/deps/v8/test/mjsunit/regress/regress-fast-literal-transition.js
new file mode 100644
index 0000000000..72110f5be2
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-fast-literal-transition.js
@@ -0,0 +1,62 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --always-opt --expose-gc
+
+// Test that the elements kind of the boilerplate object is sufficiently
+// checked in LFastLiteral, so that unoptimized code can transition the
+// boilerplate. The --always-opt flag makes sure that optimized code is
+// not thrown away at deoptimization.
+
+// The switch statement in f() makes sure that f() is not inlined. If we
+// start inlining switch statements, we will still catch the bug on the
+// final --stress-opt run.
+
+function f(x) {
+ switch(x) {
+ case 1: return 1.4;
+ case 2: return 1.5;
+ case 3: return {};
+ default: gc();
+ }
+}
+
+function g(x) {
+ return [1.1, 1.2, 1.3, f(x)];
+}
+
+// Step 1: Optimize g() to contain a FAST_DOUBLE_ELEMENTS boilerplate.
+assertEquals([1.1, 1.2, 1.3, 1.4], g(1));
+assertEquals([1.1, 1.2, 1.3, 1.5], g(2));
+%OptimizeFunctionOnNextCall(g);
+
+// Step 2: Deoptimize g() and transition to FAST_ELEMENTS boilerplate.
+assertEquals([1.1, 1.2, 1.3, {}], g(3));
+
+// Step 3: Cause a GC while broken clone of boilerplate is on the heap,
+// hence causing heap verification to catch it.
+assertEquals([1.1, 1.2, 1.3, undefined], g(4));
diff --git a/deps/v8/test/mjsunit/regress/regress-iteration-order.js b/deps/v8/test/mjsunit/regress/regress-iteration-order.js
new file mode 100644
index 0000000000..76f5c3ffca
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-iteration-order.js
@@ -0,0 +1,42 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var x = {a: 1, b: 2, c: 3};
+
+x.__proto__ = {};
+
+delete x.b;
+
+x.d = 4;
+
+s = "";
+
+for (key in x) {
+ s += x[key];
+}
+
+assertEquals("134", s);
diff --git a/deps/v8/test/mjsunit/regress/regress-smi-only-concat.js b/deps/v8/test/mjsunit/regress/regress-smi-only-concat.js
index a9a6d89b06..55ca2996ff 100644
--- a/deps/v8/test/mjsunit/regress/regress-smi-only-concat.js
+++ b/deps/v8/test/mjsunit/regress/regress-smi-only-concat.js
@@ -33,5 +33,5 @@
var fast_array = ['a', 'b'];
var array = fast_array.concat(fast_array);
-assertTrue(%HasFastElements(fast_array));
-assertTrue(%HasFastElements(array));
\ No newline at end of file
+assertTrue(%HasFastObjectElements(fast_array));
+assertTrue(%HasFastObjectElements(array));
diff --git a/deps/v8/test/mjsunit/regress/regress-transcendental.js b/deps/v8/test/mjsunit/regress/regress-transcendental.js
new file mode 100644
index 0000000000..b5dbcb48af
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-transcendental.js
@@ -0,0 +1,49 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-gc
+
+// Test whether the runtime implementation and generated code of
+// sine and tangent return the same results.
+
+function test(f, x, name) {
+ // Reset transcendental cache.
+ gc();
+ // Initializing cache leads to a runtime call.
+ var runtime_result = f(x);
+ // Flush transcendental cache entries and optimize f.
+ for (var i = 0; i < 100000; i++) f(i);
+ // Calculate using generated code.
+ var gencode_result = f(x);
+ print(name + " runtime function: " + runtime_result);
+ print(name + " generated code : " + gencode_result);
+ assertEquals(gencode_result, runtime_result);
+}
+
+test(Math.tan, -1.57079632679489660000, "Math.tan");
+test(Math.sin, 6.283185307179586, "Math.sin");
+
diff --git a/deps/v8/test/mjsunit/stack-traces.js b/deps/v8/test/mjsunit/stack-traces.js
index 536e71bbb5..438eec979d 100644
--- a/deps/v8/test/mjsunit/stack-traces.js
+++ b/deps/v8/test/mjsunit/stack-traces.js
@@ -111,6 +111,18 @@ function testStrippedCustomError() {
throw new CustomError("hep-hey", CustomError);
}
+MyObj = function() { FAIL; }
+
+MyObjCreator = function() {}
+
+MyObjCreator.prototype.Create = function() {
+ return new MyObj();
+}
+
+function testClassNames() {
+ (new MyObjCreator).Create();
+}
+
// Utility function for testing that the expected strings occur
// in the stack trace produced when running the given function.
function testTrace(name, fun, expected, unexpected) {
@@ -254,6 +266,8 @@ testTrace("testDefaultCustomError", testDefaultCustomError,
["collectStackTrace"]);
testTrace("testStrippedCustomError", testStrippedCustomError, ["hep-hey"],
["new CustomError", "collectStackTrace"]);
+testTrace("testClassNames", testClassNames,
+ ["new MyObj", "MyObjCreator.Create"], ["as Create"]);
testCallerCensorship();
testUnintendedCallerCensorship();
testErrorsDuringFormatting();
diff --git a/deps/v8/test/mjsunit/try-finally-continue.js b/deps/v8/test/mjsunit/try-finally-continue.js
new file mode 100644
index 0000000000..b55e7acc78
--- /dev/null
+++ b/deps/v8/test/mjsunit/try-finally-continue.js
@@ -0,0 +1,72 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that we correctly restore the stack when continuing from a
+// finally block inside a for-in.
+
+var f = 0;
+var a = [1, 2, 3];
+
+for (x in a) {
+ try{
+ throw 'error';
+ } finally {
+ f++;
+ continue;
+ }
+}
+assertEquals(3, f);
+
+f = 0;
+for (x in a) {
+ try {
+ f++;
+ } finally {
+ f++;
+ continue;
+ }
+}
+assertEquals(6, f);
+
+f = 0;
+for (x in a) {
+ try {
+ f++;
+ } finally {
+ try {
+ throw 'error'
+ } finally {
+ try {
+ f++;
+ } finally {
+ f++;
+ continue;
+ }
+ }
+ }
+}
+assertEquals(9, f);
\ No newline at end of file
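These loops rely on the abrupt-completion rule for finally: when the finally block itself completes abruptly (here via continue), that completion replaces whatever the try block was propagating, so a thrown error is silently discarded and the loop keeps going. An editor-added minimal sketch of that rule, assuming the mjsunit harness:

var swallowed = 0;
for (var k = 0; k < 3; k++) {
  try {
    throw new Error("never seen");   // would normally abort the loop
  } finally {
    swallowed++;
    continue;                        // overrides the pending exception
  }
}
assertEquals(3, swallowed);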
diff --git a/deps/v8/test/mjsunit/unbox-double-arrays.js b/deps/v8/test/mjsunit/unbox-double-arrays.js
index fd7db28a0d..ac039930c3 100644
--- a/deps/v8/test/mjsunit/unbox-double-arrays.js
+++ b/deps/v8/test/mjsunit/unbox-double-arrays.js
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -278,7 +278,8 @@ function testOneArrayType(allocator) {
expected_array_value(7));
%DeoptimizeFunction(test_various_loads6);
- gc();
+ %ClearFunctionTypeFeedback(test_various_stores);
+ %ClearFunctionTypeFeedback(test_various_loads7);
// Test stores for non-NaN.
var large_array = new allocator(large_array_size);
@@ -376,7 +377,7 @@ delete large_array2[5];
// Convert back to fast elements and make sure the contents of the array are
// unchanged.
large_array2[25] = new Object();
-assertTrue(%HasFastElements(large_array2));
+assertTrue(%HasFastObjectElements(large_array2));
for (var i= 0; i < approx_dict_to_elements_threshold; i += 500 ) {
if (i != 25 && i != 5) {
assertEquals(expected_array_value(i), large_array2[i]);
diff --git a/deps/v8/test/mjsunit/unicodelctest-no-optimization.js b/deps/v8/test/mjsunit/unicodelctest-no-optimization.js
new file mode 100644
index 0000000000..3bcb5bf256
--- /dev/null
+++ b/deps/v8/test/mjsunit/unicodelctest-no-optimization.js
@@ -0,0 +1,4914 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+// Flags: --noregexp-optimization
+
+// This regexp should pick up all lower case characters. The non-BMP
+// characters are coded using explicit surrogate pairs.
+var re = /^([a-zªµºß-öø-ÿāăąćĉċčďđēĕėęěĝğġģĥħĩīĭįıijĵķ-ĸĺļľŀłńņň-ʼnŋōŏőœŕŗřśŝşšţťŧũūŭůűųŵŷźżž-ƀƃƅƈƌ-ƍƒƕƙ-ƛƞơƣƥƨƪ-ƫƭưƴƶƹ-ƺƽ-ƿdžljnjǎǐǒǔǖǘǚǜ-ǝǟǡǣǥǧǩǫǭǯ-ǰdzǵǹǻǽǿȁȃȅȇȉȋȍȏȑȓȕȗșțȝȟȡȣȥȧȩȫȭȯȱȳ-ȹȼȿ-ɀɂɇɉɋɍɏ-ʓʕ-ʯͱͳͷͻ-ͽΐά-ώϐ-ϑϕ-ϗϙϛϝϟϡϣϥϧϩϫϭϯ-ϳϵϸϻ-ϼа-џѡѣѥѧѩѫѭѯѱѳѵѷѹѻѽѿҁҋҍҏґғҕҗҙқҝҟҡңҥҧҩҫҭүұҳҵҷҹһҽҿӂӄӆӈӊӌӎ-ӏӑӓӕӗәӛӝӟӡӣӥӧөӫӭӯӱӳӵӷӹӻӽӿԁԃԅԇԉԋԍԏԑԓԕԗԙԛԝԟԡԣա-ևᴀ-ᴫᵢ-ᵷᵹ-ᶚḁḃḅḇḉḋḍḏḑḓḕḗḙḛḝḟḡḣḥḧḩḫḭḯḱḳḵḷḹḻḽḿṁṃṅṇṉṋṍṏṑṓṕṗṙṛṝṟṡṣṥṧṩṫṭṯṱṳṵṷṹṻṽṿẁẃẅẇẉẋẍẏẑẓẕ-ẝẟạảấầẩẫậắằẳẵặẹẻẽếềểễệỉịọỏốồổỗộớờởỡợụủứừửữựỳỵỷỹỻỽỿ-ἇἐ-ἕἠ-ἧἰ-ἷὀ-ὅὐ-ὗὠ-ὧὰ-ώᾀ-ᾇᾐ-ᾗᾠ-ᾧᾰ-ᾴᾶ-ᾷιῂ-ῄῆ-ῇῐ-ΐῖ-ῗῠ-ῧῲ-ῴῶ-ῷⁱⁿℊℎ-ℏℓℯℴℹℼ-ℽⅆ-ⅉⅎↄⰰ-ⱞⱡⱥ-ⱦⱨⱪⱬⱱⱳ-ⱴⱶ-ⱼⲁⲃⲅⲇⲉⲋⲍⲏⲑⲓⲕⲗⲙⲛⲝⲟⲡⲣⲥⲧⲩⲫⲭⲯⲱⲳⲵⲷⲹⲻⲽⲿⳁⳃⳅⳇⳉⳋⳍⳏⳑⳓⳕⳗⳙⳛⳝⳟⳡⳣ-ⳤⴀ-ⴥꙁꙃꙅꙇꙉꙋꙍꙏꙑꙓꙕꙗꙙꙛꙝꙟꙣꙥꙧꙩꙫꙭꚁꚃꚅꚇꚉꚋꚍꚏꚑꚓꚕꚗꜣꜥꜧꜩꜫꜭꜯ-ꜱꜳꜵꜷꜹꜻꜽꜿꝁꝃꝅꝇꝉꝋꝍꝏꝑꝓꝕꝗꝙꝛꝝꝟꝡꝣꝥꝧꝩꝫꝭꝯꝱ-ꝸꝺꝼꝿꞁꞃꞅꞇꞌff-stﬓ-ﬗa-z]|\ud801[\udc28-\udc4f]|\ud835[\udc1a-\udc33\udc4e-\udc54\udc56-\udc67\udc82-\udc9b\udcb6-\udcb9\udcbb\udcbd-\udcc3\udcc5-\udccf\udcea-\udd03\udd1e-\udd37\udd52-\udd6b\udd86-\udd9f\uddba-\uddd3\uddee-\ude07\ude22-\ude3b\ude56-\ude6f\ude8a-\udea5\udec2-\udeda\udedc-\udee1\udefc-\udf14\udf16-\udf1b\udf36-\udf4e\udf50-\udf55\udf70-\udf88\udf8a-\udf8f\udfaa-\udfc2\udfc4-\udfc9\udfcb])$/;
+
+
+var answer = get_answer();
+var fuzz_answer = get_fuzz_answer();
+
+
+for (var i = 0; i < 0x10000; i++) {
+ var s = String.fromCharCode(i);
+ assertTrue(!!re.test(s) == !!answer[i]);
+}
+
+
+function BuildSurrogatePair(c) {
+ return String.fromCharCode(+0xd800 + (c >> 10)) +
+ String.fromCharCode(+0xdc00 + (c & 0x3ff));
+}
+
+fuzz_index = 0;
+fuzz();
+
+for (var i = 0x10000; i < 0x110000 && i < answer.length + 256; i++) {
+ var c = i - 0x10000;
+ assertTrue(!!re.test(BuildSurrogatePair(c)) == !!answer[i]);
+}
+
+var seed = 49734321;
+
+function rand() {
+ // To make the test results predictable, we use a 100% deterministic
+ // alternative.
+ // Robert Jenkins' 32 bit integer hash function.
+ seed = ((seed + 0x7ed55d16) + (seed << 12)) & 0xffffffff;
+ seed = ((seed ^ 0xc761c23c) ^ (seed >>> 19)) & 0xffffffff;
+ seed = ((seed + 0x165667b1) + (seed << 5)) & 0xffffffff;
+ seed = ((seed + 0xd3a2646c) ^ (seed << 9)) & 0xffffffff;
+ seed = ((seed + 0xfd7046c5) + (seed << 3)) & 0xffffffff;
+ seed = ((seed ^ 0xb55a4f09) ^ (seed >>> 16)) & 0xffffffff;
+ return (seed & 0xffff)
+}
+
+
+// Random character.
+function rc(last) {
+ var c = rand();
+ // Increase the concentration of problematic values around the page
+ // edges.
+ if (rand() & 1) {
+ c = (c & 0xff80) + (c & 3) - 2;
+ }
+ // Increase the concentration of problematic values around the ends.
+ if (rand() & 31 == 0) c = 0xfff8 + (rand() & 7)
+ if (rand() & 31 == 0) c = (rand() & 7)
+
+ // Increase the concentration of values near each other.
+ if (rand() & 1) c = last + (rand() & 15) - 8;
+ return c & 0xffff; // Only code unit values.
+}
+
+
+function fuzz() {
+ fuzz_index = 0;
+ seed = 49734321;
+ for (var i = 0; i < 1000; i++) {
+ print(i);
+ var len = rand() & 0x1f;
+ var ranges = new Array(len);
+ var last = rand();
+ for (var j = 0; j < len; j++) {
+ ranges.push(last);
+ last = rc(last);
+ }
+ ranges.sort(function (a, b) { return a - b });
+ var cc = "";
+ for (var j = 0; j < len; j++) {
+ var ch = String.fromCharCode(ranges[j]);
+ if (ch == '\\' || ch == ']') ch = '\\' + ch;
+ cc += ch;
+ if (j < len - 1 && rand() & 1) cc += '-';
+ }
+ var negated = (last & 2) != 0;
+ var prefix = negated ? "[^" : "[";
+ var re = new RegExp(prefix + cc + "]");
+ for (var j = 0; j < len; j++) {
+ retest(re, (ranges[j] - 1), negated);
+ retest(re, (ranges[j]), negated);
+ retest(re, (ranges[j] + 1), negated);
+ }
+ }
+}
+
+
+function retest(re, code, negated) {
+ var s = String.fromCharCode(code >>> 0);
+ assertTrue(negated != (!!re.test(s) == !!fuzz_answer[fuzz_index++]));
+}
+
+
+function get_fuzz_answer() {
+ // Test data generated with V8 version 3.7.
+return [
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,
+
+
+ 0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,
+
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,
+
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+
+ 0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,
+ 0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+];
+}
+
+
+function get_answer() {
+ // Test data generated with V8 version 3.7.
+return [
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , ,1, , , , , , , , , , ,1, , , , ,1, , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1,1,1,1,1,1,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1, ,1, ,1, ,1, ,
+ 1, ,1, ,1, ,1, ,1,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, , ,1, ,1, ,1,1,
+ 1, , ,1, ,1, , ,1, , , ,1,1, , , , ,1, , ,1, , , ,1,1,1, , ,1, ,
+ ,1, ,1, ,1, , ,1, ,1,1, ,1, , ,1, , , ,1, ,1, , ,1,1, , ,1,1,1,
+ , , , , , ,1, , ,1, , ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1, , ,1, ,1, , , ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1,1,1,1,1,1, , ,1, , ,1,
+ 1, ,1, , , , ,1, ,1, ,1, ,1, ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , ,1, ,1, , , ,1, , , ,1,1,1, , ,
+ , , , , , , , , , , , , , , , ,1, , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1, , , ,1,1,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1,1,1,1, ,1, , ,1, , ,1,1, , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, , , , , , , , , ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ , ,1, ,1, ,1, ,1, ,1, ,1, ,1,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1,1,1,1,1,1,1,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ 1,1,1,1,1,1,1,1, , , , , , , , ,1,1,1,1,1,1, , , , , , , , , , ,
+ 1,1,1,1,1,1,1,1, , , , , , , , ,1,1,1,1,1,1,1,1, , , , , , , , ,
+ 1,1,1,1,1,1, , , , , , , , , , ,1,1,1,1,1,1,1,1, , , , , , , , ,
+ 1,1,1,1,1,1,1,1, , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , ,
+ 1,1,1,1,1,1,1,1, , , , , , , , ,1,1,1,1,1,1,1,1, , , , , , , , ,
+ 1,1,1,1,1,1,1,1, , , , , , , , ,1,1,1,1,1, ,1,1, , , , , , ,1, ,
+ , ,1,1,1, ,1,1, , , , , , , , ,1,1,1,1, , ,1,1, , , , , , , , ,
+ 1,1,1,1,1,1,1,1, , , , , , , , , , ,1,1,1, ,1,1, , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , ,1, , , , , , , , , , , , , ,1,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , ,1, , , ,1,1, , , ,1, , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , ,1, , , , ,1, , , , ,1, , ,1,1, , ,
+ , , , , , ,1,1,1,1, , , , ,1, , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , ,1, , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,
+ ,1, , , ,1,1, ,1, ,1, ,1, , , , ,1, ,1,1, ,1,1,1,1,1,1,1, , , ,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1,1, , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ , , ,1, ,1, ,1, ,1, ,1, ,1, , , , , , , , , , , , , , , , , , ,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , ,1, ,1, ,1, ,1, ,1, ,1, ,1,1,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1,1,1,1,1,1,1, ,1, ,1, , ,1,
+ ,1, ,1, ,1, ,1, , , , ,1, , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ 1,1,1,1,1,1,1, , , , , , , , , , , , ,1,1,1,1,1, , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , ,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , ,
+ , , , , , , , , , , , , , ,1,1,1,1,1,1,1, ,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , , , , , ,
+ , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , ,
+ , , , , , , , , , , , , , , , , , , , , , ,1,1,1,1, ,1, ,1,1,1,
+ 1,1,1,1, ,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , ,
+ , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1, , , , , , , , , , , , , , , , , , , , , , , , , , ,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , ,
+ , , , , , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , ,
+ , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ , , , , , , , , , , , , , , , , , , , , , , , , , ,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , ,
+ , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , , , , , ,
+ , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , ,
+ , , , , , , , , , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , ,
+ , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1,1,1,
+ 1,1, , , , , , , , , , , , , , , , , , , , , , , , , , ,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1,1,1,1,1, , , , ,
+ , , , , , , , , , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1,1,1,1,1, , , , , , , , , , ,
+ , , , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1, ,1,1,1,1,1,1, , , , , , , , , , , , , , , , ,
+ , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1, ,1,1,1,1,1,1, ,1];
+}
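
The table above is a sparse array literal: the empty slots between commas are holes, which read back as undefined, while a 1 marks a code unit for which a match is expected. The test loops coerce both sides with !!, so a hole simply means "no match expected". A minimal sketch of that coercion (illustrative only, not part of the patch):

    // Sketch: how a sparse answer table is read back. Empty slots are holes.
    var table = [ , , 1, , 1];   // holes at 0, 1 and 3; matches expected at 2 and 4
    !!table[0];                  // false -- a hole reads back as undefined
    !!table[2];                  // true  -- 1 coerces to true
    // The test loops compare this against !!re.test(s) for each code unit.
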
diff --git a/deps/v8/test/mjsunit/unicodelctest.js b/deps/v8/test/mjsunit/unicodelctest.js
new file mode 100644
index 0000000000..2caaabdcbe
--- /dev/null
+++ b/deps/v8/test/mjsunit/unicodelctest.js
@@ -0,0 +1,4912 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// This regexp should pick up all lower case characters. The non-BMP
+// characters are coded using explicit surrogate pairs.
+var re = /^([a-zªµºß-öø-ÿāăąćĉċčďđēĕėęěĝğġģĥħĩīĭįıijĵķ-ĸĺļľŀłńņň-ʼnŋōŏőœŕŗřśŝşšţťŧũūŭůűųŵŷźżž-ƀƃƅƈƌ-ƍƒƕƙ-ƛƞơƣƥƨƪ-ƫƭưƴƶƹ-ƺƽ-ƿdžljnjǎǐǒǔǖǘǚǜ-ǝǟǡǣǥǧǩǫǭǯ-ǰdzǵǹǻǽǿȁȃȅȇȉȋȍȏȑȓȕȗșțȝȟȡȣȥȧȩȫȭȯȱȳ-ȹȼȿ-ɀɂɇɉɋɍɏ-ʓʕ-ʯͱͳͷͻ-ͽΐά-ώϐ-ϑϕ-ϗϙϛϝϟϡϣϥϧϩϫϭϯ-ϳϵϸϻ-ϼа-џѡѣѥѧѩѫѭѯѱѳѵѷѹѻѽѿҁҋҍҏґғҕҗҙқҝҟҡңҥҧҩҫҭүұҳҵҷҹһҽҿӂӄӆӈӊӌӎ-ӏӑӓӕӗәӛӝӟӡӣӥӧөӫӭӯӱӳӵӷӹӻӽӿԁԃԅԇԉԋԍԏԑԓԕԗԙԛԝԟԡԣա-ևᴀ-ᴫᵢ-ᵷᵹ-ᶚḁḃḅḇḉḋḍḏḑḓḕḗḙḛḝḟḡḣḥḧḩḫḭḯḱḳḵḷḹḻḽḿṁṃṅṇṉṋṍṏṑṓṕṗṙṛṝṟṡṣṥṧṩṫṭṯṱṳṵṷṹṻṽṿẁẃẅẇẉẋẍẏẑẓẕ-ẝẟạảấầẩẫậắằẳẵặẹẻẽếềểễệỉịọỏốồổỗộớờởỡợụủứừửữựỳỵỷỹỻỽỿ-ἇἐ-ἕἠ-ἧἰ-ἷὀ-ὅὐ-ὗὠ-ὧὰ-ώᾀ-ᾇᾐ-ᾗᾠ-ᾧᾰ-ᾴᾶ-ᾷιῂ-ῄῆ-ῇῐ-ΐῖ-ῗῠ-ῧῲ-ῴῶ-ῷⁱⁿℊℎ-ℏℓℯℴℹℼ-ℽⅆ-ⅉⅎↄⰰ-ⱞⱡⱥ-ⱦⱨⱪⱬⱱⱳ-ⱴⱶ-ⱼⲁⲃⲅⲇⲉⲋⲍⲏⲑⲓⲕⲗⲙⲛⲝⲟⲡⲣⲥⲧⲩⲫⲭⲯⲱⲳⲵⲷⲹⲻⲽⲿⳁⳃⳅⳇⳉⳋⳍⳏⳑⳓⳕⳗⳙⳛⳝⳟⳡⳣ-ⳤⴀ-ⴥꙁꙃꙅꙇꙉꙋꙍꙏꙑꙓꙕꙗꙙꙛꙝꙟꙣꙥꙧꙩꙫꙭꚁꚃꚅꚇꚉꚋꚍꚏꚑꚓꚕꚗꜣꜥꜧꜩꜫꜭꜯ-ꜱꜳꜵꜷꜹꜻꜽꜿꝁꝃꝅꝇꝉꝋꝍꝏꝑꝓꝕꝗꝙꝛꝝꝟꝡꝣꝥꝧꝩꝫꝭꝯꝱ-ꝸꝺꝼꝿꞁꞃꞅꞇꞌff-stﬓ-ﬗa-z]|\ud801[\udc28-\udc4f]|\ud835[\udc1a-\udc33\udc4e-\udc54\udc56-\udc67\udc82-\udc9b\udcb6-\udcb9\udcbb\udcbd-\udcc3\udcc5-\udccf\udcea-\udd03\udd1e-\udd37\udd52-\udd6b\udd86-\udd9f\uddba-\uddd3\uddee-\ude07\ude22-\ude3b\ude56-\ude6f\ude8a-\udea5\udec2-\udeda\udedc-\udee1\udefc-\udf14\udf16-\udf1b\udf36-\udf4e\udf50-\udf55\udf70-\udf88\udf8a-\udf8f\udfaa-\udfc2\udfc4-\udfc9\udfcb])$/;
+
+
+var answer = get_answer();
+var fuzz_answer = get_fuzz_answer();
+
+
+for (var i = 0; i < 0x10000; i++) {
+ var s = String.fromCharCode(i);
+ assertTrue(!!re.test(s) == !!answer[i]);
+}
+
+
+function BuildSurrogatePair(c) {
+ return String.fromCharCode(+0xd800 + (c >> 10)) +
+ String.fromCharCode(+0xdc00 + (c & 0x3ff));
+}
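As a quick worked example of the arithmetic above (a minimal sketch, not part of the test file itself): U+10428, a lower-case Deseret letter, falls inside the \ud801[\udc28-\udc4f] alternative of the regexp. The caller subtracts 0x10000 before passing the value in, and BuildSurrogatePair then splits the remainder into two 10-bit halves:

// Illustrative sketch only; c, hi and lo are local names, not test code.
var c  = 0x10428 - 0x10000;      // 0x0428
var hi = 0xd800 + (c >> 10);     // 0xd801, the lead surrogate
var lo = 0xdc00 + (c & 0x3ff);   // 0xdc28, the trail surrogate
// BuildSurrogatePair(0x0428) therefore returns "\ud801\udc28".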
+
+fuzz_index = 0;
+fuzz();
+
+for (var i = 0x10000; i < 0x110000 && i < answer.length + 256; i++) {
+ var c = i - 0x10000;
+ assertTrue(!!re.test(BuildSurrogatePair(c)) == !!answer[i]);
+}
+
+var seed = 49734321;
+
+function rand() {
+ // To make the test results predictable, we use a 100% deterministic
+ // alternative.
+ // Robert Jenkins' 32 bit integer hash function.
+ seed = ((seed + 0x7ed55d16) + (seed << 12)) & 0xffffffff;
+ seed = ((seed ^ 0xc761c23c) ^ (seed >>> 19)) & 0xffffffff;
+ seed = ((seed + 0x165667b1) + (seed << 5)) & 0xffffffff;
+ seed = ((seed + 0xd3a2646c) ^ (seed << 9)) & 0xffffffff;
+ seed = ((seed + 0xfd7046c5) + (seed << 3)) & 0xffffffff;
+ seed = ((seed ^ 0xb55a4f09) ^ (seed >>> 16)) & 0xffffffff;
+ return (seed & 0xffff);
+}
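Because rand() depends only on the single seed variable, re-seeding with 49734321 replays exactly the same sequence, which is what keeps the pre-computed fuzz_answer table valid across runs. A minimal self-contained sketch of that property (makeRand is an illustrative wrapper, not part of the test file):

function makeRand(seed) {
  // Same Robert Jenkins style mixing steps as rand() above, but with the
  // state captured in a closure instead of a global variable.
  return function () {
    seed = ((seed + 0x7ed55d16) + (seed << 12)) & 0xffffffff;
    seed = ((seed ^ 0xc761c23c) ^ (seed >>> 19)) & 0xffffffff;
    seed = ((seed + 0x165667b1) + (seed << 5)) & 0xffffffff;
    seed = ((seed + 0xd3a2646c) ^ (seed << 9)) & 0xffffffff;
    seed = ((seed + 0xfd7046c5) + (seed << 3)) & 0xffffffff;
    seed = ((seed ^ 0xb55a4f09) ^ (seed >>> 16)) & 0xffffffff;
    return seed & 0xffff;
  };
}
var gen1 = makeRand(49734321);
var gen2 = makeRand(49734321);
for (var k = 0; k < 5; k++) {
  // Both generators started from the same seed, so they must agree.
  if (gen1() !== gen2()) throw new Error("sequence is not deterministic");
}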
+
+
+// Random character.
+function rc(last) {
+ var c = rand();
+ // Increase the concentration of problematic values around the page
+ // edges.
+ if (rand() & 1) {
+ c = (c & 0xff80) + (c & 3) - 2;
+ }
+ // Increase the concentration of problematic values around the ends.
+ if (rand() & 31 == 0) c = 0xfff8 + (rand() & 7);
+ if (rand() & 31 == 0) c = (rand() & 7);
+
+ // Increase the concentration of values near each other.
+ if (rand() & 1) c = last + (rand() & 15) - 8;
+ return c & 0xffff; // Only code unit values.
+}
+
+
+function fuzz() {
+ fuzz_index = 0;
+ seed = 49734321;
+ for (var i = 0; i < 1000; i++) {
+ var len = rand() & 0x1f;
+ var ranges = new Array(len);
+ var last = rand();
+ for (var j = 0; j < len; j++) {
+ ranges.push(last);
+ last = rc(last);
+ }
+ ranges.sort(function (a, b) { return a - b });
+ var cc = "";
+ for (var j = 0; j < len; j++) {
+ var ch = String.fromCharCode(ranges[j]);
+ if (ch == '\\' || ch == ']') ch = '\\' + ch;
+ cc += ch;
+ if (j < len - 1 && rand() & 1) cc += '-';
+ }
+ var negated = (last & 2) != 0;
+ var prefix = negated ? "[^" : "[";
+ var re = new RegExp(prefix + cc + "]");
+ for (var j = 0; j < len; j++) {
+ retest(re, (ranges[j] - 1), negated);
+ retest(re, (ranges[j]), negated);
+ retest(re, (ranges[j] + 1), negated);
+ }
+ }
+}
+
+
+function retest(re, code, negated) {
+ var s = String.fromCharCode(code >>> 0);
+ assertTrue(negated != (!!re.test(s) == !!fuzz_answer[fuzz_index++]));
+}
+
+
+function get_fuzz_answer() {
+ // Test data generated with V8 version 3.7.
+return [
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,
+
+
+ 0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,
+
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,
+
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+
+ 0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,
+ 0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+];
+}
+
+
+function get_answer() {
+ // Test data generated with V8 version 3.7.
+return [
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , ,1, , , , , , , , , , ,1, , , , ,1, , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1,1,1,1,1,1,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1, ,1, ,1, ,1, ,
+ 1, ,1, ,1, ,1, ,1,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, , ,1, ,1, ,1,1,
+ 1, , ,1, ,1, , ,1, , , ,1,1, , , , ,1, , ,1, , , ,1,1,1, , ,1, ,
+ ,1, ,1, ,1, , ,1, ,1,1, ,1, , ,1, , , ,1, ,1, , ,1,1, , ,1,1,1,
+ , , , , , ,1, , ,1, , ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1, , ,1, ,1, , , ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1,1,1,1,1,1, , ,1, , ,1,
+ 1, ,1, , , , ,1, ,1, ,1, ,1, ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , ,1, ,1, , , ,1, , , ,1,1,1, , ,
+ , , , , , , , , , , , , , , , ,1, , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1, , , ,1,1,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1,1,1,1, ,1, , ,1, , ,1,1, , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, , , , , , , , , ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ , ,1, ,1, ,1, ,1, ,1, ,1, ,1,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1,1,1,1,1,1,1,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ 1,1,1,1,1,1,1,1, , , , , , , , ,1,1,1,1,1,1, , , , , , , , , , ,
+ 1,1,1,1,1,1,1,1, , , , , , , , ,1,1,1,1,1,1,1,1, , , , , , , , ,
+ 1,1,1,1,1,1, , , , , , , , , , ,1,1,1,1,1,1,1,1, , , , , , , , ,
+ 1,1,1,1,1,1,1,1, , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , ,
+ 1,1,1,1,1,1,1,1, , , , , , , , ,1,1,1,1,1,1,1,1, , , , , , , , ,
+ 1,1,1,1,1,1,1,1, , , , , , , , ,1,1,1,1,1, ,1,1, , , , , , ,1, ,
+ , ,1,1,1, ,1,1, , , , , , , , ,1,1,1,1, , ,1,1, , , , , , , , ,
+ 1,1,1,1,1,1,1,1, , , , , , , , , , ,1,1,1, ,1,1, , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , ,1, , , , , , , , , , , , , ,1,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , ,1, , , ,1,1, , , ,1, , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , ,1, , , , ,1, , , , ,1, , ,1,1, , ,
+ , , , , , ,1,1,1,1, , , , ,1, , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , ,1, , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,
+ ,1, , , ,1,1, ,1, ,1, ,1, , , , ,1, ,1,1, ,1,1,1,1,1,1,1, , , ,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1,1, , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ , , ,1, ,1, ,1, ,1, ,1, ,1, , , , , , , , , , , , , , , , , , ,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , ,1, ,1, ,1, ,1, ,1, ,1, ,1,1,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1,1,1,1,1,1,1, ,1, ,1, , ,1,
+ ,1, ,1, ,1, ,1, , , , ,1, , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ 1,1,1,1,1,1,1, , , , , , , , , , , , ,1,1,1,1,1, , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ [956 identical rows of blank comma-separated cells elided]
+ , , , , , , , , , , , , , , , , , , , , , , , , , ,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , ,
+ , , , , , , , , , , , , , ,1,1,1,1,1,1,1, ,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , , , , , ,
+ , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , ,
+ , , , , , , , , , , , , , , , , , , , , , ,1,1,1,1, ,1, ,1,1,1,
+ 1,1,1,1, ,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , ,
+ , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1, , , , , , , , , , , , , , , , , , , , , , , , , , ,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , ,
+ , , , , , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , ,
+ , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ , , , , , , , , , , , , , , , , , , , , , , , , , ,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , ,
+ , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , , , , , ,
+ , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , ,
+ , , , , , , , , , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , ,
+ , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1,1,1,
+ 1,1, , , , , , , , , , , , , , , , , , , , , , , , , , ,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1,1,1,1,1, , , , ,
+ , , , , , , , , , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1,1,1,1,1, , , , , , , , , , ,
+ , , , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1, ,1,1,1,1,1,1, , , , , , , , , , , , , , , , ,
+ , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1, ,1,1,1,1,1,1, ,1];
+}
diff --git a/deps/v8/test/mjsunit/with-readonly.js b/deps/v8/test/mjsunit/with-readonly.js
index e29520a4dc..29982b3474 100644
--- a/deps/v8/test/mjsunit/with-readonly.js
+++ b/deps/v8/test/mjsunit/with-readonly.js
@@ -27,6 +27,8 @@
// Test that readonly variables are treated correctly.
+// Flags: --es5_readonly
+
// Create an object with a read-only length property in the prototype
// chain by putting the string split function in the prototype chain.
var o = {};
@@ -36,8 +38,8 @@ function f() {
with (o) {
length = 23;
length = 24;
- assertEquals(24, length);
+ assertEquals(2, length);
}
+ assertEquals(2, o.length);
}
f();
-
diff --git a/deps/v8/test/mozilla/mozilla.status b/deps/v8/test/mozilla/mozilla.status
index e64959acfc..87d7bd2908 100644
--- a/deps/v8/test/mozilla/mozilla.status
+++ b/deps/v8/test/mozilla/mozilla.status
@@ -126,13 +126,13 @@ ecma/Date/15.9.2.2-5: PASS || FAIL
ecma/Date/15.9.2.2-6: PASS || FAIL
# 1026139: These date tests fail on arm and mips
-ecma/Date/15.9.5.29-1: PASS || (($ARM || $MIPS) && FAIL)
-ecma/Date/15.9.5.34-1: PASS || (($ARM || $MIPS) && FAIL)
-ecma/Date/15.9.5.28-1: PASS || (($ARM || $MIPS) && FAIL)
+ecma/Date/15.9.5.29-1: PASS || FAIL if ($arch == arm || $arch == mips)
+ecma/Date/15.9.5.34-1: PASS || FAIL if ($arch == arm || $arch == mips)
+ecma/Date/15.9.5.28-1: PASS || FAIL if ($arch == arm || $arch == mips)
# 1050186: Arm/MIPS vm is broken; probably unrelated to dates
-ecma/Array/15.4.4.5-3: PASS || (($ARM || $MIPS) && FAIL)
-ecma/Date/15.9.5.22-2: PASS || (($ARM || $MIPS) && FAIL)
+ecma/Array/15.4.4.5-3: PASS || FAIL if ($arch == arm || $arch == mips)
+ecma/Date/15.9.5.22-2: PASS || FAIL if ($arch == arm || $arch == mips)
# Flaky test that fails due to what appears to be a bug in the test.
# Occurs depending on current time
@@ -245,9 +245,6 @@ js1_5/Function/regress-338121-03: FAIL_OK
# Expects 'prototype' property of functions to be enumerable.
js1_5/Function/10.1.6-01: FAIL_OK
-# Length of objects whose prototype chain includes a function
-ecma_3/Function/regress-313570: FAIL_OK
-
# toPrecision argument restricted to range 1..21 in JSC/V8
js1_5/Regress/regress-452346: FAIL_OK
ecma_3/Number/15.7.4.7-1: FAIL_OK
@@ -592,6 +589,20 @@ js1_5/Regress/regress-416737-01: FAIL_OK
js1_5/Regress/regress-416737-02: FAIL_OK
+# Illegal escape-sequences in string literals. Has already been fixed
+# by most engines (i.e. V8, JSC, Opera and FF).
+ecma/Array/15.4.5.1-1: FAIL_OK
+ecma/LexicalConventions/7.7.4: FAIL_OK
+ecma_2/RegExp/hex-001: FAIL_OK
+js1_2/regexp/hexadecimal: FAIL_OK
+
+
+# The source field of RegExp objects is properly escaped. We match JSC.
+ecma_2/RegExp/constructor-001: FAIL_OK
+ecma_2/RegExp/function-001: FAIL_OK
+ecma_2/RegExp/properties-001: FAIL_OK
+
+
##################### FAILING TESTS #####################
# This section is for tests that fail in V8 and pass in JSC.
@@ -737,7 +748,6 @@ js1_5/extensions/regress-90596-001: FAIL_OK
js1_5/extensions/regress-90596-002: FAIL_OK
js1_5/extensions/regress-96284-001: FAIL_OK
js1_5/extensions/regress-96284-002: FAIL_OK
-js1_5/extensions/scope-001: FAIL_OK
js1_5/extensions/toLocaleFormat-01: FAIL_OK
js1_5/extensions/toLocaleFormat-02: FAIL_OK
diff --git a/deps/v8/test/mozilla/testcfg.py b/deps/v8/test/mozilla/testcfg.py
index 587781d11f..e88164d22c 100644
--- a/deps/v8/test/mozilla/testcfg.py
+++ b/deps/v8/test/mozilla/testcfg.py
@@ -76,6 +76,7 @@ class MozillaTestCase(test.TestCase):
def GetCommand(self):
result = self.context.GetVmCommand(self, self.mode) + \
[ '--expose-gc', join(self.root, 'mozilla-shell-emulation.js') ]
+ result += [ '--es5_readonly' ] # Temporary hack until we can remove flag
result += self.framework
result.append(self.filename)
return result
diff --git a/deps/v8/test/sputnik/sputnik.status b/deps/v8/test/sputnik/sputnik.status
index a4c7d57ff0..52d126e65b 100644
--- a/deps/v8/test/sputnik/sputnik.status
+++ b/deps/v8/test/sputnik/sputnik.status
@@ -52,36 +52,14 @@ S15.10.2.11_A1_T3: FAIL
# We are more lenient in which string character escapes we allow than
# the spec (7.8.4 p. 19) wants us to be. This is for compatibility.
-S7.8.4_A4.3_T2: FAIL_OK
-S7.8.4_A4.3_T2: FAIL_OK
-S7.8.4_A6.2_T2: FAIL_OK
-S7.8.4_A6.1_T4: FAIL_OK
-S7.8.4_A4.3_T4: FAIL_OK
-S7.8.4_A7.2_T2: FAIL_OK
-S7.8.4_A7.1_T4: FAIL_OK
-S7.8.4_A6.4_T2: FAIL_OK
-S7.8.4_A7.4_T2: FAIL_OK
-S7.8.4_A7.2_T4: FAIL_OK
-S7.8.4_A4.3_T6: FAIL_OK
-S7.8.4_A7.2_T6: FAIL_OK
-S7.8.4_A4.3_T1: FAIL_OK
-S7.8.4_A6.2_T1: FAIL_OK
-S7.8.4_A4.3_T3: FAIL_OK
-S7.8.4_A7.2_T1: FAIL_OK
-S7.8.4_A6.4_T1: FAIL_OK
-S7.8.4_A7.2_T3: FAIL_OK
-S7.8.4_A7.4_T1: FAIL_OK
-S7.8.4_A4.3_T5: FAIL_OK
-S7.8.4_A7.2_T5: FAIL_OK
S7.8.4_A4.3_T1: FAIL_OK
-S7.8.4_A6.2_T1: FAIL_OK
+S7.8.4_A4.3_T2: FAIL_OK
S7.8.4_A4.3_T3: FAIL_OK
-S7.8.4_A7.2_T1: FAIL_OK
+S7.8.4_A4.3_T4: FAIL_OK
S7.8.4_A6.4_T1: FAIL_OK
-S7.8.4_A7.2_T3: FAIL_OK
+S7.8.4_A6.4_T2: FAIL_OK
S7.8.4_A7.4_T1: FAIL_OK
-S7.8.4_A4.3_T5: FAIL_OK
-S7.8.4_A7.2_T5: FAIL_OK
+S7.8.4_A7.4_T2: FAIL_OK
# Sputnik expects unicode escape sequences in RegExp flags to be interpreted.
# The specification requires them to be passed uninterpreted to the RegExp
@@ -146,6 +124,16 @@ S15.3.4.2_A1_T1: FAIL_OK
S8.5_A2.2: PASS, FAIL if $system == linux, FAIL if $system == macos
S8.5_A2.1: PASS, FAIL if $system == linux, FAIL if $system == macos
+# The source field of RegExp objects is properly escaped. We match JSC.
+S15.10.4.1_A3_T1: FAIL_OK
+S15.10.4.1_A3_T2: FAIL_OK
+S15.10.4.1_A3_T3: FAIL_OK
+S15.10.4.1_A3_T4: FAIL_OK
+S15.10.4.1_A3_T5: FAIL_OK
+S15.10.4.1_A4_T2: FAIL_OK
+S15.10.4.1_A4_T3: FAIL_OK
+S15.10.4.1_A4_T5: FAIL_OK
+
##################### ES3 TESTS #########################
# These tests check for ES3 semantics, and differ from ES5.
# When we follow ES5 semantics, it's ok to fail the test.
diff --git a/deps/v8/test/test262/README b/deps/v8/test/test262/README
index dae18433a5..59e7f5eb8b 100644
--- a/deps/v8/test/test262/README
+++ b/deps/v8/test/test262/README
@@ -4,11 +4,11 @@ tests from
http://hg.ecmascript.org/tests/test262
-at revision 309 as 'data' in this directory. Using later version
+at revision 334 as 'data' in this directory. Using a later version
may be possible but the tests are only known to pass (and indeed run)
with that revision.
-hg clone -r 309 http://hg.ecmascript.org/tests/test262 data
+hg clone -r 334 http://hg.ecmascript.org/tests/test262 data
If you do update to a newer revision you may have to change the test
harness adapter code since it uses internal functionality from the
diff --git a/deps/v8/test/test262/test262.status b/deps/v8/test/test262/test262.status
index 3f395bdcd1..567a78ec84 100644
--- a/deps/v8/test/test262/test262.status
+++ b/deps/v8/test/test262/test262.status
@@ -33,11 +33,11 @@ def FAIL_OK = FAIL, OKAY
# '__proto__' should be treated as a normal property in JSON.
S15.12.2_A1: FAIL
-# V8 Bug: http://code.google.com/p/v8/issues/detail?id=1475
-15.2.3.6-4-405: FAIL
-15.2.3.6-4-410: FAIL
-15.2.3.6-4-415: FAIL
-15.2.3.6-4-420: FAIL
+# Sequencing of getter side effects on receiver and argument properties
+# is wrong. The receiver callback should be called before any arguments
+# are evaluated.
+# V8 Bug: http://code.google.com/p/v8/issues/detail?id=691
+11.2.3-3_3: FAIL
##################### DELIBERATE INCOMPATIBILITIES #####################
@@ -52,19 +52,6 @@ S15.1.2.2_A5.1_T1: FAIL_OK
S15.8.2.16_A7: PASS || FAIL_OK
S15.8.2.18_A7: PASS || FAIL_OK
-# We are more lenient in which string character escapes we allow than
-# the spec (7.8.4 p. 19) wants us to be. This is for compatibility.
-S7.8.4_A6.1_T4: FAIL_OK
-S7.8.4_A6.2_T1: FAIL_OK
-S7.8.4_A6.2_T2: FAIL_OK
-S7.8.4_A7.1_T4: FAIL_OK
-S7.8.4_A7.2_T1: FAIL_OK
-S7.8.4_A7.2_T2: FAIL_OK
-S7.8.4_A7.2_T3: FAIL_OK
-S7.8.4_A7.2_T4: FAIL_OK
-S7.8.4_A7.2_T5: FAIL_OK
-S7.8.4_A7.2_T6: FAIL_OK
-
# Linux for ia32 (and therefore simulators) default to extended 80 bit floating
# point formats, so these tests checking 64-bit FP precision fail. The other
# platforms/arch's pass these tests.
diff --git a/deps/v8/test/test262/testcfg.py b/deps/v8/test/test262/testcfg.py
index b05b205dd6..c394cc8a5f 100644
--- a/deps/v8/test/test262/testcfg.py
+++ b/deps/v8/test/test262/testcfg.py
@@ -1,4 +1,4 @@
-# Copyright 2011 the V8 project authors. All rights reserved.
+# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -35,8 +35,8 @@ import sys
import tarfile
-TEST_262_ARCHIVE_REVISION = '3a890174343c' # This is the r309 revision.
-TEST_262_ARCHIVE_MD5 = 'be5d4cfbe69cef70430907b8f3a92b50'
+TEST_262_ARCHIVE_REVISION = 'fb327c439e20' # This is the r334 revision.
+TEST_262_ARCHIVE_MD5 = '307acd166ec34629592f240dc12d57ed'
TEST_262_URL = 'http://hg.ecmascript.org/tests/test262/archive/%s.tar.bz2'
TEST_262_HARNESS = ['sta.js']
@@ -62,6 +62,7 @@ class Test262TestCase(test.TestCase):
def GetCommand(self):
result = self.context.GetVmCommand(self, self.mode)
+ result += [ '--es5_readonly' ] # Temporary hack until we can remove flag
result += self.framework
result.append(self.filename)
return result
@@ -104,27 +105,29 @@ class Test262TestConfiguration(test.TestConfiguration):
revision = TEST_262_ARCHIVE_REVISION
archive_url = TEST_262_URL % revision
archive_name = join(self.root, 'test262-%s.tar.bz2' % revision)
- directory_name = join(self.root, "test262-%s" % revision)
- if not exists(directory_name) or not exists(archive_name):
- if not exists(archive_name):
- print "Downloading test data from %s ..." % archive_url
- urllib.urlretrieve(archive_url, archive_name)
- if not exists(directory_name):
- print "Extracting test262-%s.tar.bz2 ..." % revision
- md5 = hashlib.md5()
- with open(archive_name,'rb') as f:
- for chunk in iter(lambda: f.read(8192), ''):
- md5.update(chunk)
- if md5.hexdigest() != TEST_262_ARCHIVE_MD5:
- raise Exception("Hash mismatch of test data file")
- archive = tarfile.open(archive_name, 'r:bz2')
- if sys.platform in ('win32', 'cygwin'):
- # Magic incantation to allow longer path names on Windows.
- archive.extractall(u'\\\\?\\%s' % self.root)
- else:
- archive.extractall(self.root)
- if not exists(join(self.root, 'data')):
- os.symlink(directory_name, join(self.root, 'data'))
+ directory_name = join(self.root, 'data')
+ directory_old_name = join(self.root, 'data.old')
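+ # Flow of the steps below: fetch the archive if it is missing, move any
+ # existing 'data' directory aside, verify the archive checksum (removing
+ # a corrupt download so the next run refetches it), extract, and finally
+ # rename the extracted test262-<revision> directory to 'data'.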
+ if not exists(archive_name):
+ print "Downloading test data from %s ..." % archive_url
+ urllib.urlretrieve(archive_url, archive_name)
+ if exists(directory_name):
+ os.rename(directory_name, directory_old_name)
+ if not exists(directory_name):
+ print "Extracting test262-%s.tar.bz2 ..." % revision
+ md5 = hashlib.md5()
+ with open(archive_name,'rb') as f:
+ for chunk in iter(lambda: f.read(8192), ''):
+ md5.update(chunk)
+ if md5.hexdigest() != TEST_262_ARCHIVE_MD5:
+ os.remove(archive_name)
+ raise Exception("Hash mismatch of test data file")
+ archive = tarfile.open(archive_name, 'r:bz2')
+ if sys.platform in ('win32', 'cygwin'):
+ # Magic incantation to allow longer path names on Windows.
+ archive.extractall(u'\\\\?\\%s' % self.root)
+ else:
+ archive.extractall(self.root)
+ os.rename(join(self.root, 'test262-%s' % revision), directory_name)
def GetBuildRequirements(self):
return ['d8']
diff --git a/deps/v8/tools/fuzz-harness.sh b/deps/v8/tools/fuzz-harness.sh
new file mode 100644
index 0000000000..efbf8646ce
--- /dev/null
+++ b/deps/v8/tools/fuzz-harness.sh
@@ -0,0 +1,92 @@
+#!/bin/bash
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# A simple harness that downloads and runs 'jsfunfuzz' against d8. This
+# takes a long time because it runs many iterations and is intended for
+# automated usage. The package containing 'jsfunfuzz' can be found as an
+# attachment to this bug:
+# https://bugzilla.mozilla.org/show_bug.cgi?id=jsfunfuzz
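+#
+# Example invocation (the build path is illustrative; the argument is the
+# d8 location relative to the V8 checkout root and defaults to ./d8):
+#   tools/fuzz-harness.sh out/ia32.release/d8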
+
+JSFUNFUZZ_URL="https://bugzilla.mozilla.org/attachment.cgi?id=310631"
+JSFUNFUZZ_MD5="d0e497201c5cd7bffbb1cdc1574f4e32"
+
+v8_root=$(readlink -f $(dirname $BASH_SOURCE)/../)
+
+if [ -n "$1" ]; then
+ d8="${v8_root}/$1"
+else
+ d8="${v8_root}/d8"
+fi
+
+if [ ! -f "$d8" ]; then
+ echo "Failed to find d8 binary: $d8"
+ exit 1
+fi
+
+jsfunfuzz_file="$v8_root/tools/jsfunfuzz.zip"
+if [ ! -f "$jsfunfuzz_file" ]; then
+ echo "Downloading $jsfunfuzz_file ..."
+ wget -q -O "$jsfunfuzz_file" $JSFUNFUZZ_URL || exit 1
+fi
+
+jsfunfuzz_sum=$(md5sum "$jsfunfuzz_file" | awk '{ print $1 }')
+if [ $jsfunfuzz_sum != $JSFUNFUZZ_MD5 ]; then
+ echo "Failed to verify checksum!"
+ exit 1
+fi
+
+jsfunfuzz_dir="$v8_root/tools/jsfunfuzz"
+if [ ! -d "$jsfunfuzz_dir" ]; then
+ echo "Unpacking into $jsfunfuzz_dir ..."
+ unzip "$jsfunfuzz_file" -d "$jsfunfuzz_dir" || exit 1
+ echo "Patching runner ..."
+ cat << EOF | patch -s -p0 -d "$v8_root"
+--- tools/jsfunfuzz/jsfunfuzz/multi_timed_run.py~
++++ tools/jsfunfuzz/jsfunfuzz/multi_timed_run.py
+@@ -125,7 +125,7 @@
+
+ def many_timed_runs():
+ iteration = 0
+- while True:
++ while iteration < 100:
+ iteration += 1
+ logfilename = "w%d" % iteration
+ one_timed_run(logfilename)
+EOF
+fi
+
+flags='--debug-code --expose-gc --verify-gc'
+python -u "$jsfunfuzz_dir/jsfunfuzz/multi_timed_run.py" 300 \
+ "$d8" $flags "$jsfunfuzz_dir/jsfunfuzz/jsfunfuzz.js"
+exit_code=$(cat w* | grep " looking good" -c)
+exit_code=$((100-exit_code))
+tar -cjf fuzz-results-$(date +%y%m%d).tar.bz2 err-* w*
+rm -f err-* w*
+
+echo "Total failures: $exit_code"
+exit $exit_code
diff --git a/deps/v8/tools/grokdump.py b/deps/v8/tools/grokdump.py
index 9977289872..a9f0cb9ddc 100755
--- a/deps/v8/tools/grokdump.py
+++ b/deps/v8/tools/grokdump.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
#
-# Copyright 2011 the V8 project authors. All rights reserved.
+# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -27,6 +27,7 @@
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+import cmd
import ctypes
import mmap
import optparse
@@ -36,21 +37,20 @@ import sys
import types
import codecs
import re
+import struct
-USAGE="""usage: %prog [OPTION]...
+USAGE="""usage: %prog [OPTIONS] [DUMP-FILE]
Minidump analyzer.
Shows the processor state at the point of exception including the
stack of the active thread and the referenced objects in the V8
heap. Code objects are disassembled and the addresses linked from the
-stack (pushed return addresses) are marked with "=>".
-
+stack (e.g. pushed return addresses) are marked with "=>".
Examples:
- $ %prog 12345678-1234-1234-1234-123456789abcd-full.dmp
-"""
+ $ %prog 12345678-1234-1234-1234-123456789abcd-full.dmp"""
DEBUG=False
@@ -106,6 +106,62 @@ class Descriptor(object):
return Raw
+def FullDump(reader, heap):
+ """Dump all available memory regions."""
+ def dump_region(reader, start, size, location):
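+ # The two probes below return True, None (maybe) or False; a region that
+ # is not clearly non-executable gets disassembled, one that is not clearly
+ # non-ASCII gets an 'hd'-style hex dump, and one that is not definitely
+ # either gets dumped as pointer-sized words annotated via the V8 heap.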
+ print
+ while start & 3 != 0:
+ start += 1
+ size -= 1
+ location += 1
+ is_executable = reader.IsProbableExecutableRegion(location, size)
+ is_ascii = reader.IsProbableASCIIRegion(location, size)
+
+ if is_executable is not False:
+ lines = reader.GetDisasmLines(start, size)
+ for line in lines:
+ print FormatDisasmLine(start, heap, line)
+ print
+
+ if is_ascii is not False:
+ # Output in the same format as the Unix hd command
+ addr = start
+ for slot in xrange(location, location + size, 16):
+ hex_line = ""
+ asc_line = ""
+ for i in xrange(0, 16):
+ if slot + i < location + size:
+ byte = ctypes.c_uint8.from_buffer(reader.minidump, slot + i).value
+ if byte >= 0x20 and byte < 0x7f:
+ asc_line += chr(byte)
+ else:
+ asc_line += "."
+ hex_line += " %02x" % (byte)
+ else:
+ hex_line += " "
+ if i == 7:
+ hex_line += " "
+ print "%s %s |%s|" % (reader.FormatIntPtr(addr),
+ hex_line,
+ asc_line)
+ addr += 16
+
+ if is_executable is not True and is_ascii is not True:
+ print "%s - %s" % (reader.FormatIntPtr(start),
+ reader.FormatIntPtr(start + size))
+ for slot in xrange(start,
+ start + size,
+ reader.PointerSize()):
+ maybe_address = reader.ReadUIntPtr(slot)
+ heap_object = heap.FindObject(maybe_address)
+ print "%s: %s" % (reader.FormatIntPtr(slot),
+ reader.FormatIntPtr(maybe_address))
+ if heap_object:
+ heap_object.Print(Printer())
+ print
+
+ reader.ForEachMemoryRegion(dump_region)
+
# Set of structures and constants that describe the layout of minidump
# files. Based on MSDN and Google Breakpad.
@@ -362,7 +418,7 @@ class MinidumpReader(object):
self.minidump = mmap.mmap(self.minidump_file.fileno(), 0, mmap.MAP_PRIVATE)
self.header = MINIDUMP_HEADER.Read(self.minidump, 0)
if self.header.signature != MinidumpReader._HEADER_MAGIC:
- print >>sys.stderr, "Warning: unsupported minidump header magic"
+ print >>sys.stderr, "Warning: Unsupported minidump header magic!"
DebugPrint(self.header)
directories = []
offset = self.header.stream_directories_rva
@@ -406,7 +462,7 @@ class MinidumpReader(object):
DebugPrint(thread)
self.thread_map[thread.id] = thread
elif d.stream_type == MD_MEMORY_LIST_STREAM:
- print >>sys.stderr, "Warning: not a full minidump"
+ print >>sys.stderr, "Warning: This is not a full minidump!"
assert self.memory_list is None
self.memory_list = MINIDUMP_MEMORY_LIST.Read(
self.minidump, d.location.rva)
@@ -444,6 +500,91 @@ class MinidumpReader(object):
location = self.FindLocation(address)
return self.minidump[location:location + size]
+ def _ReadWord(self, location):
+ if self.arch == MD_CPU_ARCHITECTURE_AMD64:
+ return ctypes.c_uint64.from_buffer(self.minidump, location).value
+ elif self.arch == MD_CPU_ARCHITECTURE_X86:
+ return ctypes.c_uint32.from_buffer(self.minidump, location).value
+
+ def IsProbableASCIIRegion(self, location, length):
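+ # Heuristic classification: counts printable bytes (0x20..0x7e plus
+ # newlines) against control/high bytes and returns True (probably ASCII),
+ # None (maybe) or False, so callers can treat "maybe" regions both ways.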
+ ascii_bytes = 0
+ non_ascii_bytes = 0
+ for loc in xrange(location, location + length):
+ byte = ctypes.c_uint8.from_buffer(self.minidump, loc).value
+ if byte >= 0x7f:
+ non_ascii_bytes += 1
+ if byte < 0x20 and byte != 0:
+ non_ascii_bytes += 1
+ if byte < 0x7f and byte >= 0x20:
+ ascii_bytes += 1
+ if byte == 0xa: # newline
+ ascii_bytes += 1
+ if ascii_bytes * 10 <= length:
+ return False
+ if length > 0 and ascii_bytes > non_ascii_bytes * 7:
+ return True
+ if ascii_bytes > non_ascii_bytes * 3:
+ return None # Maybe
+ return False
+
+ def IsProbableExecutableRegion(self, location, length):
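+ # Heuristic: counts bytes that look like common ia32/x64 opcodes (mov,
+ # push/pop, call, jmp, ret, conditional jumps, and REX prefixes on x64)
+ # and compares their density against a ~20% threshold, returning True,
+ # None (maybe) or False like IsProbableASCIIRegion above.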
+ opcode_bytes = 0
+ sixty_four = self.arch == MD_CPU_ARCHITECTURE_AMD64
+ for loc in xrange(location, location + length):
+ byte = ctypes.c_uint8.from_buffer(self.minidump, loc).value
+ if (byte == 0x8b or # mov
+ byte == 0x89 or # mov reg-reg
+ (byte & 0xf0) == 0x50 or # push/pop
+ (sixty_four and (byte & 0xf0) == 0x40) or # rex prefix
+ byte == 0xc3 or # return
+ byte == 0x74 or # jeq
+ byte == 0x84 or # jeq far
+ byte == 0x75 or # jne
+ byte == 0x85 or # jne far
+ byte == 0xe8 or # call
+ byte == 0xe9 or # jmp far
+ byte == 0xeb): # jmp near
+ opcode_bytes += 1
+ opcode_percent = (opcode_bytes * 100) / length
+ threshold = 20
+ if opcode_percent > threshold + 2:
+ return True
+ if opcode_percent > threshold - 2:
+ return None # Maybe
+ return False
+
+ def FindRegion(self, addr):
+ answer = [-1, -1]
+ def is_in(reader, start, size, location):
+ if addr >= start and addr < start + size:
+ answer[0] = start
+ answer[1] = size
+ self.ForEachMemoryRegion(is_in)
+ if answer[0] == -1:
+ return None
+ return answer
+
+ def ForEachMemoryRegion(self, cb):
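+ # Invokes cb(reader, start, size, file_location) for every memory range
+ # recorded in the minidump, covering both the 64-bit full-memory list and
+ # the regular (partial) memory list.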
+ if self.memory_list64 is not None:
+ offset = 0
+ for r in self.memory_list64.ranges:
+ location = self.memory_list64.base_rva + offset
+ cb(self, r.start, r.size, location)
+ offset += r.size
+
+ if self.memory_list is not None:
+ for r in self.memory_list.ranges:
+ cb(self, r.start, r.memory.data_size, r.memory.rva)
+
+ def FindWord(self, word, alignment=0):
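+ # Scans every memory region for pointer-sized values equal to 'word' and
+ # prints the address of each match; 'alignment' only adjusts the starting
+ # offset of each region before the byte-by-byte scan.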
+ def search_inside_region(reader, start, size, location):
+ location = (location + alignment) & ~alignment
+ for loc in xrange(location, location + size - self.PointerSize()):
+ if reader._ReadWord(loc) == word:
+ slot = start + (loc - location)
+ print "%s: %s" % (reader.FormatIntPtr(slot),
+ reader.FormatIntPtr(word))
+ self.ForEachMemoryRegion(search_inside_region)
+
def FindLocation(self, address):
offset = 0
if self.memory_list64 is not None:
@@ -567,24 +708,182 @@ INSTANCE_TYPES = {
156: "SCRIPT_TYPE",
157: "CODE_CACHE_TYPE",
158: "POLYMORPHIC_CODE_CACHE_TYPE",
- 161: "FIXED_ARRAY_TYPE",
+ 159: "TYPE_FEEDBACK_INFO_TYPE",
+ 160: "ALIASED_ARGUMENTS_ENTRY_TYPE",
+ 163: "FIXED_ARRAY_TYPE",
145: "FIXED_DOUBLE_ARRAY_TYPE",
- 162: "SHARED_FUNCTION_INFO_TYPE",
- 163: "JS_MESSAGE_OBJECT_TYPE",
- 166: "JS_VALUE_TYPE",
- 167: "JS_OBJECT_TYPE",
- 168: "JS_CONTEXT_EXTENSION_OBJECT_TYPE",
- 169: "JS_GLOBAL_OBJECT_TYPE",
- 170: "JS_BUILTINS_OBJECT_TYPE",
- 171: "JS_GLOBAL_PROXY_TYPE",
- 172: "JS_ARRAY_TYPE",
- 165: "JS_PROXY_TYPE",
- 175: "JS_WEAK_MAP_TYPE",
- 176: "JS_REGEXP_TYPE",
- 177: "JS_FUNCTION_TYPE",
- 164: "JS_FUNCTION_PROXY_TYPE",
- 159: "DEBUG_INFO_TYPE",
- 160: "BREAK_POINT_INFO_TYPE",
+ 164: "SHARED_FUNCTION_INFO_TYPE",
+ 165: "JS_MESSAGE_OBJECT_TYPE",
+ 168: "JS_VALUE_TYPE",
+ 169: "JS_DATE_TYPE",
+ 170: "JS_OBJECT_TYPE",
+ 171: "JS_CONTEXT_EXTENSION_OBJECT_TYPE",
+ 172: "JS_MODULE_TYPE",
+ 173: "JS_GLOBAL_OBJECT_TYPE",
+ 174: "JS_BUILTINS_OBJECT_TYPE",
+ 175: "JS_GLOBAL_PROXY_TYPE",
+ 176: "JS_ARRAY_TYPE",
+ 167: "JS_PROXY_TYPE",
+ 179: "JS_WEAK_MAP_TYPE",
+ 180: "JS_REGEXP_TYPE",
+ 181: "JS_FUNCTION_TYPE",
+ 166: "JS_FUNCTION_PROXY_TYPE",
+ 161: "DEBUG_INFO_TYPE",
+ 162: "BREAK_POINT_INFO_TYPE",
+}
+
+
+# List of known V8 maps. Used to determine the instance type and name
+# for maps that are part of the root-set and hence on the first page of
+# the map-space. Obtained by adding the code below to an IA32 release
+# build with enabled snapshots to the end of the Isolate::Init method.
+#
+# #define ROOT_LIST_CASE(type, name, camel_name) \
+# if (o == heap_.name()) n = #camel_name;
+# #define STRUCT_LIST_CASE(upper_name, camel_name, name) \
+# if (o == heap_.name##_map()) n = #camel_name "Map";
+# HeapObjectIterator it(heap_.map_space());
+# printf("KNOWN_MAPS = {\n");
+# for (Object* o = it.Next(); o != NULL; o = it.Next()) {
+# Map* m = Map::cast(o);
+# const char* n = "";
+# intptr_t p = reinterpret_cast<intptr_t>(m) & 0xfffff;
+# int t = m->instance_type();
+# ROOT_LIST(ROOT_LIST_CASE)
+# STRUCT_LIST(STRUCT_LIST_CASE)
+# printf(" 0x%05x: (%d, \"%s\"),\n", p, t, n);
+# }
+# printf("}\n");
+KNOWN_MAPS = {
+ 0x08081: (134, "ByteArrayMap"),
+ 0x080a1: (128, "MetaMap"),
+ 0x080c1: (130, "OddballMap"),
+ 0x080e1: (163, "FixedArrayMap"),
+ 0x08101: (68, "AsciiSymbolMap"),
+ 0x08121: (132, "HeapNumberMap"),
+ 0x08141: (135, "FreeSpaceMap"),
+ 0x08161: (146, "OnePointerFillerMap"),
+ 0x08181: (146, "TwoPointerFillerMap"),
+ 0x081a1: (131, "GlobalPropertyCellMap"),
+ 0x081c1: (164, "SharedFunctionInfoMap"),
+ 0x081e1: (4, "AsciiStringMap"),
+ 0x08201: (163, "GlobalContextMap"),
+ 0x08221: (129, "CodeMap"),
+ 0x08241: (163, "ScopeInfoMap"),
+ 0x08261: (163, "FixedCOWArrayMap"),
+ 0x08281: (145, "FixedDoubleArrayMap"),
+ 0x082a1: (163, "HashTableMap"),
+ 0x082c1: (0, "StringMap"),
+ 0x082e1: (64, "SymbolMap"),
+ 0x08301: (1, "ConsStringMap"),
+ 0x08321: (5, "ConsAsciiStringMap"),
+ 0x08341: (3, "SlicedStringMap"),
+ 0x08361: (7, "SlicedAsciiStringMap"),
+ 0x08381: (65, "ConsSymbolMap"),
+ 0x083a1: (69, "ConsAsciiSymbolMap"),
+ 0x083c1: (66, "ExternalSymbolMap"),
+ 0x083e1: (74, "ExternalSymbolWithAsciiDataMap"),
+ 0x08401: (70, "ExternalAsciiSymbolMap"),
+ 0x08421: (2, "ExternalStringMap"),
+ 0x08441: (10, "ExternalStringWithAsciiDataMap"),
+ 0x08461: (6, "ExternalAsciiStringMap"),
+ 0x08481: (82, "ShortExternalSymbolMap"),
+ 0x084a1: (90, "ShortExternalSymbolWithAsciiDataMap"),
+ 0x084c1: (86, "ShortExternalAsciiSymbolMap"),
+ 0x084e1: (18, "ShortExternalStringMap"),
+ 0x08501: (26, "ShortExternalStringWithAsciiDataMap"),
+ 0x08521: (22, "ShortExternalAsciiStringMap"),
+ 0x08541: (0, "UndetectableStringMap"),
+ 0x08561: (4, "UndetectableAsciiStringMap"),
+ 0x08581: (144, "ExternalPixelArrayMap"),
+ 0x085a1: (136, "ExternalByteArrayMap"),
+ 0x085c1: (137, "ExternalUnsignedByteArrayMap"),
+ 0x085e1: (138, "ExternalShortArrayMap"),
+ 0x08601: (139, "ExternalUnsignedShortArrayMap"),
+ 0x08621: (140, "ExternalIntArrayMap"),
+ 0x08641: (141, "ExternalUnsignedIntArrayMap"),
+ 0x08661: (142, "ExternalFloatArrayMap"),
+ 0x08681: (143, "ExternalDoubleArrayMap"),
+ 0x086a1: (163, "NonStrictArgumentsElementsMap"),
+ 0x086c1: (163, "FunctionContextMap"),
+ 0x086e1: (163, "CatchContextMap"),
+ 0x08701: (163, "WithContextMap"),
+ 0x08721: (163, "BlockContextMap"),
+ 0x08741: (163, "ModuleContextMap"),
+ 0x08761: (165, "JSMessageObjectMap"),
+ 0x08781: (133, "ForeignMap"),
+ 0x087a1: (170, "NeanderMap"),
+ 0x087c1: (158, "PolymorphicCodeCacheMap"),
+ 0x087e1: (156, "ScriptMap"),
+ 0x08801: (147, "AccessorInfoMap"),
+ 0x08821: (148, "AccessorPairMap"),
+ 0x08841: (149, "AccessCheckInfoMap"),
+ 0x08861: (150, "InterceptorInfoMap"),
+ 0x08881: (151, "CallHandlerInfoMap"),
+ 0x088a1: (152, "FunctionTemplateInfoMap"),
+ 0x088c1: (153, "ObjectTemplateInfoMap"),
+ 0x088e1: (154, "SignatureInfoMap"),
+ 0x08901: (155, "TypeSwitchInfoMap"),
+ 0x08921: (157, "CodeCacheMap"),
+ 0x08941: (159, "TypeFeedbackInfoMap"),
+ 0x08961: (160, "AliasedArgumentsEntryMap"),
+ 0x08981: (161, "DebugInfoMap"),
+ 0x089a1: (162, "BreakPointInfoMap"),
+}
+
+
+# List of known V8 objects. Used to determine name for objects that are
+# part of the root-set and hence on the first page of various old-space
+# paged. Obtained by adding the code below to an IA32 release build with
+# enabled snapshots to the end of the Isolate::Init method.
+#
+# #define ROOT_LIST_CASE(type, name, camel_name) \
+# if (o == heap_.name()) n = #camel_name;
+# OldSpaces spit;
+# printf("KNOWN_OBJECTS = {\n");
+# for (PagedSpace* s = spit.next(); s != NULL; s = spit.next()) {
+# HeapObjectIterator it(s);
+# const char* sname = AllocationSpaceName(s->identity());
+# for (Object* o = it.Next(); o != NULL; o = it.Next()) {
+# const char* n = NULL;
+# intptr_t p = reinterpret_cast<intptr_t>(o) & 0xfffff;
+# ROOT_LIST(ROOT_LIST_CASE)
+# if (n != NULL) {
+# printf(" (\"%s\", 0x%05x): \"%s\",\n", sname, p, n);
+# }
+# }
+# }
+# printf("}\n");
+KNOWN_OBJECTS = {
+ ("OLD_POINTER_SPACE", 0x08081): "NullValue",
+ ("OLD_POINTER_SPACE", 0x08091): "UndefinedValue",
+ ("OLD_POINTER_SPACE", 0x080a1): "InstanceofCacheMap",
+ ("OLD_POINTER_SPACE", 0x080b1): "TrueValue",
+ ("OLD_POINTER_SPACE", 0x080c1): "FalseValue",
+ ("OLD_POINTER_SPACE", 0x080d1): "NoInterceptorResultSentinel",
+ ("OLD_POINTER_SPACE", 0x080e1): "ArgumentsMarker",
+ ("OLD_POINTER_SPACE", 0x080f1): "NumberStringCache",
+ ("OLD_POINTER_SPACE", 0x088f9): "SingleCharacterStringCache",
+ ("OLD_POINTER_SPACE", 0x08b01): "StringSplitCache",
+ ("OLD_POINTER_SPACE", 0x08f09): "TerminationException",
+ ("OLD_POINTER_SPACE", 0x08f19): "MessageListeners",
+ ("OLD_POINTER_SPACE", 0x08f35): "CodeStubs",
+ ("OLD_POINTER_SPACE", 0x09b61): "NonMonomorphicCache",
+ ("OLD_POINTER_SPACE", 0x0a175): "PolymorphicCodeCache",
+ ("OLD_POINTER_SPACE", 0x0a17d): "NativesSourceCache",
+ ("OLD_POINTER_SPACE", 0x0a1bd): "EmptyScript",
+ ("OLD_POINTER_SPACE", 0x0a1f9): "IntrinsicFunctionNames",
+ ("OLD_POINTER_SPACE", 0x24a49): "SymbolTable",
+ ("OLD_DATA_SPACE", 0x08081): "EmptyFixedArray",
+ ("OLD_DATA_SPACE", 0x080a1): "NanValue",
+ ("OLD_DATA_SPACE", 0x0811d): "EmptyByteArray",
+ ("OLD_DATA_SPACE", 0x08125): "EmptyString",
+ ("OLD_DATA_SPACE", 0x08131): "EmptyDescriptorArray",
+ ("OLD_DATA_SPACE", 0x08259): "InfinityValue",
+ ("OLD_DATA_SPACE", 0x08265): "MinusZeroValue",
+ ("OLD_DATA_SPACE", 0x08271): "PrototypeAccessors",
+ ("CODE_SPACE", 0x12b81): "JsEntryCode",
+ ("CODE_SPACE", 0x12c61): "JsConstructEntryCode",
}
@@ -745,7 +1044,10 @@ class ConsString(String):
self.right = self.ObjectField(self.RightOffset())
def GetChars(self):
- return self.left.GetChars() + self.right.GetChars()
+ try:
+ return self.left.GetChars() + self.right.GetChars()
+ except:
+ return "***CAUGHT EXCEPTION IN GROKDUMP***"
class Oddball(HeapObject):
@@ -760,7 +1062,10 @@ class Oddball(HeapObject):
p.Print(str(self))
def __str__(self):
- return "<%s>" % self.to_string.GetChars()
+ if self.to_string:
+ return "Oddball(%08x, <%s>)" % (self.address, self.to_string.GetChars())
+ else:
+ return "Oddball(%08x, kind=%s)" % (self.address, "???")
class FixedArray(HeapObject):
@@ -886,6 +1191,27 @@ class Script(HeapObject):
self.name = self.ObjectField(self.NameOffset())
+class CodeCache(HeapObject):
+ def DefaultCacheOffset(self):
+ return self.heap.PointerSize()
+
+ def NormalTypeCacheOffset(self):
+ return self.DefaultCacheOffset() + self.heap.PointerSize()
+
+ def __init__(self, heap, map, address):
+ HeapObject.__init__(self, heap, map, address)
+ self.default_cache = self.ObjectField(self.DefaultCacheOffset())
+ self.normal_type_cache = self.ObjectField(self.NormalTypeCacheOffset())
+
+ def Print(self, p):
+ p.Print("CodeCache(%s) {" % self.heap.reader.FormatIntPtr(self.address))
+ p.Indent()
+ p.Print("default cache: %s" % self.default_cache)
+ p.Print("normal type cache: %s" % self.normal_type_cache)
+ p.Dedent()
+ p.Print("}")
+
+
class Code(HeapObject):
CODE_ALIGNMENT_MASK = (1 << 5) - 1
@@ -936,14 +1262,14 @@ class V8Heap(object):
"EXTERNAL_STRING_TYPE": ExternalString,
"EXTERNAL_STRING_WITH_ASCII_DATA_TYPE": ExternalString,
"EXTERNAL_ASCII_STRING_TYPE": ExternalString,
-
"MAP_TYPE": Map,
"ODDBALL_TYPE": Oddball,
"FIXED_ARRAY_TYPE": FixedArray,
"JS_FUNCTION_TYPE": JSFunction,
"SHARED_FUNCTION_INFO_TYPE": SharedFunctionInfo,
"SCRIPT_TYPE": Script,
- "CODE_TYPE": Code
+ "CODE_CACHE_TYPE": CodeCache,
+ "CODE_TYPE": Code,
}
def __init__(self, reader, stack_map):
@@ -1001,6 +1327,250 @@ class V8Heap(object):
elif self.reader.arch == MD_CPU_ARCHITECTURE_X86:
return (1 << 5) - 1
+ def PageAlignmentMask(self):
+ return (1 << 20) - 1
+
+
+class KnownObject(HeapObject):
+ def __init__(self, heap, known_name):
+ HeapObject.__init__(self, heap, None, None)
+ self.known_name = known_name
+
+ def __str__(self):
+ return "<%s>" % self.known_name
+
+
+class KnownMap(HeapObject):
+ def __init__(self, heap, known_name, instance_type):
+ HeapObject.__init__(self, heap, None, None)
+ self.instance_type = instance_type
+ self.known_name = known_name
+
+ def __str__(self):
+ return "<%s>" % self.known_name
+
+
+class InspectionPadawan(object):
+ """The padawan can improve annotations by sensing well-known objects."""
+ def __init__(self, reader, heap):
+ self.reader = reader
+ self.heap = heap
+ self.known_first_map_page = 0
+ self.known_first_data_page = 0
+ self.known_first_pointer_page = 0
+
+ def __getattr__(self, name):
+ """An InspectionPadawan can be used instead of V8Heap, even though
+ it does not inherit from V8Heap (aka. mixin)."""
+ return getattr(self.heap, name)
+
+ def GetPageOffset(self, tagged_address):
+ return tagged_address & self.heap.PageAlignmentMask()
+
+ def IsInKnownMapSpace(self, tagged_address):
+ page_address = tagged_address & ~self.heap.PageAlignmentMask()
+ return page_address == self.known_first_map_page
+
+ def IsInKnownOldSpace(self, tagged_address):
+ page_address = tagged_address & ~self.heap.PageAlignmentMask()
+ return page_address in [self.known_first_data_page,
+ self.known_first_pointer_page]
+
+ def ContainingKnownOldSpaceName(self, tagged_address):
+ page_address = tagged_address & ~self.heap.PageAlignmentMask()
+ if page_address == self.known_first_data_page: return "OLD_DATA_SPACE"
+ if page_address == self.known_first_pointer_page: return "OLD_POINTER_SPACE"
+ return None
+
+ def SenseObject(self, tagged_address):
+ if self.IsInKnownOldSpace(tagged_address):
+ offset = self.GetPageOffset(tagged_address)
+ lookup_key = (self.ContainingKnownOldSpaceName(tagged_address), offset)
+ known_obj_name = KNOWN_OBJECTS.get(lookup_key)
+ if known_obj_name:
+ return KnownObject(self, known_obj_name)
+ if self.IsInKnownMapSpace(tagged_address):
+ known_map = self.SenseMap(tagged_address)
+ if known_map:
+ return known_map
+ found_obj = self.heap.FindObject(tagged_address)
+ if found_obj: return found_obj
+ address = tagged_address - 1
+ if self.reader.IsValidAddress(address):
+ map_tagged_address = self.reader.ReadUIntPtr(address)
+ map = self.SenseMap(map_tagged_address)
+ if map is None: return None
+ instance_type_name = INSTANCE_TYPES.get(map.instance_type)
+ if instance_type_name is None: return None
+ cls = V8Heap.CLASS_MAP.get(instance_type_name, HeapObject)
+ return cls(self, map, address)
+ return None
+
+ def SenseMap(self, tagged_address):
+ if self.IsInKnownMapSpace(tagged_address):
+ offset = self.GetPageOffset(tagged_address)
+ known_map_info = KNOWN_MAPS.get(offset)
+ if known_map_info:
+ known_map_type, known_map_name = known_map_info
+ return KnownMap(self, known_map_name, known_map_type)
+ found_map = self.heap.FindMap(tagged_address)
+ if found_map: return found_map
+ return None
+
+ def FindObjectOrSmi(self, tagged_address):
+ """When used as a mixin in place of V8Heap."""
+ found_obj = self.SenseObject(tagged_address)
+ if found_obj: return found_obj
+ if (tagged_address & 1) == 0:
+ return "Smi(%d)" % (tagged_address / 2)
+ else:
+ return "Unknown(%s)" % self.reader.FormatIntPtr(tagged_address)
+
+ def FindObject(self, tagged_address):
+ """When used as a mixin in place of V8Heap."""
+ raise NotImplementedError
+
+ def FindMap(self, tagged_address):
+ """When used as a mixin in place of V8Heap."""
+ raise NotImplementedError
+
+ def PrintKnowledge(self):
+ print " known_first_map_page = %s\n"\
+ " known_first_data_page = %s\n"\
+ " known_first_pointer_page = %s" % (
+ self.reader.FormatIntPtr(self.known_first_map_page),
+ self.reader.FormatIntPtr(self.known_first_data_page),
+ self.reader.FormatIntPtr(self.known_first_pointer_page))
+
+
+class InspectionShell(cmd.Cmd):
+ def __init__(self, reader, heap):
+ cmd.Cmd.__init__(self)
+ self.reader = reader
+ self.heap = heap
+ self.padawan = InspectionPadawan(reader, heap)
+ self.prompt = "(grok) "
+
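+ # A short example session (the addresses are made up for illustration):
+ #   (grok) list                 # show the available memory regions
+ #   (grok) km 0x2d080000        # teach the shell the first map-space page
+ #   (grok) dd 0x0808f000        # dump ten annotated words at an address
+ #   (grok) do 0x0808f001        # interpret a tagged pointer as an object
+ #   (grok) s 0xdeadbeef         # search all regions for a word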
+ def do_dd(self, address):
+ """
+ Interpret memory at the given address (if available) as a sequence
+ of words. Automatic alignment is not performed.
+ """
+ start = int(address, 16)
+ if (start & self.heap.ObjectAlignmentMask()) != 0:
+ print "Warning: Dumping un-aligned memory, is this what you had in mind?"
+ for slot in xrange(start,
+ start + self.reader.PointerSize() * 10,
+ self.reader.PointerSize()):
+ if not self.reader.IsValidAddress(slot):
+ print "Address is not contained within the minidump!"
+ return
+ maybe_address = self.reader.ReadUIntPtr(slot)
+ heap_object = self.padawan.SenseObject(maybe_address)
+ print "%s: %s %s" % (self.reader.FormatIntPtr(slot),
+ self.reader.FormatIntPtr(maybe_address),
+ heap_object or '')
+
+ def do_do(self, address):
+ """
+ Interpret memory at the given address as a V8 object. Automatic
+ alignment makes sure that you can pass tagged as well as un-tagged
+ addresses.
+ """
+ address = int(address, 16)
+ if (address & self.heap.ObjectAlignmentMask()) == 0:
+ address = address + 1
+ elif (address & self.heap.ObjectAlignmentMask()) != 1:
+ print "Address doesn't look like a valid pointer!"
+ return
+ heap_object = self.padawan.SenseObject(address)
+ if heap_object:
+ heap_object.Print(Printer())
+ else:
+ print "Address cannot be interpreted as object!"
+
+ def do_dp(self, address):
+ """
+ Interpret memory at the given address as being on a V8 heap page
+ and print information about the page header (if available).
+ """
+ address = int(address, 16)
+ page_address = address & ~self.heap.PageAlignmentMask()
+ if self.reader.IsValidAddress(page_address):
+ raise NotImplementedError
+ else:
+ print "Page header is not available!"
+
+ def do_k(self, arguments):
+ """
+ Teach V8 heap layout information to the inspector. This increases
+ the amount of annotations the inspector can produce while dumping
+ data. The first page of each heap space is of particular interest
+ because it contains known objects that do not move.
+ """
+ self.padawan.PrintKnowledge()
+
+ def do_km(self, address):
+ """
+ Teach V8 heap layout information to the inspector. Set the first
+ map-space page by passing any pointer into that page.
+ """
+ address = int(address, 16)
+ page_address = address & ~self.heap.PageAlignmentMask()
+ self.padawan.known_first_map_page = page_address
+
+ def do_kd(self, address):
+ """
+ Teach V8 heap layout information to the inspector. Set the first
+ data-space page by passing any pointer into that page.
+ """
+ address = int(address, 16)
+ page_address = address & ~self.heap.PageAlignmentMask()
+ self.padawan.known_first_data_page = page_address
+
+ def do_kp(self, address):
+ """
+ Teach V8 heap layout information to the inspector. Set the first
+ pointer-space page by passing any pointer into that page.
+ """
+ address = int(address, 16)
+ page_address = address & ~self.heap.PageAlignmentMask()
+ self.padawan.known_first_pointer_page = page_address
+
+ def do_s(self, word):
+ """
+ Search for a given word in available memory regions. The given word
+ is expanded to full pointer size and searched at aligned as well as
+ un-aligned memory locations. Use 'sa' to search aligned locations
+ only.
+ """
+ try:
+ word = int(word, 0)
+ except ValueError:
+ print "Malformed word, prefix with '0x' to use hexadecimal format."
+ return
+ print "Searching for word %d/0x%s:" % (word, self.reader.FormatIntPtr(word))
+ self.reader.FindWord(word)
+
+ def do_sh(self, none):
+ """
+ Search for the V8 Heap object in all available memory regions. You
+ might get lucky and find this rare treasure full of invaluable
+ information.
+ """
+ raise NotImplementedError
+
+ def do_list(self, smth):
+ """
+ List all available memory regions.
+ """
+ def print_region(reader, start, size, location):
+ print " %s - %s (%d bytes)" % (reader.FormatIntPtr(start),
+ reader.FormatIntPtr(start + size),
+ size)
+ print "Available memory regions:"
+ self.reader.ForEachMemoryRegion(print_region)
+
EIP_PROXIMITY = 64
@@ -1011,55 +1581,79 @@ CONTEXT_FOR_ARCH = {
['eax', 'ebx', 'ecx', 'edx', 'edi', 'esi', 'ebp', 'esp', 'eip']
}
+
def AnalyzeMinidump(options, minidump_name):
reader = MinidumpReader(options, minidump_name)
+ heap = None
DebugPrint("========================================")
if reader.exception is None:
print "Minidump has no exception info"
- return
- print "Exception info:"
- exception_thread = reader.thread_map[reader.exception.thread_id]
- print " thread id: %d" % exception_thread.id
- print " code: %08X" % reader.exception.exception.code
- print " context:"
- for r in CONTEXT_FOR_ARCH[reader.arch]:
- print " %s: %s" % (r, reader.FormatIntPtr(reader.Register(r)))
- # TODO(vitalyr): decode eflags.
- print " eflags: %s" % bin(reader.exception_context.eflags)[2:]
- print
-
- stack_top = reader.ExceptionSP()
- stack_bottom = exception_thread.stack.start + \
- exception_thread.stack.memory.data_size
- stack_map = {reader.ExceptionIP(): -1}
- for slot in xrange(stack_top, stack_bottom, reader.PointerSize()):
- maybe_address = reader.ReadUIntPtr(slot)
- if not maybe_address in stack_map:
- stack_map[maybe_address] = slot
- heap = V8Heap(reader, stack_map)
-
- print "Disassembly around exception.eip:"
- start = reader.ExceptionIP() - EIP_PROXIMITY
- lines = reader.GetDisasmLines(start, 2 * EIP_PROXIMITY)
- for line in lines:
- print FormatDisasmLine(start, heap, line)
- print
-
- print "Annotated stack (from exception.esp to bottom):"
- for slot in xrange(stack_top, stack_bottom, reader.PointerSize()):
- maybe_address = reader.ReadUIntPtr(slot)
- heap_object = heap.FindObject(maybe_address)
- print "%s: %s" % (reader.FormatIntPtr(slot),
- reader.FormatIntPtr(maybe_address))
- if heap_object:
- heap_object.Print(Printer())
- print
+ else:
+ print "Exception info:"
+ exception_thread = reader.thread_map[reader.exception.thread_id]
+ print " thread id: %d" % exception_thread.id
+ print " code: %08X" % reader.exception.exception.code
+ print " context:"
+ for r in CONTEXT_FOR_ARCH[reader.arch]:
+ print " %s: %s" % (r, reader.FormatIntPtr(reader.Register(r)))
+ # TODO(vitalyr): decode eflags.
+ print " eflags: %s" % bin(reader.exception_context.eflags)[2:]
+ print
+
+ stack_top = reader.ExceptionSP()
+ stack_bottom = exception_thread.stack.start + \
+ exception_thread.stack.memory.data_size
+ stack_map = {reader.ExceptionIP(): -1}
+ for slot in xrange(stack_top, stack_bottom, reader.PointerSize()):
+ maybe_address = reader.ReadUIntPtr(slot)
+ if not maybe_address in stack_map:
+ stack_map[maybe_address] = slot
+ heap = V8Heap(reader, stack_map)
+
+ print "Disassembly around exception.eip:"
+ disasm_start = reader.ExceptionIP() - EIP_PROXIMITY
+ disasm_bytes = 2 * EIP_PROXIMITY
+ if (options.full):
+ full_range = reader.FindRegion(reader.ExceptionIP())
+ if full_range is not None:
+ disasm_start = full_range[0]
+ disasm_bytes = full_range[1]
+
+ lines = reader.GetDisasmLines(disasm_start, disasm_bytes)
+
+ for line in lines:
+ print FormatDisasmLine(disasm_start, heap, line)
+ print
+
+ if heap is None:
+ heap = V8Heap(reader, None)
+
+ if options.full:
+ FullDump(reader, heap)
+
+ if options.shell:
+ InspectionShell(reader, heap).cmdloop("type help to get help")
+ else:
+ if reader.exception is not None:
+ print "Annotated stack (from exception.esp to bottom):"
+ for slot in xrange(stack_top, stack_bottom, reader.PointerSize()):
+ maybe_address = reader.ReadUIntPtr(slot)
+ heap_object = heap.FindObject(maybe_address)
+ print "%s: %s" % (reader.FormatIntPtr(slot),
+ reader.FormatIntPtr(maybe_address))
+ if heap_object:
+ heap_object.Print(Printer())
+ print
reader.Dispose()
if __name__ == "__main__":
parser = optparse.OptionParser(USAGE)
+ parser.add_option("-s", "--shell", dest="shell", action="store_true",
+ help="start an interactive inspector shell")
+ parser.add_option("-f", "--full", dest="full", action="store_true",
+ help="dump all information contained in the minidump")
options, args = parser.parse_args()
if len(args) != 1:
parser.print_help()
diff --git a/deps/v8/tools/gyp/v8.gyp b/deps/v8/tools/gyp/v8.gyp
index 538b7ef5c3..ea82d31814 100644
--- a/deps/v8/tools/gyp/v8.gyp
+++ b/deps/v8/tools/gyp/v8.gyp
@@ -58,24 +58,20 @@
# has some sources to link into the component.
'../../src/v8dll-main.cc',
],
+ 'defines': [
+ 'V8_SHARED',
+ 'BUILDING_V8_SHARED',
+ ],
+ 'direct_dependent_settings': {
+ 'defines': [
+ 'V8_SHARED',
+ 'USING_V8_SHARED',
+ ],
+ },
'conditions': [
- ['OS=="win"', {
- 'defines': [
- 'BUILDING_V8_SHARED',
- ],
- 'direct_dependent_settings': {
- 'defines': [
- 'USING_V8_SHARED',
- ],
- },
- }, {
- 'defines': [
- 'V8_SHARED',
- ],
- 'direct_dependent_settings': {
- 'defines': [
- 'V8_SHARED',
- ],
+ ['OS=="mac"', {
+ 'xcode_settings': {
+ 'OTHER_LDFLAGS': ['-dynamiclib', '-all_load']
},
}],
['soname_version!=""', {
@@ -105,27 +101,16 @@
'dependencies': ['mksnapshot', 'js2c'],
}],
['component=="shared_library"', {
- 'conditions': [
- ['OS=="win"', {
- 'defines': [
- 'BUILDING_V8_SHARED',
- ],
- 'direct_dependent_settings': {
- 'defines': [
- 'USING_V8_SHARED',
- ],
- },
- }, {
- 'defines': [
- 'V8_SHARED',
- ],
- 'direct_dependent_settings': {
- 'defines': [
- 'V8_SHARED',
- ],
- },
- }],
+ 'defines': [
+ 'V8_SHARED',
+ 'BUILDING_V8_SHARED',
],
+ 'direct_dependent_settings': {
+ 'defines': [
+ 'V8_SHARED',
+ 'USING_V8_SHARED',
+ ],
+ },
}],
],
'dependencies': [
@@ -310,6 +295,8 @@
'../../src/dtoa.h',
'../../src/elements.cc',
'../../src/elements.h',
+ '../../src/elements-kind.cc',
+ '../../src/elements-kind.h',
'../../src/execution.cc',
'../../src/execution.h',
'../../src/factory.cc',
@@ -360,7 +347,7 @@
'../../src/jsregexp.h',
'../../src/isolate.cc',
'../../src/isolate.h',
- '../../src/lazy-instance.h'
+ '../../src/lazy-instance.h',
'../../src/list-inl.h',
'../../src/list.h',
'../../src/lithium.cc',
diff --git a/deps/v8/tools/js2c.py b/deps/v8/tools/js2c.py
index fa559f362c..d06cbe47a9 100644
--- a/deps/v8/tools/js2c.py
+++ b/deps/v8/tools/js2c.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
#
-# Copyright 2006-2008 the V8 project authors. All rights reserved.
+# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -195,14 +195,14 @@ def ReadMacros(lines):
macro_match = MACRO_PATTERN.match(line)
if macro_match:
name = macro_match.group(1)
- args = map(string.strip, macro_match.group(2).split(','))
+ args = [match.strip() for match in macro_match.group(2).split(',')]
body = macro_match.group(3).strip()
macros.append((re.compile("\\b%s\\(" % name), TextMacro(args, body)))
else:
python_match = PYTHON_MACRO_PATTERN.match(line)
if python_match:
name = python_match.group(1)
- args = map(string.strip, python_match.group(2).split(','))
+ args = [match.strip() for match in python_match.group(2).split(',')]
body = python_match.group(3).strip()
fun = eval("lambda " + ",".join(args) + ': ' + body)
macros.append((re.compile("\\b%s\\(" % name), PythonMacro(args, fun)))
diff --git a/deps/v8/tools/jsmin.py b/deps/v8/tools/jsmin.py
index e82f3d031e..250dea9d72 100644
--- a/deps/v8/tools/jsmin.py
+++ b/deps/v8/tools/jsmin.py
@@ -1,6 +1,6 @@
#!/usr/bin/python2.4
-# Copyright 2009 the V8 project authors. All rights reserved.
+# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -154,7 +154,7 @@ class JavaScriptMinifier(object):
return var_name
while True:
identifier_first_char = self.identifier_counter % 52
- identifier_second_char = self.identifier_counter / 52
+ identifier_second_char = self.identifier_counter // 52
new_identifier = self.CharFromNumber(identifier_first_char)
if identifier_second_char != 0:
new_identifier = (
diff --git a/deps/v8/tools/presubmit.py b/deps/v8/tools/presubmit.py
index b923dd0438..a0b81e85f4 100755
--- a/deps/v8/tools/presubmit.py
+++ b/deps/v8/tools/presubmit.py
@@ -114,12 +114,15 @@ def CppLintWorker(command):
while True:
out_line = process.stderr.readline()
if out_line == '' and process.poll() != None:
+ if error_count == -1:
+ print "Failed to process %s" % command.pop()
+ return 1
break
m = LINT_OUTPUT_PATTERN.match(out_line)
if m:
out_lines += out_line
error_count += 1
- sys.stderr.write(out_lines)
+ sys.stdout.write(out_lines)
return error_count
except KeyboardInterrupt:
process.kill()
diff --git a/deps/v8/tools/push-to-trunk.sh b/deps/v8/tools/push-to-trunk.sh
index 3fb5b34ed3..ff6dd1d776 100755
--- a/deps/v8/tools/push-to-trunk.sh
+++ b/deps/v8/tools/push-to-trunk.sh
@@ -130,6 +130,7 @@ if [ $START_STEP -le $CURRENT_STEP ] ; then
| grep "^BUG=" | grep -v "BUG=$" | grep -v "BUG=none$" \
| sed -e 's/^/ /' \
| sed -e 's/BUG=v8:\(.*\)$/(issue \1)/' \
+ | sed -e 's/BUG=chromium:\(.*\)$/(Chromium issue \1)/' \
| sed -e 's/BUG=\(.*\)$/(Chromium issue \1)/' \
>> "$CHANGELOG_ENTRY_FILE"
# Append the commit's author for reference.
@@ -320,6 +321,14 @@ if [ $START_STEP -le $CURRENT_STEP ] ; then
|| die "'git svn tag' failed."
fi
+if [ -z "$CHROME_PATH" ] ; then
+ echo ">>> (asking for Chromium checkout)"
+ echo -n "Do you have a \"NewGit\" Chromium checkout and want this script \
+to automate creation of the roll CL? If yes, enter the path to (and including) \
+the \"src\" directory here, otherwise just press <Return>: "
+ read CHROME_PATH
+fi
+
if [ -n "$CHROME_PATH" ] ; then
let CURRENT_STEP+=1
diff --git a/deps/v8/tools/test-wrapper-gypbuild.py b/deps/v8/tools/test-wrapper-gypbuild.py
index eda2459173..d99d055e50 100755
--- a/deps/v8/tools/test-wrapper-gypbuild.py
+++ b/deps/v8/tools/test-wrapper-gypbuild.py
@@ -112,9 +112,6 @@ def BuildOptions():
result.add_option("--nostress",
help="Don't run crankshaft --always-opt --stress-op test",
default=False, action="store_true")
- result.add_option("--crankshaft",
- help="Run with the --crankshaft flag",
- default=False, action="store_true")
result.add_option("--shard-count",
help="Split testsuites into this number of shards",
default=1, type="int")
@@ -199,8 +196,6 @@ def PassOnOptions(options):
result += ['--stress-only']
if options.nostress:
result += ['--nostress']
- if options.crankshaft:
- result += ['--crankshaft']
if options.shard_count != 1:
result += ['--shard-count=%s' % options.shard_count]
if options.shard_run != 1:
@@ -224,7 +219,8 @@ def Main():
print ">>> running presubmit tests"
returncodes += subprocess.call([workspace + '/tools/presubmit.py'])
- args_for_children = [workspace + '/tools/test.py'] + PassOnOptions(options)
+ args_for_children = ['python']
+ args_for_children += [workspace + '/tools/test.py'] + PassOnOptions(options)
args_for_children += ['--no-build', '--build-system=gyp']
for arg in args:
args_for_children += [arg]
@@ -240,10 +236,11 @@ def Main():
shellpath = workspace + '/' + options.outdir + '/' + arch + '.' + mode
env['LD_LIBRARY_PATH'] = shellpath + '/lib.target'
shell = shellpath + "/d8"
- child = subprocess.Popen(' '.join(args_for_children +
- ['--arch=' + arch] +
- ['--mode=' + mode] +
- ['--shell=' + shell]),
+ cmdline = ' '.join(args_for_children +
+ ['--arch=' + arch] +
+ ['--mode=' + mode] +
+ ['--shell=' + shell])
+ child = subprocess.Popen(cmdline,
shell=True,
cwd=workspace,
env=env)
diff --git a/deps/v8/tools/test.py b/deps/v8/tools/test.py
index 0aacd993f3..5131ad7617 100755
--- a/deps/v8/tools/test.py
+++ b/deps/v8/tools/test.py
@@ -1246,9 +1246,6 @@ def BuildOptions():
result.add_option("--nostress",
help="Don't run crankshaft --always-opt --stress-op test",
default=False, action="store_true")
- result.add_option("--crankshaft",
- help="Run with the --crankshaft flag",
- default=False, action="store_true")
result.add_option("--shard-count",
help="Split testsuites into this number of shards",
default=1, type="int")
@@ -1300,11 +1297,6 @@ def ProcessOptions(options):
VARIANT_FLAGS = [['--stress-opt', '--always-opt']]
if options.nostress:
VARIANT_FLAGS = [[],['--nocrankshaft']]
- if options.crankshaft:
- if options.special_command:
- options.special_command += " --crankshaft"
- else:
- options.special_command = "@ --crankshaft"
if options.shell.endswith("d8"):
if options.special_command:
options.special_command += " --test"
@@ -1493,7 +1485,6 @@ def Main():
'system': utils.GuessOS(),
'arch': options.arch,
'simulator': options.simulator,
- 'crankshaft': options.crankshaft,
'isolates': options.isolates
}
test_list = root.ListTests([], path, context, mode, [])