path: root/deps/v8/test/unittests
author     Michaël Zasso <targos@protonmail.com>   2017-03-21 10:16:54 +0100
committer  Michaël Zasso <targos@protonmail.com>   2017-03-25 09:44:10 +0100
commit     c459d8ea5d402c702948c860d9497b2230ff7e8a (patch)
tree       56c282fc4d40e5cb613b47cf7be3ea0526ed5b6f /deps/v8/test/unittests
parent     e0bc5a7361b1d29c3ed034155fd779ce6f44fb13 (diff)
deps: update V8 to 5.7.492.69
PR-URL: https://github.com/nodejs/node/pull/11752
Reviewed-By: Ben Noordhuis <info@bnoordhuis.nl>
Reviewed-By: Franziska Hinkelmann <franziska.hinkelmann@gmail.com>
Diffstat (limited to 'deps/v8/test/unittests')
-rw-r--r--  deps/v8/test/unittests/BUILD.gn | 18
-rw-r--r--  deps/v8/test/unittests/api/access-check-unittest.cc | 79
-rw-r--r--  deps/v8/test/unittests/base/logging-unittest.cc | 56
-rw-r--r--  deps/v8/test/unittests/cancelable-tasks-unittest.cc | 45
-rw-r--r--  deps/v8/test/unittests/compiler-dispatcher/compiler-dispatcher-helper.cc | 28
-rw-r--r--  deps/v8/test/unittests/compiler-dispatcher/compiler-dispatcher-helper.h | 23
-rw-r--r--  deps/v8/test/unittests/compiler-dispatcher/compiler-dispatcher-job-unittest.cc | 104
-rw-r--r--  deps/v8/test/unittests/compiler-dispatcher/compiler-dispatcher-tracer-unittest.cc | 12
-rw-r--r--  deps/v8/test/unittests/compiler-dispatcher/compiler-dispatcher-unittest.cc | 806
-rw-r--r--  deps/v8/test/unittests/compiler/bytecode-analysis-unittest.cc | 418
-rw-r--r--  deps/v8/test/unittests/compiler/common-operator-reducer-unittest.cc | 27
-rw-r--r--  deps/v8/test/unittests/compiler/escape-analysis-unittest.cc | 26
-rw-r--r--  deps/v8/test/unittests/compiler/graph-unittest.cc | 6
-rw-r--r--  deps/v8/test/unittests/compiler/graph-unittest.h | 1
-rw-r--r--  deps/v8/test/unittests/compiler/instruction-selector-unittest.cc | 90
-rw-r--r--  deps/v8/test/unittests/compiler/instruction-sequence-unittest.cc | 20
-rw-r--r--  deps/v8/test/unittests/compiler/int64-lowering-unittest.cc | 25
-rw-r--r--  deps/v8/test/unittests/compiler/js-create-lowering-unittest.cc | 29
-rw-r--r--  deps/v8/test/unittests/compiler/js-intrinsic-lowering-unittest.cc | 31
-rw-r--r--  deps/v8/test/unittests/compiler/js-typed-lowering-unittest.cc | 78
-rw-r--r--  deps/v8/test/unittests/compiler/liveness-analyzer-unittest.cc | 6
-rw-r--r--  deps/v8/test/unittests/compiler/load-elimination-unittest.cc | 96
-rw-r--r--  deps/v8/test/unittests/compiler/machine-operator-reducer-unittest.cc | 45
-rw-r--r--  deps/v8/test/unittests/compiler/mips/instruction-selector-mips-unittest.cc | 164
-rw-r--r--  deps/v8/test/unittests/compiler/mips64/instruction-selector-mips64-unittest.cc | 203
-rw-r--r--  deps/v8/test/unittests/compiler/node-test-utils.cc | 55
-rw-r--r--  deps/v8/test/unittests/compiler/node-test-utils.h | 8
-rw-r--r--  deps/v8/test/unittests/compiler/regalloc/OWNERS | 5
-rw-r--r--  deps/v8/test/unittests/compiler/regalloc/live-range-unittest.cc (renamed from deps/v8/test/unittests/compiler/live-range-unittest.cc) | 30
-rw-r--r--  deps/v8/test/unittests/compiler/regalloc/move-optimizer-unittest.cc (renamed from deps/v8/test/unittests/compiler/move-optimizer-unittest.cc) | 8
-rw-r--r--  deps/v8/test/unittests/compiler/regalloc/register-allocator-unittest.cc (renamed from deps/v8/test/unittests/compiler/register-allocator-unittest.cc) | 34
-rw-r--r--  deps/v8/test/unittests/compiler/simplified-operator-reducer-unittest.cc | 16
-rw-r--r--  deps/v8/test/unittests/compiler/state-values-utils-unittest.cc | 90
-rw-r--r--  deps/v8/test/unittests/compiler/typer-unittest.cc | 4
-rw-r--r--  deps/v8/test/unittests/counters-unittest.cc | 310
-rw-r--r--  deps/v8/test/unittests/heap/embedder-tracing-unittest.cc | 163
-rw-r--r--  deps/v8/test/unittests/heap/gc-idle-time-handler-unittest.cc | 13
-rw-r--r--  deps/v8/test/unittests/heap/heap-unittest.cc | 1
-rw-r--r--  deps/v8/test/unittests/heap/memory-reducer-unittest.cc | 67
-rw-r--r--  deps/v8/test/unittests/heap/unmapper-unittest.cc | 88
-rw-r--r--  deps/v8/test/unittests/interpreter/bytecode-array-builder-unittest.cc | 69
-rw-r--r--  deps/v8/test/unittests/interpreter/bytecode-array-iterator-unittest.cc | 7
-rw-r--r--  deps/v8/test/unittests/interpreter/bytecode-array-random-iterator-unittest.cc | 1011
-rw-r--r--  deps/v8/test/unittests/interpreter/bytecode-array-writer-unittest.cc | 1
-rw-r--r--  deps/v8/test/unittests/interpreter/bytecode-operands-unittest.cc | 47
-rw-r--r--  deps/v8/test/unittests/interpreter/bytecode-peephole-optimizer-unittest.cc | 24
-rw-r--r--  deps/v8/test/unittests/interpreter/bytecode-pipeline-unittest.cc | 37
-rw-r--r--  deps/v8/test/unittests/interpreter/bytecode-register-allocator-unittest.cc | 1
-rw-r--r--  deps/v8/test/unittests/interpreter/bytecode-register-optimizer-unittest.cc | 19
-rw-r--r--  deps/v8/test/unittests/interpreter/bytecodes-unittest.cc | 120
-rw-r--r--  deps/v8/test/unittests/interpreter/constant-array-builder-unittest.cc | 1
-rw-r--r--  deps/v8/test/unittests/interpreter/interpreter-assembler-unittest.cc | 189
-rw-r--r--  deps/v8/test/unittests/interpreter/interpreter-assembler-unittest.h | 16
-rw-r--r--  deps/v8/test/unittests/libplatform/default-platform-unittest.cc | 34
-rw-r--r--  deps/v8/test/unittests/object-unittest.cc | 57
-rw-r--r--  deps/v8/test/unittests/run-all-unittests.cc | 3
-rw-r--r--  deps/v8/test/unittests/unittests.gyp | 18
-rw-r--r--  deps/v8/test/unittests/unittests.status | 9
-rw-r--r--  deps/v8/test/unittests/value-serializer-unittest.cc | 107
-rw-r--r--  deps/v8/test/unittests/wasm/OWNERS | 1
-rw-r--r--  deps/v8/test/unittests/wasm/control-transfer-unittest.cc | 146
-rw-r--r--  deps/v8/test/unittests/wasm/function-body-decoder-unittest.cc (renamed from deps/v8/test/unittests/wasm/ast-decoder-unittest.cc) | 1238
-rw-r--r--  deps/v8/test/unittests/wasm/loop-assignment-analysis-unittest.cc | 12
-rw-r--r--  deps/v8/test/unittests/wasm/module-decoder-unittest.cc | 166
-rw-r--r--  deps/v8/test/unittests/wasm/wasm-macro-gen-unittest.cc | 7
-rw-r--r--  deps/v8/test/unittests/wasm/wasm-module-builder-unittest.cc | 4
-rw-r--r--  deps/v8/test/unittests/zone/zone-chunk-list-unittest.cc | 1
67 files changed, 5219 insertions(+), 1454 deletions(-)
diff --git a/deps/v8/test/unittests/BUILD.gn b/deps/v8/test/unittests/BUILD.gn
index 2e13f04b57..221ca74cab 100644
--- a/deps/v8/test/unittests/BUILD.gn
+++ b/deps/v8/test/unittests/BUILD.gn
@@ -10,6 +10,7 @@ v8_executable("unittests") {
sources = [
"../../testing/gmock-support.h",
"../../testing/gtest-support.h",
+ "api/access-check-unittest.cc",
"base/atomic-utils-unittest.cc",
"base/bits-unittest.cc",
"base/cpu-unittest.cc",
@@ -28,9 +29,13 @@ v8_executable("unittests") {
"base/utils/random-number-generator-unittest.cc",
"cancelable-tasks-unittest.cc",
"char-predicates-unittest.cc",
+ "compiler-dispatcher/compiler-dispatcher-helper.cc",
+ "compiler-dispatcher/compiler-dispatcher-helper.h",
"compiler-dispatcher/compiler-dispatcher-job-unittest.cc",
"compiler-dispatcher/compiler-dispatcher-tracer-unittest.cc",
+ "compiler-dispatcher/compiler-dispatcher-unittest.cc",
"compiler/branch-elimination-unittest.cc",
+ "compiler/bytecode-analysis-unittest.cc",
"compiler/checkpoint-elimination-unittest.cc",
"compiler/common-operator-reducer-unittest.cc",
"compiler/common-operator-unittest.cc",
@@ -59,13 +64,11 @@ v8_executable("unittests") {
"compiler/js-typed-lowering-unittest.cc",
"compiler/linkage-tail-call-unittest.cc",
"compiler/live-range-builder.h",
- "compiler/live-range-unittest.cc",
"compiler/liveness-analyzer-unittest.cc",
"compiler/load-elimination-unittest.cc",
"compiler/loop-peeling-unittest.cc",
"compiler/machine-operator-reducer-unittest.cc",
"compiler/machine-operator-unittest.cc",
- "compiler/move-optimizer-unittest.cc",
"compiler/node-cache-unittest.cc",
"compiler/node-matchers-unittest.cc",
"compiler/node-properties-unittest.cc",
@@ -73,7 +76,9 @@ v8_executable("unittests") {
"compiler/node-test-utils.h",
"compiler/node-unittest.cc",
"compiler/opcodes-unittest.cc",
- "compiler/register-allocator-unittest.cc",
+ "compiler/regalloc/live-range-unittest.cc",
+ "compiler/regalloc/move-optimizer-unittest.cc",
+ "compiler/regalloc/register-allocator-unittest.cc",
"compiler/schedule-unittest.cc",
"compiler/scheduler-rpo-unittest.cc",
"compiler/scheduler-unittest.cc",
@@ -89,6 +94,7 @@ v8_executable("unittests") {
"eh-frame-iterator-unittest.cc",
"eh-frame-writer-unittest.cc",
"heap/bitmap-unittest.cc",
+ "heap/embedder-tracing-unittest.cc",
"heap/gc-idle-time-handler-unittest.cc",
"heap/gc-tracer-unittest.cc",
"heap/heap-unittest.cc",
@@ -96,11 +102,14 @@ v8_executable("unittests") {
"heap/memory-reducer-unittest.cc",
"heap/scavenge-job-unittest.cc",
"heap/slot-set-unittest.cc",
+ "heap/unmapper-unittest.cc",
"interpreter/bytecode-array-builder-unittest.cc",
"interpreter/bytecode-array-iterator-unittest.cc",
+ "interpreter/bytecode-array-random-iterator-unittest.cc",
"interpreter/bytecode-array-writer-unittest.cc",
"interpreter/bytecode-dead-code-optimizer-unittest.cc",
"interpreter/bytecode-decoder-unittest.cc",
+ "interpreter/bytecode-operands-unittest.cc",
"interpreter/bytecode-peephole-optimizer-unittest.cc",
"interpreter/bytecode-pipeline-unittest.cc",
"interpreter/bytecode-register-allocator-unittest.cc",
@@ -114,6 +123,7 @@ v8_executable("unittests") {
"libplatform/task-queue-unittest.cc",
"libplatform/worker-thread-unittest.cc",
"locked-queue-unittest.cc",
+ "object-unittest.cc",
"register-configuration-unittest.cc",
"run-all-unittests.cc",
"source-position-table-unittest.cc",
@@ -122,9 +132,9 @@ v8_executable("unittests") {
"unicode-unittest.cc",
"value-serializer-unittest.cc",
"wasm/asm-types-unittest.cc",
- "wasm/ast-decoder-unittest.cc",
"wasm/control-transfer-unittest.cc",
"wasm/decoder-unittest.cc",
+ "wasm/function-body-decoder-unittest.cc",
"wasm/leb-helper-unittest.cc",
"wasm/loop-assignment-analysis-unittest.cc",
"wasm/module-decoder-unittest.cc",
diff --git a/deps/v8/test/unittests/api/access-check-unittest.cc b/deps/v8/test/unittests/api/access-check-unittest.cc
new file mode 100644
index 0000000000..05913de62a
--- /dev/null
+++ b/deps/v8/test/unittests/api/access-check-unittest.cc
@@ -0,0 +1,79 @@
+// Copyright 2017 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "include/v8.h"
+#include "test/unittests/test-utils.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace v8 {
+
+using AccessCheckTest = TestWithIsolate;
+
+namespace {
+
+bool AccessCheck(Local<Context> accessing_context,
+ Local<Object> accessed_object, Local<Value> data) {
+ return false;
+}
+
+MaybeLocal<Value> CompileRun(Isolate* isolate, const char* source) {
+ Local<String> source_string =
+ String::NewFromUtf8(isolate, source, NewStringType::kNormal)
+ .ToLocalChecked();
+ Local<Context> context = isolate->GetCurrentContext();
+ Local<Script> script =
+ Script::Compile(context, source_string).ToLocalChecked();
+ return script->Run(context);
+}
+
+} // namespace
+
+TEST_F(AccessCheckTest, GetOwnPropertyDescriptor) {
+ isolate()->SetFailedAccessCheckCallbackFunction(
+ [](v8::Local<v8::Object> host, v8::AccessType type,
+ v8::Local<v8::Value> data) {});
+ Local<ObjectTemplate> global_template = ObjectTemplate::New(isolate());
+ global_template->SetAccessCheckCallback(AccessCheck);
+
+ Local<FunctionTemplate> getter_template = FunctionTemplate::New(
+ isolate(), [](const FunctionCallbackInfo<Value>& info) {
+ FAIL() << "This should never be called.";
+ info.GetReturnValue().Set(42);
+ });
+ getter_template->SetAcceptAnyReceiver(false);
+ Local<FunctionTemplate> setter_template = FunctionTemplate::New(
+ isolate(), [](const FunctionCallbackInfo<v8::Value>& info) {
+ FAIL() << "This should never be called.";
+ });
+ setter_template->SetAcceptAnyReceiver(false);
+ global_template->SetAccessorProperty(
+ String::NewFromUtf8(isolate(), "property", NewStringType::kNormal)
+ .ToLocalChecked(),
+ getter_template, setter_template);
+
+ Local<Context> target_context =
+ Context::New(isolate(), nullptr, global_template);
+ Local<Context> accessing_context =
+ Context::New(isolate(), nullptr, global_template);
+
+ accessing_context->Global()
+ ->Set(accessing_context,
+ String::NewFromUtf8(isolate(), "other", NewStringType::kNormal)
+ .ToLocalChecked(),
+ target_context->Global())
+ .FromJust();
+
+ Context::Scope context_scope(accessing_context);
+ Local<Value> result =
+ CompileRun(isolate(),
+ "Object.getOwnPropertyDescriptor(this, 'property')"
+ " .get.call(other);")
+ .ToLocalChecked();
+ EXPECT_TRUE(result->IsUndefined());
+ CompileRun(isolate(),
+ "Object.getOwnPropertyDescriptor(this, 'property')"
+ " .set.call(other, 42);");
+}
+
+} // namespace v8
diff --git a/deps/v8/test/unittests/base/logging-unittest.cc b/deps/v8/test/unittests/base/logging-unittest.cc
index 918feb114b..5c0ca007c4 100644
--- a/deps/v8/test/unittests/base/logging-unittest.cc
+++ b/deps/v8/test/unittests/base/logging-unittest.cc
@@ -2,17 +2,65 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
+#include <cstdint>
+
#include "src/base/logging.h"
+#include "src/objects.h"
#include "testing/gtest-support.h"
namespace v8 {
namespace base {
+namespace {
+
+#define CHECK_SUCCEED(NAME, lhs, rhs) \
+ { \
+ std::string* error_message = \
+ Check##NAME##Impl<decltype(lhs), decltype(rhs)>((lhs), (rhs), ""); \
+ EXPECT_EQ(nullptr, error_message); \
+ }
+
+#define CHECK_FAIL(NAME, lhs, rhs) \
+ { \
+ std::string* error_message = \
+ Check##NAME##Impl<decltype(lhs), decltype(rhs)>((lhs), (rhs), ""); \
+ EXPECT_NE(nullptr, error_message); \
+ delete error_message; \
+ }
+
+} // namespace
+
TEST(LoggingTest, CheckEQImpl) {
- EXPECT_EQ(nullptr, CheckEQImpl(0.0, 0.0, ""));
- EXPECT_EQ(nullptr, CheckEQImpl(0.0, -0.0, ""));
- EXPECT_EQ(nullptr, CheckEQImpl(-0.0, 0.0, ""));
- EXPECT_EQ(nullptr, CheckEQImpl(-0.0, -0.0, ""));
+ CHECK_SUCCEED(EQ, 0.0, 0.0)
+ CHECK_SUCCEED(EQ, 0.0, -0.0)
+ CHECK_SUCCEED(EQ, -0.0, 0.0)
+ CHECK_SUCCEED(EQ, -0.0, -0.0)
+}
+
+TEST(LoggingTest, CompareSignedMismatch) {
+ CHECK_SUCCEED(EQ, static_cast<int32_t>(14), static_cast<uint32_t>(14))
+ CHECK_FAIL(EQ, static_cast<int32_t>(14), static_cast<uint32_t>(15))
+ CHECK_FAIL(EQ, static_cast<int32_t>(-1), static_cast<uint32_t>(-1))
+ CHECK_SUCCEED(LT, static_cast<int32_t>(-1), static_cast<uint32_t>(0))
+ CHECK_SUCCEED(LT, static_cast<int32_t>(-1), static_cast<uint32_t>(-1))
+ CHECK_SUCCEED(LE, static_cast<int32_t>(-1), static_cast<uint32_t>(0))
+ CHECK_SUCCEED(LE, static_cast<int32_t>(55), static_cast<uint32_t>(55))
+ CHECK_SUCCEED(LT, static_cast<int32_t>(55), static_cast<uint32_t>(0x7fffff00))
+ CHECK_SUCCEED(LE, static_cast<int32_t>(55), static_cast<uint32_t>(0x7fffff00))
+ CHECK_SUCCEED(GE, static_cast<uint32_t>(0x7fffff00), static_cast<int32_t>(55))
+ CHECK_SUCCEED(GT, static_cast<uint32_t>(0x7fffff00), static_cast<int32_t>(55))
+ CHECK_SUCCEED(GT, static_cast<uint32_t>(-1), static_cast<int32_t>(-1))
+ CHECK_SUCCEED(GE, static_cast<uint32_t>(0), static_cast<int32_t>(-1))
+ CHECK_SUCCEED(LT, static_cast<int8_t>(-1), static_cast<uint32_t>(0))
+ CHECK_SUCCEED(GT, static_cast<uint64_t>(0x7f01010101010101), 0)
+ CHECK_SUCCEED(LE, static_cast<int64_t>(0xff01010101010101),
+ static_cast<uint8_t>(13))
+}
+
+TEST(LoggingTest, CompareAgainstStaticConstPointer) {
+ // These used to produce link errors before http://crrev.com/2524093002.
+ CHECK_FAIL(EQ, v8::internal::Smi::kZero, v8::internal::Smi::FromInt(17));
+ CHECK_SUCCEED(GT, 0, v8::internal::Smi::kMinValue);
}
} // namespace base
diff --git a/deps/v8/test/unittests/cancelable-tasks-unittest.cc b/deps/v8/test/unittests/cancelable-tasks-unittest.cc
index 37690aaf80..eb5dd91589 100644
--- a/deps/v8/test/unittests/cancelable-tasks-unittest.cc
+++ b/deps/v8/test/unittests/cancelable-tasks-unittest.cc
@@ -214,5 +214,50 @@ TEST(CancelableTask, RemoveUnmanagedId) {
EXPECT_FALSE(manager.TryAbort(3));
}
+TEST(CancelableTask, EmptyTryAbortAll) {
+ CancelableTaskManager manager;
+ EXPECT_EQ(manager.TryAbortAll(), CancelableTaskManager::kTaskRemoved);
+}
+
+TEST(CancelableTask, ThreadedMultipleTasksNotRunTryAbortAll) {
+ CancelableTaskManager manager;
+ ResultType result1 = 0;
+ ResultType result2 = 0;
+ TestTask* task1 = new TestTask(&manager, &result1, TestTask::kCheckNotRun);
+ TestTask* task2 = new TestTask(&manager, &result2, TestTask::kCheckNotRun);
+ ThreadedRunner runner1(task1);
+ ThreadedRunner runner2(task2);
+ EXPECT_EQ(manager.TryAbortAll(), CancelableTaskManager::kTaskAborted);
+ // Tasks are canceled, hence the runner will bail out and not update result.
+ runner1.Start();
+ runner2.Start();
+ runner1.Join();
+ runner2.Join();
+ EXPECT_EQ(GetValue(&result1), 0);
+ EXPECT_EQ(GetValue(&result2), 0);
+}
+
+TEST(CancelableTask, ThreadedMultipleTasksStartedTryAbortAll) {
+ CancelableTaskManager manager;
+ ResultType result1 = 0;
+ ResultType result2 = 0;
+ TestTask* task1 =
+ new TestTask(&manager, &result1, TestTask::kWaitTillCanceledAgain);
+ TestTask* task2 =
+ new TestTask(&manager, &result2, TestTask::kWaitTillCanceledAgain);
+ ThreadedRunner runner1(task1);
+ ThreadedRunner runner2(task2);
+ runner1.Start();
+ // Busy wait on result to make sure task1 is done.
+ while (GetValue(&result1) == 0) {
+ }
+ EXPECT_EQ(manager.TryAbortAll(), CancelableTaskManager::kTaskRunning);
+ runner2.Start();
+ runner1.Join();
+ runner2.Join();
+ EXPECT_EQ(GetValue(&result1), 1);
+ EXPECT_EQ(GetValue(&result2), 0);
+}
+
} // namespace internal
} // namespace v8
diff --git a/deps/v8/test/unittests/compiler-dispatcher/compiler-dispatcher-helper.cc b/deps/v8/test/unittests/compiler-dispatcher/compiler-dispatcher-helper.cc
new file mode 100644
index 0000000000..16fa160aec
--- /dev/null
+++ b/deps/v8/test/unittests/compiler-dispatcher/compiler-dispatcher-helper.cc
@@ -0,0 +1,28 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "test/unittests/compiler-dispatcher/compiler-dispatcher-helper.h"
+
+#include <memory>
+
+#include "include/v8.h"
+#include "src/api.h"
+#include "src/objects-inl.h"
+
+namespace v8 {
+namespace internal {
+
+Handle<Object> RunJS(v8::Isolate* isolate, const char* script) {
+ return Utils::OpenHandle(
+ *v8::Script::Compile(
+ isolate->GetCurrentContext(),
+ v8::String::NewFromUtf8(isolate, script, v8::NewStringType::kNormal)
+ .ToLocalChecked())
+ .ToLocalChecked()
+ ->Run(isolate->GetCurrentContext())
+ .ToLocalChecked());
+}
+
+} // namespace internal
+} // namespace v8
diff --git a/deps/v8/test/unittests/compiler-dispatcher/compiler-dispatcher-helper.h b/deps/v8/test/unittests/compiler-dispatcher/compiler-dispatcher-helper.h
new file mode 100644
index 0000000000..97084bd31f
--- /dev/null
+++ b/deps/v8/test/unittests/compiler-dispatcher/compiler-dispatcher-helper.h
@@ -0,0 +1,23 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_UNITTESTS_COMPILER_DISPATCHER_COMPILER_DISPATCHER_HELPER_H_
+#define V8_UNITTESTS_COMPILER_DISPATCHER_COMPILER_DISPATCHER_HELPER_H_
+
+namespace v8 {
+
+class Isolate;
+
+namespace internal {
+
+class Object;
+template <typename T>
+class Handle;
+
+Handle<Object> RunJS(v8::Isolate* isolate, const char* script);
+
+} // namespace internal
+} // namespace v8
+
+#endif // V8_UNITTESTS_COMPILER_DISPATCHER_COMPILER_DISPATCHER_HELPER_H_
diff --git a/deps/v8/test/unittests/compiler-dispatcher/compiler-dispatcher-job-unittest.cc b/deps/v8/test/unittests/compiler-dispatcher/compiler-dispatcher-job-unittest.cc
index d4c54247e2..ca9f44725b 100644
--- a/deps/v8/test/unittests/compiler-dispatcher/compiler-dispatcher-job-unittest.cc
+++ b/deps/v8/test/unittests/compiler-dispatcher/compiler-dispatcher-job-unittest.cc
@@ -10,27 +10,28 @@
#include "src/ast/scopes.h"
#include "src/base/platform/semaphore.h"
#include "src/compiler-dispatcher/compiler-dispatcher-job.h"
+#include "src/compiler-dispatcher/compiler-dispatcher-tracer.h"
#include "src/flags.h"
#include "src/isolate-inl.h"
#include "src/parsing/parse-info.h"
#include "src/v8.h"
+#include "test/unittests/compiler-dispatcher/compiler-dispatcher-helper.h"
#include "test/unittests/test-utils.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace v8 {
namespace internal {
-typedef TestWithContext CompilerDispatcherJobTest;
-
-class IgnitionCompilerDispatcherJobTest : public TestWithContext {
+class CompilerDispatcherJobTest : public TestWithContext {
public:
- IgnitionCompilerDispatcherJobTest() {}
- ~IgnitionCompilerDispatcherJobTest() override {}
+ CompilerDispatcherJobTest() : tracer_(i_isolate()) {}
+ ~CompilerDispatcherJobTest() override {}
+
+ CompilerDispatcherTracer* tracer() { return &tracer_; }
static void SetUpTestCase() {
old_flag_ = i::FLAG_ignition;
i::FLAG_ignition = true;
- i::FLAG_never_compact = true;
TestWithContext::SetUpTestCase();
}
@@ -40,11 +41,13 @@ class IgnitionCompilerDispatcherJobTest : public TestWithContext {
}
private:
+ CompilerDispatcherTracer tracer_;
static bool old_flag_;
- DISALLOW_COPY_AND_ASSIGN(IgnitionCompilerDispatcherJobTest);
+
+ DISALLOW_COPY_AND_ASSIGN(CompilerDispatcherJobTest);
};
-bool IgnitionCompilerDispatcherJobTest::old_flag_;
+bool CompilerDispatcherJobTest::old_flag_;
namespace {
@@ -78,53 +81,29 @@ Handle<SharedFunctionInfo> CreateSharedFunctionInfo(
source = isolate->factory()->NewStringFromAsciiChecked(test_script);
}
Handle<Script> script = isolate->factory()->NewScript(source);
+ Handle<FixedArray> infos = isolate->factory()->NewFixedArray(3);
+ script->set_shared_function_infos(*infos);
Handle<SharedFunctionInfo> shared = isolate->factory()->NewSharedFunctionInfo(
isolate->factory()->NewStringFromAsciiChecked("f"),
isolate->builtins()->CompileLazy(), false);
- SharedFunctionInfo::SetScript(shared, script);
shared->set_end_position(source->length());
shared->set_outer_scope_info(ScopeInfo::Empty(isolate));
+ shared->set_function_literal_id(1);
+ SharedFunctionInfo::SetScript(shared, script);
return scope.CloseAndEscape(shared);
}
-Handle<Object> RunJS(v8::Isolate* isolate, const char* script) {
- return Utils::OpenHandle(
- *v8::Script::Compile(
- isolate->GetCurrentContext(),
- v8::String::NewFromUtf8(isolate, script, v8::NewStringType::kNormal)
- .ToLocalChecked())
- .ToLocalChecked()
- ->Run(isolate->GetCurrentContext())
- .ToLocalChecked());
-}
-
} // namespace
TEST_F(CompilerDispatcherJobTest, Construct) {
std::unique_ptr<CompilerDispatcherJob> job(new CompilerDispatcherJob(
- i_isolate(), CreateSharedFunctionInfo(i_isolate(), nullptr),
+ i_isolate(), tracer(), CreateSharedFunctionInfo(i_isolate(), nullptr),
FLAG_stack_size));
}
-TEST_F(CompilerDispatcherJobTest, CanParseOnBackgroundThread) {
- {
- std::unique_ptr<CompilerDispatcherJob> job(new CompilerDispatcherJob(
- i_isolate(), CreateSharedFunctionInfo(i_isolate(), nullptr),
- FLAG_stack_size));
- ASSERT_FALSE(job->can_parse_on_background_thread());
- }
- {
- ScriptResource script(test_script, strlen(test_script));
- std::unique_ptr<CompilerDispatcherJob> job(new CompilerDispatcherJob(
- i_isolate(), CreateSharedFunctionInfo(i_isolate(), &script),
- FLAG_stack_size));
- ASSERT_TRUE(job->can_parse_on_background_thread());
- }
-}
-
TEST_F(CompilerDispatcherJobTest, StateTransitions) {
std::unique_ptr<CompilerDispatcherJob> job(new CompilerDispatcherJob(
- i_isolate(), CreateSharedFunctionInfo(i_isolate(), nullptr),
+ i_isolate(), tracer(), CreateSharedFunctionInfo(i_isolate(), nullptr),
FLAG_stack_size));
ASSERT_TRUE(job->status() == CompileJobStatus::kInitial);
@@ -147,7 +126,7 @@ TEST_F(CompilerDispatcherJobTest, StateTransitions) {
TEST_F(CompilerDispatcherJobTest, SyntaxError) {
ScriptResource script("^^^", strlen("^^^"));
std::unique_ptr<CompilerDispatcherJob> job(new CompilerDispatcherJob(
- i_isolate(), CreateSharedFunctionInfo(i_isolate(), &script),
+ i_isolate(), tracer(), CreateSharedFunctionInfo(i_isolate(), &script),
FLAG_stack_size));
job->PrepareToParseOnMainThread();
@@ -169,7 +148,7 @@ TEST_F(CompilerDispatcherJobTest, ScopeChain) {
Handle<JSFunction> f = Handle<JSFunction>::cast(RunJS(isolate(), script));
std::unique_ptr<CompilerDispatcherJob> job(new CompilerDispatcherJob(
- i_isolate(), handle(f->shared()), FLAG_stack_size));
+ i_isolate(), tracer(), handle(f->shared()), FLAG_stack_size));
job->PrepareToParseOnMainThread();
job->Parse();
@@ -205,7 +184,7 @@ TEST_F(CompilerDispatcherJobTest, CompileAndRun) {
"g();";
Handle<JSFunction> f = Handle<JSFunction>::cast(RunJS(isolate(), script));
std::unique_ptr<CompilerDispatcherJob> job(new CompilerDispatcherJob(
- i_isolate(), handle(f->shared()), FLAG_stack_size));
+ i_isolate(), tracer(), handle(f->shared()), FLAG_stack_size));
job->PrepareToParseOnMainThread();
job->Parse();
@@ -230,7 +209,8 @@ TEST_F(CompilerDispatcherJobTest, CompileFailureToPrepare) {
raw_script += " 'x'; }";
ScriptResource script(raw_script.c_str(), strlen(raw_script.c_str()));
std::unique_ptr<CompilerDispatcherJob> job(new CompilerDispatcherJob(
- i_isolate(), CreateSharedFunctionInfo(i_isolate(), &script), 100));
+ i_isolate(), tracer(), CreateSharedFunctionInfo(i_isolate(), &script),
+ 100));
job->PrepareToParseOnMainThread();
job->Parse();
@@ -252,7 +232,8 @@ TEST_F(CompilerDispatcherJobTest, CompileFailureToFinalize) {
raw_script += " 'x'; }";
ScriptResource script(raw_script.c_str(), strlen(raw_script.c_str()));
std::unique_ptr<CompilerDispatcherJob> job(new CompilerDispatcherJob(
- i_isolate(), CreateSharedFunctionInfo(i_isolate(), &script), 50));
+ i_isolate(), tracer(), CreateSharedFunctionInfo(i_isolate(), &script),
+ 50));
job->PrepareToParseOnMainThread();
job->Parse();
@@ -285,7 +266,7 @@ class CompileTask : public Task {
DISALLOW_COPY_AND_ASSIGN(CompileTask);
};
-TEST_F(IgnitionCompilerDispatcherJobTest, CompileOnBackgroundThread) {
+TEST_F(CompilerDispatcherJobTest, CompileOnBackgroundThread) {
const char* raw_script =
"(a, b) {\n"
" var c = a + b;\n"
@@ -295,13 +276,13 @@ TEST_F(IgnitionCompilerDispatcherJobTest, CompileOnBackgroundThread) {
"}";
ScriptResource script(raw_script, strlen(raw_script));
std::unique_ptr<CompilerDispatcherJob> job(new CompilerDispatcherJob(
- i_isolate(), CreateSharedFunctionInfo(i_isolate(), &script), 100));
+ i_isolate(), tracer(), CreateSharedFunctionInfo(i_isolate(), &script),
+ 100));
job->PrepareToParseOnMainThread();
job->Parse();
job->FinalizeParsingOnMainThread();
job->PrepareToCompileOnMainThread();
- ASSERT_TRUE(job->can_compile_on_background_thread());
base::Semaphore semaphore(0);
CompileTask* background_task = new CompileTask(job.get(), &semaphore);
@@ -315,5 +296,36 @@ TEST_F(IgnitionCompilerDispatcherJobTest, CompileOnBackgroundThread) {
ASSERT_TRUE(job->status() == CompileJobStatus::kInitial);
}
+TEST_F(CompilerDispatcherJobTest, LazyInnerFunctions) {
+ const char script[] =
+ "function g() {\n"
+ " f = function() {\n"
+ " e = (function() { return 42; });\n"
+ " return e;\n"
+ " };\n"
+ " return f;\n"
+ "}\n"
+ "g();";
+ Handle<JSFunction> f = Handle<JSFunction>::cast(RunJS(isolate(), script));
+
+ std::unique_ptr<CompilerDispatcherJob> job(new CompilerDispatcherJob(
+ i_isolate(), tracer(), handle(f->shared()), FLAG_stack_size));
+
+ job->PrepareToParseOnMainThread();
+ job->Parse();
+ ASSERT_TRUE(job->FinalizeParsingOnMainThread());
+ ASSERT_TRUE(job->PrepareToCompileOnMainThread());
+ job->Compile();
+ ASSERT_TRUE(job->FinalizeCompilingOnMainThread());
+ ASSERT_TRUE(job->status() == CompileJobStatus::kDone);
+
+ Handle<JSFunction> e = Handle<JSFunction>::cast(RunJS(isolate(), "f();"));
+
+ ASSERT_FALSE(e->shared()->HasBaselineCode());
+
+ job->ResetOnMainThread();
+ ASSERT_TRUE(job->status() == CompileJobStatus::kInitial);
+}
+
} // namespace internal
} // namespace v8
diff --git a/deps/v8/test/unittests/compiler-dispatcher/compiler-dispatcher-tracer-unittest.cc b/deps/v8/test/unittests/compiler-dispatcher/compiler-dispatcher-tracer-unittest.cc
index 997765ff83..21ffe76210 100644
--- a/deps/v8/test/unittests/compiler-dispatcher/compiler-dispatcher-tracer-unittest.cc
+++ b/deps/v8/test/unittests/compiler-dispatcher/compiler-dispatcher-tracer-unittest.cc
@@ -8,16 +8,16 @@
namespace v8 {
namespace internal {
-TEST(CompilerDispatcherTracerTest, EstimateZeroWithoutSamples) {
+TEST(CompilerDispatcherTracerTest, EstimateWithoutSamples) {
CompilerDispatcherTracer tracer(nullptr);
EXPECT_EQ(0.0, tracer.EstimatePrepareToParseInMs());
- EXPECT_EQ(0.0, tracer.EstimateParseInMs(0));
- EXPECT_EQ(0.0, tracer.EstimateParseInMs(42));
+ EXPECT_EQ(1.0, tracer.EstimateParseInMs(0));
+ EXPECT_EQ(1.0, tracer.EstimateParseInMs(42));
EXPECT_EQ(0.0, tracer.EstimateFinalizeParsingInMs());
EXPECT_EQ(0.0, tracer.EstimatePrepareToCompileInMs());
- EXPECT_EQ(0.0, tracer.EstimateCompileInMs(0));
- EXPECT_EQ(0.0, tracer.EstimateCompileInMs(42));
+ EXPECT_EQ(1.0, tracer.EstimateCompileInMs(0));
+ EXPECT_EQ(1.0, tracer.EstimateCompileInMs(42));
EXPECT_EQ(0.0, tracer.EstimateFinalizeCompilingInMs());
}
@@ -36,7 +36,7 @@ TEST(CompilerDispatcherTracerTest, Average) {
TEST(CompilerDispatcherTracerTest, SizeBasedAverage) {
CompilerDispatcherTracer tracer(nullptr);
- EXPECT_EQ(0.0, tracer.EstimateParseInMs(100));
+ EXPECT_EQ(1.0, tracer.EstimateParseInMs(100));
// All three samples parse 100 units/ms.
tracer.RecordParse(1.0, 100);
diff --git a/deps/v8/test/unittests/compiler-dispatcher/compiler-dispatcher-unittest.cc b/deps/v8/test/unittests/compiler-dispatcher/compiler-dispatcher-unittest.cc
new file mode 100644
index 0000000000..8e47c48866
--- /dev/null
+++ b/deps/v8/test/unittests/compiler-dispatcher/compiler-dispatcher-unittest.cc
@@ -0,0 +1,806 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/compiler-dispatcher/compiler-dispatcher.h"
+
+#include "include/v8-platform.h"
+#include "src/base/platform/semaphore.h"
+#include "src/compiler-dispatcher/compiler-dispatcher-job.h"
+#include "src/compiler-dispatcher/compiler-dispatcher-tracer.h"
+#include "src/flags.h"
+#include "src/handles.h"
+#include "src/objects-inl.h"
+#include "src/v8.h"
+#include "test/unittests/compiler-dispatcher/compiler-dispatcher-helper.h"
+#include "test/unittests/test-utils.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace v8 {
+namespace internal {
+
+class CompilerDispatcherTest : public TestWithContext {
+ public:
+ CompilerDispatcherTest() = default;
+ ~CompilerDispatcherTest() override = default;
+
+ static void SetUpTestCase() {
+ old_flag_ = i::FLAG_ignition;
+ i::FLAG_compiler_dispatcher = true;
+ old_ignition_flag_ = i::FLAG_ignition;
+ i::FLAG_ignition = true;
+ TestWithContext::SetUpTestCase();
+ }
+
+ static void TearDownTestCase() {
+ TestWithContext::TearDownTestCase();
+ i::FLAG_compiler_dispatcher = old_flag_;
+ i::FLAG_ignition = old_ignition_flag_;
+ }
+
+ private:
+ static bool old_flag_;
+ static bool old_ignition_flag_;
+
+ DISALLOW_COPY_AND_ASSIGN(CompilerDispatcherTest);
+};
+
+bool CompilerDispatcherTest::old_flag_;
+bool CompilerDispatcherTest::old_ignition_flag_;
+
+namespace {
+
+class MockPlatform : public v8::Platform {
+ public:
+ MockPlatform() : time_(0.0), time_step_(0.0), idle_task_(nullptr), sem_(0) {}
+ ~MockPlatform() override {
+ base::LockGuard<base::Mutex> lock(&mutex_);
+ EXPECT_TRUE(foreground_tasks_.empty());
+ EXPECT_TRUE(background_tasks_.empty());
+ EXPECT_TRUE(idle_task_ == nullptr);
+ }
+
+ size_t NumberOfAvailableBackgroundThreads() override { return 1; }
+
+ void CallOnBackgroundThread(Task* task,
+ ExpectedRuntime expected_runtime) override {
+ base::LockGuard<base::Mutex> lock(&mutex_);
+ background_tasks_.push_back(task);
+ }
+
+ void CallOnForegroundThread(v8::Isolate* isolate, Task* task) override {
+ base::LockGuard<base::Mutex> lock(&mutex_);
+ foreground_tasks_.push_back(task);
+ }
+
+ void CallDelayedOnForegroundThread(v8::Isolate* isolate, Task* task,
+ double delay_in_seconds) override {
+ UNREACHABLE();
+ }
+
+ void CallIdleOnForegroundThread(v8::Isolate* isolate,
+ IdleTask* task) override {
+ base::LockGuard<base::Mutex> lock(&mutex_);
+ ASSERT_TRUE(idle_task_ == nullptr);
+ idle_task_ = task;
+ }
+
+ bool IdleTasksEnabled(v8::Isolate* isolate) override { return true; }
+
+ double MonotonicallyIncreasingTime() override {
+ time_ += time_step_;
+ return time_;
+ }
+
+ void RunIdleTask(double deadline_in_seconds, double time_step) {
+ time_step_ = time_step;
+ IdleTask* task;
+ {
+ base::LockGuard<base::Mutex> lock(&mutex_);
+ task = idle_task_;
+ ASSERT_TRUE(idle_task_ != nullptr);
+ idle_task_ = nullptr;
+ }
+ task->Run(deadline_in_seconds);
+ delete task;
+ }
+
+ bool IdleTaskPending() {
+ base::LockGuard<base::Mutex> lock(&mutex_);
+ return idle_task_;
+ }
+
+ bool BackgroundTasksPending() {
+ base::LockGuard<base::Mutex> lock(&mutex_);
+ return !background_tasks_.empty();
+ }
+
+ bool ForegroundTasksPending() {
+ base::LockGuard<base::Mutex> lock(&mutex_);
+ return !foreground_tasks_.empty();
+ }
+
+ void RunBackgroundTasksAndBlock(Platform* platform) {
+ std::vector<Task*> tasks;
+ {
+ base::LockGuard<base::Mutex> lock(&mutex_);
+ tasks.swap(background_tasks_);
+ }
+ platform->CallOnBackgroundThread(new TaskWrapper(this, tasks, true),
+ kShortRunningTask);
+ sem_.Wait();
+ }
+
+ void RunBackgroundTasks(Platform* platform) {
+ std::vector<Task*> tasks;
+ {
+ base::LockGuard<base::Mutex> lock(&mutex_);
+ tasks.swap(background_tasks_);
+ }
+ platform->CallOnBackgroundThread(new TaskWrapper(this, tasks, false),
+ kShortRunningTask);
+ }
+
+ void RunForegroundTasks() {
+ std::vector<Task*> tasks;
+ {
+ base::LockGuard<base::Mutex> lock(&mutex_);
+ tasks.swap(foreground_tasks_);
+ }
+ for (auto& task : tasks) {
+ task->Run();
+ delete task;
+ }
+ }
+
+ void ClearBackgroundTasks() {
+ std::vector<Task*> tasks;
+ {
+ base::LockGuard<base::Mutex> lock(&mutex_);
+ tasks.swap(background_tasks_);
+ }
+ for (auto& task : tasks) {
+ delete task;
+ }
+ }
+
+ void ClearForegroundTasks() {
+ std::vector<Task*> tasks;
+ {
+ base::LockGuard<base::Mutex> lock(&mutex_);
+ tasks.swap(foreground_tasks_);
+ }
+ for (auto& task : tasks) {
+ delete task;
+ }
+ }
+
+ void ClearIdleTask() {
+ base::LockGuard<base::Mutex> lock(&mutex_);
+ ASSERT_TRUE(idle_task_ != nullptr);
+ delete idle_task_;
+ idle_task_ = nullptr;
+ }
+
+ private:
+ class TaskWrapper : public Task {
+ public:
+ TaskWrapper(MockPlatform* platform, const std::vector<Task*>& tasks,
+ bool signal)
+ : platform_(platform), tasks_(tasks), signal_(signal) {}
+ ~TaskWrapper() = default;
+
+ void Run() override {
+ for (auto& task : tasks_) {
+ task->Run();
+ delete task;
+ }
+ if (signal_) platform_->sem_.Signal();
+ }
+
+ private:
+ MockPlatform* platform_;
+ std::vector<Task*> tasks_;
+ bool signal_;
+
+ DISALLOW_COPY_AND_ASSIGN(TaskWrapper);
+ };
+
+ double time_;
+ double time_step_;
+
+ // Protects all *_tasks_.
+ base::Mutex mutex_;
+
+ IdleTask* idle_task_;
+ std::vector<Task*> background_tasks_;
+ std::vector<Task*> foreground_tasks_;
+
+ base::Semaphore sem_;
+
+ DISALLOW_COPY_AND_ASSIGN(MockPlatform);
+};
+
+} // namespace
+
+TEST_F(CompilerDispatcherTest, Construct) {
+ MockPlatform platform;
+ CompilerDispatcher dispatcher(i_isolate(), &platform, FLAG_stack_size);
+}
+
+TEST_F(CompilerDispatcherTest, IsEnqueued) {
+ MockPlatform platform;
+ CompilerDispatcher dispatcher(i_isolate(), &platform, FLAG_stack_size);
+
+ const char script[] =
+ "function g() { var y = 1; function f1(x) { return x * y }; return f1; } "
+ "g();";
+ Handle<JSFunction> f = Handle<JSFunction>::cast(RunJS(isolate(), script));
+ Handle<SharedFunctionInfo> shared(f->shared(), i_isolate());
+
+ ASSERT_FALSE(dispatcher.IsEnqueued(shared));
+ ASSERT_TRUE(dispatcher.Enqueue(shared));
+ ASSERT_TRUE(dispatcher.IsEnqueued(shared));
+ dispatcher.AbortAll(CompilerDispatcher::BlockingBehavior::kBlock);
+ ASSERT_FALSE(dispatcher.IsEnqueued(shared));
+ ASSERT_TRUE(platform.IdleTaskPending());
+ platform.ClearIdleTask();
+}
+
+TEST_F(CompilerDispatcherTest, FinishNow) {
+ MockPlatform platform;
+ CompilerDispatcher dispatcher(i_isolate(), &platform, FLAG_stack_size);
+
+ const char script[] =
+ "function g() { var y = 1; function f2(x) { return x * y }; return f2; } "
+ "g();";
+ Handle<JSFunction> f = Handle<JSFunction>::cast(RunJS(isolate(), script));
+ Handle<SharedFunctionInfo> shared(f->shared(), i_isolate());
+
+ ASSERT_FALSE(shared->is_compiled());
+ ASSERT_TRUE(dispatcher.Enqueue(shared));
+ ASSERT_TRUE(dispatcher.FinishNow(shared));
+ // Finishing removes the SFI from the queue.
+ ASSERT_FALSE(dispatcher.IsEnqueued(shared));
+ ASSERT_TRUE(shared->is_compiled());
+ ASSERT_TRUE(platform.IdleTaskPending());
+ platform.ClearIdleTask();
+}
+
+TEST_F(CompilerDispatcherTest, IdleTask) {
+ MockPlatform platform;
+ CompilerDispatcher dispatcher(i_isolate(), &platform, FLAG_stack_size);
+
+ const char script[] =
+ "function g() { var y = 1; function f3(x) { return x * y }; return f3; } "
+ "g();";
+ Handle<JSFunction> f = Handle<JSFunction>::cast(RunJS(isolate(), script));
+ Handle<SharedFunctionInfo> shared(f->shared(), i_isolate());
+
+ ASSERT_FALSE(platform.IdleTaskPending());
+ ASSERT_TRUE(dispatcher.Enqueue(shared));
+ ASSERT_TRUE(platform.IdleTaskPending());
+
+ // Since time doesn't progress on the MockPlatform, this is enough idle time
+ // to finish compiling the function.
+ platform.RunIdleTask(1000.0, 0.0);
+
+ ASSERT_FALSE(dispatcher.IsEnqueued(shared));
+ ASSERT_TRUE(shared->is_compiled());
+}
+
+TEST_F(CompilerDispatcherTest, IdleTaskSmallIdleTime) {
+ MockPlatform platform;
+ CompilerDispatcher dispatcher(i_isolate(), &platform, FLAG_stack_size);
+
+ const char script[] =
+ "function g() { var y = 1; function f4(x) { return x * y }; return f4; } "
+ "g();";
+ Handle<JSFunction> f = Handle<JSFunction>::cast(RunJS(isolate(), script));
+ Handle<SharedFunctionInfo> shared(f->shared(), i_isolate());
+
+ ASSERT_FALSE(platform.IdleTaskPending());
+ ASSERT_TRUE(dispatcher.Enqueue(shared));
+ ASSERT_TRUE(platform.IdleTaskPending());
+
+ // The job should be scheduled for the main thread.
+ ASSERT_EQ(dispatcher.jobs_.size(), 1u);
+ ASSERT_TRUE(dispatcher.jobs_.begin()->second->status() ==
+ CompileJobStatus::kInitial);
+
+ // Only grant a little idle time and have time advance beyond it in one step.
+ platform.RunIdleTask(2.0, 1.0);
+
+ ASSERT_TRUE(dispatcher.IsEnqueued(shared));
+ ASSERT_FALSE(shared->is_compiled());
+ ASSERT_TRUE(platform.IdleTaskPending());
+
+ // The job should be still scheduled for the main thread, but ready for
+ // parsing.
+ ASSERT_EQ(dispatcher.jobs_.size(), 1u);
+ ASSERT_TRUE(dispatcher.jobs_.begin()->second->status() ==
+ CompileJobStatus::kReadyToParse);
+
+ // Now grant a lot of idle time and freeze time.
+ platform.RunIdleTask(1000.0, 0.0);
+
+ ASSERT_FALSE(dispatcher.IsEnqueued(shared));
+ ASSERT_TRUE(shared->is_compiled());
+ ASSERT_FALSE(platform.IdleTaskPending());
+}
+
+TEST_F(CompilerDispatcherTest, IdleTaskException) {
+ MockPlatform platform;
+ CompilerDispatcher dispatcher(i_isolate(), &platform, 50);
+
+ std::string script("function g() { function f5(x) { var a = ");
+ for (int i = 0; i < 1000; i++) {
+ script += "'x' + ";
+ }
+ script += " 'x'; }; return f5; } g();";
+ Handle<JSFunction> f =
+ Handle<JSFunction>::cast(RunJS(isolate(), script.c_str()));
+ Handle<SharedFunctionInfo> shared(f->shared(), i_isolate());
+
+ ASSERT_FALSE(platform.IdleTaskPending());
+ ASSERT_TRUE(dispatcher.Enqueue(shared));
+ ASSERT_TRUE(platform.IdleTaskPending());
+
+ // Since time doesn't progress on the MockPlatform, this is enough idle time
+ // to finish compiling the function.
+ platform.RunIdleTask(1000.0, 0.0);
+
+ ASSERT_FALSE(dispatcher.IsEnqueued(shared));
+ ASSERT_FALSE(shared->is_compiled());
+ ASSERT_FALSE(i_isolate()->has_pending_exception());
+}
+
+TEST_F(CompilerDispatcherTest, CompileOnBackgroundThread) {
+ MockPlatform platform;
+ CompilerDispatcher dispatcher(i_isolate(), &platform, FLAG_stack_size);
+
+ const char script[] =
+ "function g() { var y = 1; function f6(x) { return x * y }; return f6; } "
+ "g();";
+ Handle<JSFunction> f = Handle<JSFunction>::cast(RunJS(isolate(), script));
+ Handle<SharedFunctionInfo> shared(f->shared(), i_isolate());
+
+ ASSERT_FALSE(platform.IdleTaskPending());
+ ASSERT_TRUE(dispatcher.Enqueue(shared));
+ ASSERT_TRUE(platform.IdleTaskPending());
+
+ ASSERT_EQ(dispatcher.jobs_.size(), 1u);
+ ASSERT_TRUE(dispatcher.jobs_.begin()->second->status() ==
+ CompileJobStatus::kInitial);
+
+ // Make compiling super expensive, and advance job as much as possible on the
+ // foreground thread.
+ dispatcher.tracer_->RecordCompile(50000.0, 1);
+ platform.RunIdleTask(10.0, 0.0);
+ ASSERT_TRUE(dispatcher.jobs_.begin()->second->status() ==
+ CompileJobStatus::kReadyToCompile);
+
+ ASSERT_TRUE(dispatcher.IsEnqueued(shared));
+ ASSERT_FALSE(shared->is_compiled());
+ ASSERT_FALSE(platform.IdleTaskPending());
+ ASSERT_TRUE(platform.BackgroundTasksPending());
+
+ platform.RunBackgroundTasksAndBlock(V8::GetCurrentPlatform());
+
+ ASSERT_TRUE(platform.IdleTaskPending());
+ ASSERT_FALSE(platform.BackgroundTasksPending());
+ ASSERT_TRUE(dispatcher.jobs_.begin()->second->status() ==
+ CompileJobStatus::kCompiled);
+
+ // Now grant a lot of idle time and freeze time.
+ platform.RunIdleTask(1000.0, 0.0);
+
+ ASSERT_FALSE(dispatcher.IsEnqueued(shared));
+ ASSERT_TRUE(shared->is_compiled());
+ ASSERT_FALSE(platform.IdleTaskPending());
+}
+
+TEST_F(CompilerDispatcherTest, FinishNowWithBackgroundTask) {
+ MockPlatform platform;
+ CompilerDispatcher dispatcher(i_isolate(), &platform, FLAG_stack_size);
+
+ const char script[] =
+ "function g() { var y = 1; function f7(x) { return x * y }; return f7; } "
+ "g();";
+ Handle<JSFunction> f = Handle<JSFunction>::cast(RunJS(isolate(), script));
+ Handle<SharedFunctionInfo> shared(f->shared(), i_isolate());
+
+ ASSERT_FALSE(platform.IdleTaskPending());
+ ASSERT_TRUE(dispatcher.Enqueue(shared));
+ ASSERT_TRUE(platform.IdleTaskPending());
+
+ ASSERT_EQ(dispatcher.jobs_.size(), 1u);
+ ASSERT_TRUE(dispatcher.jobs_.begin()->second->status() ==
+ CompileJobStatus::kInitial);
+
+ // Make compiling super expensive, and advance job as much as possible on the
+ // foreground thread.
+ dispatcher.tracer_->RecordCompile(50000.0, 1);
+ platform.RunIdleTask(10.0, 0.0);
+ ASSERT_TRUE(dispatcher.jobs_.begin()->second->status() ==
+ CompileJobStatus::kReadyToCompile);
+
+ ASSERT_TRUE(dispatcher.IsEnqueued(shared));
+ ASSERT_FALSE(shared->is_compiled());
+ ASSERT_FALSE(platform.IdleTaskPending());
+ ASSERT_TRUE(platform.BackgroundTasksPending());
+
+ // This does not block, but races with the FinishNow() call below.
+ platform.RunBackgroundTasks(V8::GetCurrentPlatform());
+
+ ASSERT_TRUE(dispatcher.FinishNow(shared));
+ // Finishing removes the SFI from the queue.
+ ASSERT_FALSE(dispatcher.IsEnqueued(shared));
+ ASSERT_TRUE(shared->is_compiled());
+ if (platform.IdleTaskPending()) platform.ClearIdleTask();
+ ASSERT_FALSE(platform.BackgroundTasksPending());
+}
+
+TEST_F(CompilerDispatcherTest, IdleTaskMultipleJobs) {
+ MockPlatform platform;
+ CompilerDispatcher dispatcher(i_isolate(), &platform, FLAG_stack_size);
+
+ const char script1[] =
+ "function g() { var y = 1; function f8(x) { return x * y }; return f8; } "
+ "g();";
+ Handle<JSFunction> f1 = Handle<JSFunction>::cast(RunJS(isolate(), script1));
+ Handle<SharedFunctionInfo> shared1(f1->shared(), i_isolate());
+
+ const char script2[] =
+ "function g() { var y = 1; function f9(x) { return x * y }; return f9; } "
+ "g();";
+ Handle<JSFunction> f2 = Handle<JSFunction>::cast(RunJS(isolate(), script2));
+ Handle<SharedFunctionInfo> shared2(f2->shared(), i_isolate());
+
+ ASSERT_FALSE(platform.IdleTaskPending());
+ ASSERT_TRUE(dispatcher.Enqueue(shared1));
+ ASSERT_TRUE(dispatcher.Enqueue(shared2));
+ ASSERT_TRUE(platform.IdleTaskPending());
+
+ // Since time doesn't progress on the MockPlatform, this is enough idle time
+ // to finish compiling the function.
+ platform.RunIdleTask(1000.0, 0.0);
+
+ ASSERT_FALSE(dispatcher.IsEnqueued(shared1));
+ ASSERT_FALSE(dispatcher.IsEnqueued(shared2));
+ ASSERT_TRUE(shared1->is_compiled());
+ ASSERT_TRUE(shared2->is_compiled());
+}
+
+TEST_F(CompilerDispatcherTest, FinishNowException) {
+ MockPlatform platform;
+ CompilerDispatcher dispatcher(i_isolate(), &platform, 50);
+
+ std::string script("function g() { function f10(x) { var a = ");
+ for (int i = 0; i < 1000; i++) {
+ script += "'x' + ";
+ }
+ script += " 'x'; }; return f10; } g();";
+ Handle<JSFunction> f =
+ Handle<JSFunction>::cast(RunJS(isolate(), script.c_str()));
+ Handle<SharedFunctionInfo> shared(f->shared(), i_isolate());
+
+ ASSERT_FALSE(platform.IdleTaskPending());
+ ASSERT_TRUE(dispatcher.Enqueue(shared));
+ ASSERT_TRUE(platform.IdleTaskPending());
+
+ ASSERT_FALSE(dispatcher.FinishNow(shared));
+
+ ASSERT_FALSE(dispatcher.IsEnqueued(shared));
+ ASSERT_FALSE(shared->is_compiled());
+ ASSERT_TRUE(i_isolate()->has_pending_exception());
+
+ i_isolate()->clear_pending_exception();
+ platform.ClearIdleTask();
+}
+
+TEST_F(CompilerDispatcherTest, AsyncAbortAllPendingBackgroundTask) {
+ MockPlatform platform;
+ CompilerDispatcher dispatcher(i_isolate(), &platform, FLAG_stack_size);
+
+ const char script[] =
+ "function g() { var y = 1; function f11(x) { return x * y }; return f11; "
+ "} g();";
+ Handle<JSFunction> f = Handle<JSFunction>::cast(RunJS(isolate(), script));
+ Handle<SharedFunctionInfo> shared(f->shared(), i_isolate());
+
+ ASSERT_FALSE(platform.IdleTaskPending());
+ ASSERT_TRUE(dispatcher.Enqueue(shared));
+ ASSERT_TRUE(platform.IdleTaskPending());
+
+ ASSERT_EQ(dispatcher.jobs_.size(), 1u);
+ ASSERT_TRUE(dispatcher.jobs_.begin()->second->status() ==
+ CompileJobStatus::kInitial);
+
+ // Make compiling super expensive, and advance job as much as possible on the
+ // foreground thread.
+ dispatcher.tracer_->RecordCompile(50000.0, 1);
+ platform.RunIdleTask(10.0, 0.0);
+ ASSERT_TRUE(dispatcher.jobs_.begin()->second->status() ==
+ CompileJobStatus::kReadyToCompile);
+
+ ASSERT_TRUE(dispatcher.IsEnqueued(shared));
+ ASSERT_FALSE(shared->is_compiled());
+ ASSERT_FALSE(platform.IdleTaskPending());
+ ASSERT_TRUE(platform.BackgroundTasksPending());
+
+ // The background task hasn't yet started, so we can just cancel it.
+ dispatcher.AbortAll(CompilerDispatcher::BlockingBehavior::kDontBlock);
+ ASSERT_FALSE(platform.ForegroundTasksPending());
+
+ ASSERT_FALSE(dispatcher.IsEnqueued(shared));
+ ASSERT_FALSE(shared->is_compiled());
+
+ platform.RunBackgroundTasksAndBlock(V8::GetCurrentPlatform());
+
+ if (platform.IdleTaskPending()) platform.ClearIdleTask();
+ ASSERT_FALSE(platform.BackgroundTasksPending());
+ ASSERT_FALSE(platform.ForegroundTasksPending());
+}
+
+TEST_F(CompilerDispatcherTest, AsyncAbortAllRunningBackgroundTask) {
+ MockPlatform platform;
+ CompilerDispatcher dispatcher(i_isolate(), &platform, FLAG_stack_size);
+
+ const char script1[] =
+ "function g() { var y = 1; function f11(x) { return x * y }; return f11; "
+ "} g();";
+ Handle<JSFunction> f1 = Handle<JSFunction>::cast(RunJS(isolate(), script1));
+ Handle<SharedFunctionInfo> shared1(f1->shared(), i_isolate());
+
+ const char script2[] =
+ "function g() { var y = 1; function f12(x) { return x * y }; return f12; "
+ "} g();";
+ Handle<JSFunction> f2 = Handle<JSFunction>::cast(RunJS(isolate(), script2));
+ Handle<SharedFunctionInfo> shared2(f2->shared(), i_isolate());
+
+ ASSERT_FALSE(platform.IdleTaskPending());
+ ASSERT_TRUE(dispatcher.Enqueue(shared1));
+ ASSERT_TRUE(platform.IdleTaskPending());
+
+ ASSERT_EQ(dispatcher.jobs_.size(), 1u);
+ ASSERT_TRUE(dispatcher.jobs_.begin()->second->status() ==
+ CompileJobStatus::kInitial);
+
+ // Make compiling super expensive, and advance job as much as possible on the
+ // foreground thread.
+ dispatcher.tracer_->RecordCompile(50000.0, 1);
+ platform.RunIdleTask(10.0, 0.0);
+ ASSERT_TRUE(dispatcher.jobs_.begin()->second->status() ==
+ CompileJobStatus::kReadyToCompile);
+
+ ASSERT_TRUE(dispatcher.IsEnqueued(shared1));
+ ASSERT_FALSE(shared1->is_compiled());
+ ASSERT_FALSE(platform.IdleTaskPending());
+ ASSERT_TRUE(platform.BackgroundTasksPending());
+
+ // Kick off background tasks and freeze them.
+ dispatcher.block_for_testing_.SetValue(true);
+ platform.RunBackgroundTasks(V8::GetCurrentPlatform());
+
+ // Busy loop until the background task started running.
+ while (dispatcher.block_for_testing_.Value()) {
+ }
+ dispatcher.AbortAll(CompilerDispatcher::BlockingBehavior::kDontBlock);
+ ASSERT_TRUE(platform.ForegroundTasksPending());
+
+ // We can't schedule new tasks while we're aborting.
+ ASSERT_FALSE(dispatcher.Enqueue(shared2));
+
+ // Run the first AbortTask. Since the background job is still pending, it
+ // can't do anything.
+ platform.RunForegroundTasks();
+ {
+ base::LockGuard<base::Mutex> lock(&dispatcher.mutex_);
+ ASSERT_TRUE(dispatcher.abort_);
+ }
+
+ // Release background task.
+ dispatcher.semaphore_for_testing_.Signal();
+
+ // Busy loop until the background task scheduled another AbortTask task.
+ while (!platform.ForegroundTasksPending()) {
+ }
+
+ platform.RunForegroundTasks();
+ ASSERT_TRUE(dispatcher.jobs_.empty());
+ {
+ base::LockGuard<base::Mutex> lock(&dispatcher.mutex_);
+ ASSERT_FALSE(dispatcher.abort_);
+ }
+
+ ASSERT_TRUE(platform.IdleTaskPending());
+ platform.RunIdleTask(5.0, 1.0);
+ ASSERT_FALSE(platform.BackgroundTasksPending());
+ ASSERT_FALSE(platform.ForegroundTasksPending());
+
+ // Now it's possible to enqueue new functions again.
+ ASSERT_TRUE(dispatcher.Enqueue(shared2));
+ ASSERT_TRUE(platform.IdleTaskPending());
+ ASSERT_FALSE(platform.BackgroundTasksPending());
+ ASSERT_FALSE(platform.ForegroundTasksPending());
+ platform.ClearIdleTask();
+}
+
+TEST_F(CompilerDispatcherTest, FinishNowDuringAbortAll) {
+ MockPlatform platform;
+ CompilerDispatcher dispatcher(i_isolate(), &platform, FLAG_stack_size);
+
+ const char script[] =
+ "function g() { var y = 1; function f13(x) { return x * y }; return f13; "
+ "} g();";
+ Handle<JSFunction> f = Handle<JSFunction>::cast(RunJS(isolate(), script));
+ Handle<SharedFunctionInfo> shared(f->shared(), i_isolate());
+
+ ASSERT_FALSE(platform.IdleTaskPending());
+ ASSERT_TRUE(dispatcher.Enqueue(shared));
+ ASSERT_TRUE(platform.IdleTaskPending());
+
+ ASSERT_EQ(dispatcher.jobs_.size(), 1u);
+ ASSERT_TRUE(dispatcher.jobs_.begin()->second->status() ==
+ CompileJobStatus::kInitial);
+
+ // Make compiling super expensive, and advance job as much as possible on the
+ // foreground thread.
+ dispatcher.tracer_->RecordCompile(50000.0, 1);
+ platform.RunIdleTask(10.0, 0.0);
+ ASSERT_TRUE(dispatcher.jobs_.begin()->second->status() ==
+ CompileJobStatus::kReadyToCompile);
+
+ ASSERT_TRUE(dispatcher.IsEnqueued(shared));
+ ASSERT_FALSE(shared->is_compiled());
+ ASSERT_FALSE(platform.IdleTaskPending());
+ ASSERT_TRUE(platform.BackgroundTasksPending());
+
+ // Kick off background tasks and freeze them.
+ dispatcher.block_for_testing_.SetValue(true);
+ platform.RunBackgroundTasks(V8::GetCurrentPlatform());
+
+ // Busy loop until the background task started running.
+ while (dispatcher.block_for_testing_.Value()) {
+ }
+ dispatcher.AbortAll(CompilerDispatcher::BlockingBehavior::kDontBlock);
+ ASSERT_TRUE(platform.ForegroundTasksPending());
+
+ // Run the first AbortTask. Since the background job is still pending, it
+ // can't do anything.
+ platform.RunForegroundTasks();
+ {
+ base::LockGuard<base::Mutex> lock(&dispatcher.mutex_);
+ ASSERT_TRUE(dispatcher.abort_);
+ }
+
+  // While the background thread holds on to a job, it is still enqueued.
+ ASSERT_TRUE(dispatcher.IsEnqueued(shared));
+
+ // Release background task.
+ dispatcher.semaphore_for_testing_.Signal();
+
+ // Force the compilation to finish, even while aborting.
+ ASSERT_TRUE(dispatcher.FinishNow(shared));
+ ASSERT_TRUE(dispatcher.jobs_.empty());
+ {
+ base::LockGuard<base::Mutex> lock(&dispatcher.mutex_);
+ ASSERT_FALSE(dispatcher.abort_);
+ }
+
+ ASSERT_TRUE(platform.ForegroundTasksPending());
+ ASSERT_TRUE(platform.IdleTaskPending());
+ ASSERT_FALSE(platform.BackgroundTasksPending());
+ platform.ClearForegroundTasks();
+ platform.ClearIdleTask();
+}
+
+TEST_F(CompilerDispatcherTest, MemoryPressure) {
+ MockPlatform platform;
+ CompilerDispatcher dispatcher(i_isolate(), &platform, FLAG_stack_size);
+
+ const char script[] =
+ "function g() { var y = 1; function f14(x) { return x * y }; return f14; "
+ "} g();";
+ Handle<JSFunction> f = Handle<JSFunction>::cast(RunJS(isolate(), script));
+ Handle<SharedFunctionInfo> shared(f->shared(), i_isolate());
+
+ // Can't enqueue tasks under memory pressure.
+ dispatcher.MemoryPressureNotification(v8::MemoryPressureLevel::kCritical,
+ true);
+ ASSERT_FALSE(dispatcher.Enqueue(shared));
+
+ dispatcher.MemoryPressureNotification(v8::MemoryPressureLevel::kNone, true);
+ ASSERT_TRUE(dispatcher.Enqueue(shared));
+
+ // Memory pressure cancels current jobs.
+ dispatcher.MemoryPressureNotification(v8::MemoryPressureLevel::kCritical,
+ true);
+ ASSERT_FALSE(dispatcher.IsEnqueued(shared));
+ platform.ClearIdleTask();
+}
+
+namespace {
+
+class PressureNotificationTask : public CancelableTask {
+ public:
+ PressureNotificationTask(Isolate* isolate, CompilerDispatcher* dispatcher,
+ base::Semaphore* sem)
+ : CancelableTask(isolate), dispatcher_(dispatcher), sem_(sem) {}
+ ~PressureNotificationTask() override {}
+
+ void RunInternal() override {
+ dispatcher_->MemoryPressureNotification(v8::MemoryPressureLevel::kCritical,
+ false);
+ sem_->Signal();
+ }
+
+ private:
+ CompilerDispatcher* dispatcher_;
+ base::Semaphore* sem_;
+
+ DISALLOW_COPY_AND_ASSIGN(PressureNotificationTask);
+};
+
+} // namespace
+
+TEST_F(CompilerDispatcherTest, MemoryPressureFromBackground) {
+ MockPlatform platform;
+ CompilerDispatcher dispatcher(i_isolate(), &platform, FLAG_stack_size);
+
+ const char script[] =
+ "function g() { var y = 1; function f15(x) { return x * y }; return f15; "
+ "} g();";
+ Handle<JSFunction> f = Handle<JSFunction>::cast(RunJS(isolate(), script));
+ Handle<SharedFunctionInfo> shared(f->shared(), i_isolate());
+
+ ASSERT_TRUE(dispatcher.Enqueue(shared));
+ base::Semaphore sem(0);
+ V8::GetCurrentPlatform()->CallOnBackgroundThread(
+ new PressureNotificationTask(i_isolate(), &dispatcher, &sem),
+ v8::Platform::kShortRunningTask);
+
+ sem.Wait();
+
+ // A memory pressure task is pending, and running it will cancel the job.
+ ASSERT_TRUE(platform.ForegroundTasksPending());
+ ASSERT_TRUE(dispatcher.IsEnqueued(shared));
+ platform.RunForegroundTasks();
+ ASSERT_FALSE(dispatcher.IsEnqueued(shared));
+ ASSERT_FALSE(shared->is_compiled());
+
+  // Since the AbortAll() call is made from a task, AbortAll thinks that there
+  // is at least one task running, and fires off an AbortTask to be safe.
+ ASSERT_TRUE(platform.ForegroundTasksPending());
+ platform.RunForegroundTasks();
+ ASSERT_FALSE(platform.ForegroundTasksPending());
+
+ platform.ClearIdleTask();
+}
+
+TEST_F(CompilerDispatcherTest, EnqueueAndStep) {
+ MockPlatform platform;
+ CompilerDispatcher dispatcher(i_isolate(), &platform, FLAG_stack_size);
+
+ const char script[] =
+ "function g() { var y = 1; function f16(x) { return x * y }; return f16; "
+ "} g();";
+ Handle<JSFunction> f = Handle<JSFunction>::cast(RunJS(isolate(), script));
+ Handle<SharedFunctionInfo> shared(f->shared(), i_isolate());
+
+ ASSERT_FALSE(dispatcher.IsEnqueued(shared));
+ ASSERT_TRUE(dispatcher.EnqueueAndStep(shared));
+ ASSERT_TRUE(dispatcher.IsEnqueued(shared));
+
+ ASSERT_TRUE(dispatcher.jobs_.begin()->second->status() ==
+ CompileJobStatus::kReadyToParse);
+
+ ASSERT_TRUE(platform.IdleTaskPending());
+ platform.ClearIdleTask();
+ ASSERT_TRUE(platform.BackgroundTasksPending());
+ platform.ClearBackgroundTasks();
+}
+
+} // namespace internal
+} // namespace v8
diff --git a/deps/v8/test/unittests/compiler/bytecode-analysis-unittest.cc b/deps/v8/test/unittests/compiler/bytecode-analysis-unittest.cc
new file mode 100644
index 0000000000..4a531449fb
--- /dev/null
+++ b/deps/v8/test/unittests/compiler/bytecode-analysis-unittest.cc
@@ -0,0 +1,418 @@
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/v8.h"
+
+#include "src/compiler/bytecode-analysis.h"
+#include "src/interpreter/bytecode-array-builder.h"
+#include "src/interpreter/bytecode-array-iterator.h"
+#include "src/interpreter/bytecode-decoder.h"
+#include "src/interpreter/bytecode-label.h"
+#include "src/interpreter/control-flow-builders.h"
+#include "src/objects-inl.h"
+#include "test/unittests/test-utils.h"
+
+namespace v8 {
+namespace internal {
+namespace compiler {
+
+class BytecodeAnalysisTest : public TestWithIsolateAndZone {
+ public:
+ BytecodeAnalysisTest() {}
+ ~BytecodeAnalysisTest() override {}
+
+ static void SetUpTestCase() {
+ old_FLAG_ignition_peephole_ = i::FLAG_ignition_peephole;
+ i::FLAG_ignition_peephole = false;
+
+ old_FLAG_ignition_reo_ = i::FLAG_ignition_reo;
+ i::FLAG_ignition_reo = false;
+
+ TestWithIsolateAndZone::SetUpTestCase();
+ }
+
+ static void TearDownTestCase() {
+ TestWithIsolateAndZone::TearDownTestCase();
+ i::FLAG_ignition_peephole = old_FLAG_ignition_peephole_;
+ i::FLAG_ignition_reo = old_FLAG_ignition_reo_;
+ }
+
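+  // Renders a liveness bit vector as a string, one character per slot:
+  // 'L' for a live slot, '.' for a dead one.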
+ std::string ToLivenessString(const BytecodeLivenessState* liveness) const {
+ const BitVector& bit_vector = liveness->bit_vector();
+
+ std::string out;
+ out.resize(bit_vector.length());
+ for (int i = 0; i < bit_vector.length(); ++i) {
+ if (bit_vector.Contains(i)) {
+ out[i] = 'L';
+ } else {
+ out[i] = '.';
+ }
+ }
+ return out;
+ }
+
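+  // Walks the bytecode array and checks that the analyzed in/out liveness at
+  // each bytecode matches the corresponding pair of expected strings.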
+ void EnsureLivenessMatches(
+ Handle<BytecodeArray> bytecode,
+ const std::vector<std::pair<std::string, std::string>>&
+ expected_liveness) {
+ BytecodeAnalysis analysis(bytecode, zone(), true);
+ analysis.Analyze(BailoutId::None());
+
+ interpreter::BytecodeArrayIterator iterator(bytecode);
+ for (auto liveness : expected_liveness) {
+ std::stringstream ss;
+ ss << std::setw(4) << iterator.current_offset() << " : ";
+ iterator.PrintTo(ss);
+
+ EXPECT_EQ(liveness.first, ToLivenessString(analysis.GetInLivenessFor(
+ iterator.current_offset())))
+ << " at bytecode " << ss.str();
+
+ EXPECT_EQ(liveness.second, ToLivenessString(analysis.GetOutLivenessFor(
+ iterator.current_offset())))
+ << " at bytecode " << ss.str();
+
+ iterator.Advance();
+ }
+
+ EXPECT_TRUE(iterator.done());
+ }
+
+ private:
+ static bool old_FLAG_ignition_peephole_;
+ static bool old_FLAG_ignition_reo_;
+
+ DISALLOW_COPY_AND_ASSIGN(BytecodeAnalysisTest);
+};
+
+bool BytecodeAnalysisTest::old_FLAG_ignition_peephole_;
+bool BytecodeAnalysisTest::old_FLAG_ignition_reo_;
+
+TEST_F(BytecodeAnalysisTest, EmptyBlock) {
+ interpreter::BytecodeArrayBuilder builder(isolate(), zone(), 3, 0, 3);
+ std::vector<std::pair<std::string, std::string>> expected_liveness;
+
+ interpreter::Register reg_0(0);
+
+ builder.Return();
+ expected_liveness.emplace_back("...L", "....");
+
+ Handle<BytecodeArray> bytecode = builder.ToBytecodeArray(isolate());
+
+ EnsureLivenessMatches(bytecode, expected_liveness);
+}
+
+TEST_F(BytecodeAnalysisTest, SimpleLoad) {
+ interpreter::BytecodeArrayBuilder builder(isolate(), zone(), 3, 0, 3);
+ std::vector<std::pair<std::string, std::string>> expected_liveness;
+
+ interpreter::Register reg_0(0);
+
+ builder.LoadAccumulatorWithRegister(reg_0);
+ expected_liveness.emplace_back("L...", "...L");
+
+ builder.Return();
+ expected_liveness.emplace_back("...L", "....");
+
+ Handle<BytecodeArray> bytecode = builder.ToBytecodeArray(isolate());
+
+ EnsureLivenessMatches(bytecode, expected_liveness);
+}
+
+TEST_F(BytecodeAnalysisTest, StoreThenLoad) {
+ interpreter::BytecodeArrayBuilder builder(isolate(), zone(), 3, 0, 3);
+ std::vector<std::pair<std::string, std::string>> expected_liveness;
+
+ interpreter::Register reg_0(0);
+
+ builder.StoreAccumulatorInRegister(reg_0);
+ expected_liveness.emplace_back("...L", "L...");
+
+ builder.LoadNull();
+ expected_liveness.emplace_back("L...", "L...");
+
+ builder.LoadAccumulatorWithRegister(reg_0);
+ expected_liveness.emplace_back("L...", "...L");
+
+ builder.Return();
+ expected_liveness.emplace_back("...L", "....");
+
+ Handle<BytecodeArray> bytecode = builder.ToBytecodeArray(isolate());
+
+ EnsureLivenessMatches(bytecode, expected_liveness);
+}
+
+TEST_F(BytecodeAnalysisTest, DiamondLoad) {
+ interpreter::BytecodeArrayBuilder builder(isolate(), zone(), 3, 0, 3);
+ std::vector<std::pair<std::string, std::string>> expected_liveness;
+
+ interpreter::Register reg_0(0);
+ interpreter::Register reg_1(1);
+ interpreter::Register reg_2(2);
+
+ interpreter::BytecodeLabel ld1_label;
+ interpreter::BytecodeLabel end_label;
+
+ builder.JumpIfTrue(&ld1_label);
+ expected_liveness.emplace_back("LLLL", "LLL.");
+
+ builder.LoadAccumulatorWithRegister(reg_0);
+ expected_liveness.emplace_back("L.L.", "..L.");
+
+ builder.Jump(&end_label);
+ expected_liveness.emplace_back("..L.", "..L.");
+
+ builder.Bind(&ld1_label);
+ builder.LoadAccumulatorWithRegister(reg_1);
+ expected_liveness.emplace_back(".LL.", "..L.");
+
+ builder.Bind(&end_label);
+
+ builder.LoadAccumulatorWithRegister(reg_2);
+ expected_liveness.emplace_back("..L.", "...L");
+
+ builder.Return();
+ expected_liveness.emplace_back("...L", "....");
+
+ Handle<BytecodeArray> bytecode = builder.ToBytecodeArray(isolate());
+
+ EnsureLivenessMatches(bytecode, expected_liveness);
+}
+
+TEST_F(BytecodeAnalysisTest, DiamondLookupsAndBinds) {
+ interpreter::BytecodeArrayBuilder builder(isolate(), zone(), 3, 0, 3);
+ std::vector<std::pair<std::string, std::string>> expected_liveness;
+
+ interpreter::Register reg_0(0);
+ interpreter::Register reg_1(1);
+ interpreter::Register reg_2(2);
+
+ interpreter::BytecodeLabel ld1_label;
+ interpreter::BytecodeLabel end_label;
+
+ builder.StoreAccumulatorInRegister(reg_0);
+ expected_liveness.emplace_back(".LLL", "LLLL");
+
+ builder.JumpIfTrue(&ld1_label);
+ expected_liveness.emplace_back("LLLL", "LLL.");
+
+ {
+ builder.LoadAccumulatorWithRegister(reg_0);
+ expected_liveness.emplace_back("L...", "...L");
+
+ builder.StoreAccumulatorInRegister(reg_2);
+ expected_liveness.emplace_back("...L", "..L.");
+
+ builder.Jump(&end_label);
+ expected_liveness.emplace_back("..L.", "..L.");
+ }
+
+ builder.Bind(&ld1_label);
+ {
+ builder.LoadAccumulatorWithRegister(reg_1);
+ expected_liveness.emplace_back(".LL.", "..L.");
+ }
+
+ builder.Bind(&end_label);
+
+ builder.LoadAccumulatorWithRegister(reg_2);
+ expected_liveness.emplace_back("..L.", "...L");
+
+ builder.Return();
+ expected_liveness.emplace_back("...L", "....");
+
+ Handle<BytecodeArray> bytecode = builder.ToBytecodeArray(isolate());
+
+ EnsureLivenessMatches(bytecode, expected_liveness);
+}
+
+TEST_F(BytecodeAnalysisTest, SimpleLoop) {
+ interpreter::BytecodeArrayBuilder builder(isolate(), zone(), 3, 0, 3);
+ std::vector<std::pair<std::string, std::string>> expected_liveness;
+
+ interpreter::Register reg_0(0);
+ interpreter::Register reg_1(1);
+ interpreter::Register reg_2(2);
+
+ builder.StoreAccumulatorInRegister(reg_0);
+ expected_liveness.emplace_back("..LL", "L.LL");
+
+ interpreter::LoopBuilder loop_builder(&builder);
+ loop_builder.LoopHeader();
+ {
+ builder.JumpIfTrue(loop_builder.break_labels()->New());
+ expected_liveness.emplace_back("L.LL", "L.L.");
+
+ builder.LoadAccumulatorWithRegister(reg_0);
+ expected_liveness.emplace_back("L...", "L..L");
+
+ builder.StoreAccumulatorInRegister(reg_2);
+ expected_liveness.emplace_back("L..L", "L.LL");
+
+ loop_builder.BindContinueTarget();
+ loop_builder.JumpToHeader(0);
+ expected_liveness.emplace_back("L.LL", "L.LL");
+ }
+ loop_builder.EndLoop();
+
+ builder.LoadAccumulatorWithRegister(reg_2);
+ expected_liveness.emplace_back("..L.", "...L");
+
+ builder.Return();
+ expected_liveness.emplace_back("...L", "....");
+
+ Handle<BytecodeArray> bytecode = builder.ToBytecodeArray(isolate());
+
+ EnsureLivenessMatches(bytecode, expected_liveness);
+}
+
+TEST_F(BytecodeAnalysisTest, TryCatch) {
+ interpreter::BytecodeArrayBuilder builder(isolate(), zone(), 3, 0, 3);
+ std::vector<std::pair<std::string, std::string>> expected_liveness;
+
+ interpreter::Register reg_0(0);
+ interpreter::Register reg_1(1);
+ interpreter::Register reg_context(2);
+
+ builder.StoreAccumulatorInRegister(reg_0);
+ expected_liveness.emplace_back(".LLL", "LLL.");
+
+ interpreter::TryCatchBuilder try_builder(&builder, HandlerTable::CAUGHT);
+ try_builder.BeginTry(reg_context);
+ {
+ builder.LoadAccumulatorWithRegister(reg_0);
+ expected_liveness.emplace_back("LLL.", ".LLL");
+
+ builder.StoreAccumulatorInRegister(reg_0);
+ expected_liveness.emplace_back(".LLL", ".LL.");
+
+ builder.CallRuntime(Runtime::kThrow);
+ expected_liveness.emplace_back(".LL.", ".LLL");
+
+ builder.StoreAccumulatorInRegister(reg_0);
+    // Star can't throw, so it doesn't take the handler's liveness.
+ expected_liveness.emplace_back("...L", "...L");
+ }
+ try_builder.EndTry();
+ expected_liveness.emplace_back("...L", "...L");
+
+ // Catch
+ {
+ builder.LoadAccumulatorWithRegister(reg_1);
+ expected_liveness.emplace_back(".L..", "...L");
+ }
+ try_builder.EndCatch();
+
+ builder.Return();
+ expected_liveness.emplace_back("...L", "....");
+
+ Handle<BytecodeArray> bytecode = builder.ToBytecodeArray(isolate());
+
+ EnsureLivenessMatches(bytecode, expected_liveness);
+}
+
+TEST_F(BytecodeAnalysisTest, DiamondInLoop) {
+ interpreter::BytecodeArrayBuilder builder(isolate(), zone(), 3, 0, 3);
+ std::vector<std::pair<std::string, std::string>> expected_liveness;
+
+ interpreter::Register reg_0(0);
+ interpreter::Register reg_1(1);
+ interpreter::Register reg_2(2);
+
+ builder.StoreAccumulatorInRegister(reg_0);
+ expected_liveness.emplace_back("...L", "L..L");
+
+ interpreter::LoopBuilder loop_builder(&builder);
+ loop_builder.LoopHeader();
+ {
+ builder.JumpIfTrue(loop_builder.break_labels()->New());
+ expected_liveness.emplace_back("L..L", "L..L");
+
+ interpreter::BytecodeLabel ld1_label;
+ interpreter::BytecodeLabel end_label;
+ builder.JumpIfTrue(&ld1_label);
+ expected_liveness.emplace_back("L..L", "L..L");
+
+ {
+ builder.Jump(&end_label);
+ expected_liveness.emplace_back("L..L", "L..L");
+ }
+
+ builder.Bind(&ld1_label);
+ {
+ builder.LoadAccumulatorWithRegister(reg_0);
+ expected_liveness.emplace_back("L...", "L..L");
+ }
+
+ builder.Bind(&end_label);
+
+ loop_builder.BindContinueTarget();
+ loop_builder.JumpToHeader(0);
+ expected_liveness.emplace_back("L..L", "L..L");
+ }
+ loop_builder.EndLoop();
+
+ builder.Return();
+ expected_liveness.emplace_back("...L", "....");
+
+ Handle<BytecodeArray> bytecode = builder.ToBytecodeArray(isolate());
+
+ EnsureLivenessMatches(bytecode, expected_liveness);
+}
+
+TEST_F(BytecodeAnalysisTest, KillingLoopInsideLoop) {
+ interpreter::BytecodeArrayBuilder builder(isolate(), zone(), 3, 0, 3);
+ std::vector<std::pair<std::string, std::string>> expected_liveness;
+
+ interpreter::Register reg_0(0);
+ interpreter::Register reg_1(1);
+
+ builder.StoreAccumulatorInRegister(reg_0);
+ expected_liveness.emplace_back(".L.L", "LL..");
+
+ interpreter::LoopBuilder loop_builder(&builder);
+ loop_builder.LoopHeader();
+ {
+ builder.LoadAccumulatorWithRegister(reg_0);
+ expected_liveness.emplace_back("LL..", ".L..");
+
+ builder.LoadAccumulatorWithRegister(reg_1);
+ expected_liveness.emplace_back(".L..", ".L.L");
+
+ builder.JumpIfTrue(loop_builder.break_labels()->New());
+ expected_liveness.emplace_back(".L.L", ".L.L");
+
+ interpreter::LoopBuilder inner_loop_builder(&builder);
+ inner_loop_builder.LoopHeader();
+ {
+ builder.StoreAccumulatorInRegister(reg_0);
+ expected_liveness.emplace_back(".L.L", "LL.L");
+
+ builder.JumpIfTrue(inner_loop_builder.break_labels()->New());
+ expected_liveness.emplace_back("LL.L", "LL.L");
+
+ inner_loop_builder.BindContinueTarget();
+ inner_loop_builder.JumpToHeader(1);
+ expected_liveness.emplace_back(".L.L", ".L.L");
+ }
+ inner_loop_builder.EndLoop();
+
+ loop_builder.BindContinueTarget();
+ loop_builder.JumpToHeader(0);
+ expected_liveness.emplace_back("LL..", "LL..");
+ }
+ loop_builder.EndLoop();
+
+ builder.Return();
+ expected_liveness.emplace_back("...L", "....");
+
+ Handle<BytecodeArray> bytecode = builder.ToBytecodeArray(isolate());
+
+ EnsureLivenessMatches(bytecode, expected_liveness);
+}
+
+} // namespace compiler
+} // namespace internal
+} // namespace v8
diff --git a/deps/v8/test/unittests/compiler/common-operator-reducer-unittest.cc b/deps/v8/test/unittests/compiler/common-operator-reducer-unittest.cc
index f294a30596..ecc3070785 100644
--- a/deps/v8/test/unittests/compiler/common-operator-reducer-unittest.cc
+++ b/deps/v8/test/unittests/compiler/common-operator-reducer-unittest.cc
@@ -374,6 +374,33 @@ TEST_F(CommonOperatorReducerTest, ReturnWithPhiAndEffectPhiAndMerge) {
IsReturn(vfalse, efalse, if_false)));
}
+TEST_F(CommonOperatorReducerTest, MultiReturnWithPhiAndEffectPhiAndMerge) {
+ Node* cond = Parameter(2);
+ Node* branch = graph()->NewNode(common()->Branch(), cond, graph()->start());
+ Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
+ Node* etrue = graph()->start();
+ Node* vtrue1 = Parameter(0);
+ Node* vtrue2 = Parameter(1);
+ Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
+ Node* efalse = graph()->start();
+ Node* vfalse1 = Parameter(1);
+ Node* vfalse2 = Parameter(0);
+ Node* merge = graph()->NewNode(common()->Merge(2), if_true, if_false);
+ Node* ephi = graph()->NewNode(common()->EffectPhi(2), etrue, efalse, merge);
+ Node* phi1 = graph()->NewNode(
+ common()->Phi(MachineRepresentation::kTagged, 2), vtrue1, vfalse1, merge);
+ Node* phi2 = graph()->NewNode(
+ common()->Phi(MachineRepresentation::kTagged, 2), vtrue2, vfalse2, merge);
+
+ Node* zero = graph()->NewNode(common()->Int32Constant(0));
+ Node* ret =
+ graph()->NewNode(common()->Return(2), zero, phi1, phi2, ephi, merge);
+ graph()->SetEnd(graph()->NewNode(common()->End(1), ret));
+ StrictMock<MockAdvancedReducerEditor> editor;
+ Reduction const r = Reduce(&editor, ret);
+ // For now a return with multiple return values should not be reduced.
+ ASSERT_TRUE(!r.Changed());
+}
// -----------------------------------------------------------------------------
// Select
diff --git a/deps/v8/test/unittests/compiler/escape-analysis-unittest.cc b/deps/v8/test/unittests/compiler/escape-analysis-unittest.cc
index 9cce5475fd..8ad93eee6a 100644
--- a/deps/v8/test/unittests/compiler/escape-analysis-unittest.cc
+++ b/deps/v8/test/unittests/compiler/escape-analysis-unittest.cc
@@ -148,11 +148,9 @@ class EscapeAnalysisTest : public TypedGraphTest {
}
FieldAccess FieldAccessAtIndex(int offset) {
- FieldAccess access = {kTaggedBase,
- offset,
- MaybeHandle<Name>(),
- Type::Any(),
- MachineType::AnyTagged(),
+ FieldAccess access = {kTaggedBase, offset,
+ MaybeHandle<Name>(), MaybeHandle<Map>(),
+ Type::Any(), MachineType::AnyTagged(),
kFullWriteBarrier};
return access;
}
@@ -439,9 +437,12 @@ TEST_F(EscapeAnalysisTest, DeoptReplacement) {
Node* effect1 = Store(FieldAccessAtIndex(0), allocation, object1, finish);
Branch();
Node* ifFalse = IfFalse();
- Node* state_values1 = graph()->NewNode(common()->StateValues(1), finish);
- Node* state_values2 = graph()->NewNode(common()->StateValues(0));
- Node* state_values3 = graph()->NewNode(common()->StateValues(0));
+ Node* state_values1 = graph()->NewNode(
+ common()->StateValues(1, SparseInputMask::Dense()), finish);
+ Node* state_values2 =
+ graph()->NewNode(common()->StateValues(0, SparseInputMask::Dense()));
+ Node* state_values3 =
+ graph()->NewNode(common()->StateValues(0, SparseInputMask::Dense()));
Node* frame_state = graph()->NewNode(
common()->FrameState(BailoutId::None(), OutputFrameStateCombine::Ignore(),
nullptr),
@@ -479,9 +480,12 @@ TEST_F(EscapeAnalysisTest, DISABLED_DeoptReplacementIdentity) {
Node* effect1 = Store(FieldAccessAtIndex(0), allocation, object1, finish);
Branch();
Node* ifFalse = IfFalse();
- Node* state_values1 = graph()->NewNode(common()->StateValues(1), finish);
- Node* state_values2 = graph()->NewNode(common()->StateValues(1), finish);
- Node* state_values3 = graph()->NewNode(common()->StateValues(0));
+ Node* state_values1 = graph()->NewNode(
+ common()->StateValues(1, SparseInputMask::Dense()), finish);
+ Node* state_values2 = graph()->NewNode(
+ common()->StateValues(1, SparseInputMask::Dense()), finish);
+ Node* state_values3 =
+ graph()->NewNode(common()->StateValues(0, SparseInputMask::Dense()));
Node* frame_state = graph()->NewNode(
common()->FrameState(BailoutId::None(), OutputFrameStateCombine::Ignore(),
nullptr),
diff --git a/deps/v8/test/unittests/compiler/graph-unittest.cc b/deps/v8/test/unittests/compiler/graph-unittest.cc
index dc2ba7814b..6e48eaf96d 100644
--- a/deps/v8/test/unittests/compiler/graph-unittest.cc
+++ b/deps/v8/test/unittests/compiler/graph-unittest.cc
@@ -80,7 +80,8 @@ Node* GraphTest::UndefinedConstant() {
Node* GraphTest::EmptyFrameState() {
- Node* state_values = graph()->NewNode(common()->StateValues(0));
+ Node* state_values =
+ graph()->NewNode(common()->StateValues(0, SparseInputMask::Dense()));
return graph()->NewNode(
common()->FrameState(BailoutId::None(), OutputFrameStateCombine::Ignore(),
nullptr),
@@ -98,6 +99,9 @@ Matcher<Node*> GraphTest::IsTrueConstant() {
return IsHeapConstant(factory()->true_value());
}
+Matcher<Node*> GraphTest::IsNullConstant() {
+ return IsHeapConstant(factory()->null_value());
+}
Matcher<Node*> GraphTest::IsUndefinedConstant() {
return IsHeapConstant(factory()->undefined_value());
diff --git a/deps/v8/test/unittests/compiler/graph-unittest.h b/deps/v8/test/unittests/compiler/graph-unittest.h
index 2542e68a91..8701f1ff6d 100644
--- a/deps/v8/test/unittests/compiler/graph-unittest.h
+++ b/deps/v8/test/unittests/compiler/graph-unittest.h
@@ -53,6 +53,7 @@ class GraphTest : public virtual TestWithNativeContext,
}
Matcher<Node*> IsFalseConstant();
Matcher<Node*> IsTrueConstant();
+ Matcher<Node*> IsNullConstant();
Matcher<Node*> IsUndefinedConstant();
CommonOperatorBuilder* common() { return &common_; }
diff --git a/deps/v8/test/unittests/compiler/instruction-selector-unittest.cc b/deps/v8/test/unittests/compiler/instruction-selector-unittest.cc
index 5c67a1ece0..7b4150ec0c 100644
--- a/deps/v8/test/unittests/compiler/instruction-selector-unittest.cc
+++ b/deps/v8/test/unittests/compiler/instruction-selector-unittest.cc
@@ -364,10 +364,13 @@ TARGET_TEST_F(InstructionSelectorTest, CallJSFunctionWithDeopt) {
zone(), false, 1, CallDescriptor::kNeedsFrameState);
// Build frame state for the state before the call.
- Node* parameters =
- m.AddNode(m.common()->TypedStateValues(&int32_type), m.Int32Constant(1));
- Node* locals = m.AddNode(m.common()->TypedStateValues(&empty_types));
- Node* stack = m.AddNode(m.common()->TypedStateValues(&empty_types));
+ Node* parameters = m.AddNode(
+ m.common()->TypedStateValues(&int32_type, SparseInputMask::Dense()),
+ m.Int32Constant(1));
+ Node* locals = m.AddNode(
+ m.common()->TypedStateValues(&empty_types, SparseInputMask::Dense()));
+ Node* stack = m.AddNode(
+ m.common()->TypedStateValues(&empty_types, SparseInputMask::Dense()));
Node* context_sentinel = m.Int32Constant(0);
Node* state_node = m.AddNode(
m.common()->FrameState(bailout_id, OutputFrameStateCombine::Push(),
@@ -376,9 +379,9 @@ TARGET_TEST_F(InstructionSelectorTest, CallJSFunctionWithDeopt) {
m.UndefinedConstant());
// Build the call.
- Node* args[] = {receiver, m.UndefinedConstant(), m.Int32Constant(1), context};
- Node* call =
- m.CallNWithFrameState(descriptor, function_node, args, state_node);
+ Node* nodes[] = {function_node, receiver, m.UndefinedConstant(),
+ m.Int32Constant(1), context, state_node};
+ Node* call = m.CallNWithFrameState(descriptor, arraysize(nodes), nodes);
m.Return(call);
Stream s = m.Build(kAllExceptNopInstructions);
@@ -419,12 +422,15 @@ TARGET_TEST_F(InstructionSelectorTest, CallStubWithDeopt) {
CallDescriptor::kNeedsFrameState, Operator::kNoProperties);
// Build frame state for the state before the call.
- Node* parameters =
- m.AddNode(m.common()->TypedStateValues(&int32_type), m.Int32Constant(43));
- Node* locals = m.AddNode(m.common()->TypedStateValues(&float64_type),
- m.Float64Constant(0.5));
- Node* stack = m.AddNode(m.common()->TypedStateValues(&tagged_type),
- m.UndefinedConstant());
+ Node* parameters = m.AddNode(
+ m.common()->TypedStateValues(&int32_type, SparseInputMask::Dense()),
+ m.Int32Constant(43));
+ Node* locals = m.AddNode(
+ m.common()->TypedStateValues(&float64_type, SparseInputMask::Dense()),
+ m.Float64Constant(0.5));
+ Node* stack = m.AddNode(
+ m.common()->TypedStateValues(&tagged_type, SparseInputMask::Dense()),
+ m.UndefinedConstant());
Node* context_sentinel = m.Int32Constant(0);
Node* state_node = m.AddNode(
m.common()->FrameState(bailout_id_before, OutputFrameStateCombine::Push(),
@@ -433,9 +439,9 @@ TARGET_TEST_F(InstructionSelectorTest, CallStubWithDeopt) {
m.UndefinedConstant());
// Build the call.
- Node* args[] = {function_node, receiver, context};
Node* stub_code = m.HeapConstant(callable.code());
- Node* call = m.CallNWithFrameState(descriptor, stub_code, args, state_node);
+ Node* nodes[] = {stub_code, function_node, receiver, context, state_node};
+ Node* call = m.CallNWithFrameState(descriptor, arraysize(nodes), nodes);
m.Return(call);
Stream s = m.Build(kAllExceptNopInstructions);
@@ -477,15 +483,6 @@ TARGET_TEST_F(InstructionSelectorTest, CallStubWithDeopt) {
// We inserted 0 here.
EXPECT_EQ(0.5, s.ToFloat64(call_instr->InputAt(5)));
EXPECT_TRUE(s.ToHeapObject(call_instr->InputAt(6))->IsUndefined(isolate()));
- EXPECT_EQ(MachineType::AnyTagged(),
- desc_before->GetType(0)); // function is always
- // tagged/any.
- EXPECT_EQ(MachineType::Int32(), desc_before->GetType(1));
- EXPECT_EQ(MachineType::AnyTagged(),
- desc_before->GetType(2)); // context is always
- // tagged/any.
- EXPECT_EQ(MachineType::Float64(), desc_before->GetType(3));
- EXPECT_EQ(MachineType::AnyTagged(), desc_before->GetType(4));
// Function.
EXPECT_EQ(s.ToVreg(function_node), s.ToVreg(call_instr->InputAt(7)));
@@ -521,24 +518,30 @@ TARGET_TEST_F(InstructionSelectorTest, CallStubWithDeoptRecursiveFrameState) {
CallDescriptor::kNeedsFrameState, Operator::kNoProperties);
// Build frame state for the state before the call.
- Node* parameters =
- m.AddNode(m.common()->TypedStateValues(&int32_type), m.Int32Constant(63));
- Node* locals =
- m.AddNode(m.common()->TypedStateValues(&int32_type), m.Int32Constant(64));
- Node* stack =
- m.AddNode(m.common()->TypedStateValues(&int32_type), m.Int32Constant(65));
+ Node* parameters = m.AddNode(
+ m.common()->TypedStateValues(&int32_type, SparseInputMask::Dense()),
+ m.Int32Constant(63));
+ Node* locals = m.AddNode(
+ m.common()->TypedStateValues(&int32_type, SparseInputMask::Dense()),
+ m.Int32Constant(64));
+ Node* stack = m.AddNode(
+ m.common()->TypedStateValues(&int32_type, SparseInputMask::Dense()),
+ m.Int32Constant(65));
Node* frame_state_parent = m.AddNode(
m.common()->FrameState(bailout_id_parent,
OutputFrameStateCombine::Ignore(),
m.GetFrameStateFunctionInfo(1, 1)),
parameters, locals, stack, context, function_node, m.UndefinedConstant());
- Node* parameters2 =
- m.AddNode(m.common()->TypedStateValues(&int32_type), m.Int32Constant(43));
- Node* locals2 = m.AddNode(m.common()->TypedStateValues(&float64_type),
- m.Float64Constant(0.25));
- Node* stack2 = m.AddNode(m.common()->TypedStateValues(&int32x2_type),
- m.Int32Constant(44), m.Int32Constant(45));
+ Node* parameters2 = m.AddNode(
+ m.common()->TypedStateValues(&int32_type, SparseInputMask::Dense()),
+ m.Int32Constant(43));
+ Node* locals2 = m.AddNode(
+ m.common()->TypedStateValues(&float64_type, SparseInputMask::Dense()),
+ m.Float64Constant(0.25));
+ Node* stack2 = m.AddNode(
+ m.common()->TypedStateValues(&int32x2_type, SparseInputMask::Dense()),
+ m.Int32Constant(44), m.Int32Constant(45));
Node* state_node = m.AddNode(
m.common()->FrameState(bailout_id_before, OutputFrameStateCombine::Push(),
m.GetFrameStateFunctionInfo(1, 1)),
@@ -546,9 +549,9 @@ TARGET_TEST_F(InstructionSelectorTest, CallStubWithDeoptRecursiveFrameState) {
frame_state_parent);
// Build the call.
- Node* args[] = {function_node, receiver, context2};
Node* stub_code = m.HeapConstant(callable.code());
- Node* call = m.CallNWithFrameState(descriptor, stub_code, args, state_node);
+ Node* nodes[] = {stub_code, function_node, receiver, context2, state_node};
+ Node* call = m.CallNWithFrameState(descriptor, arraysize(nodes), nodes);
m.Return(call);
Stream s = m.Build(kAllExceptNopInstructions);
@@ -585,31 +588,20 @@ TARGET_TEST_F(InstructionSelectorTest, CallStubWithDeoptRecursiveFrameState) {
EXPECT_EQ(1u, desc_before_outer->locals_count());
EXPECT_EQ(1u, desc_before_outer->stack_count());
// Values from parent environment.
- EXPECT_EQ(MachineType::AnyTagged(), desc_before->GetType(0));
EXPECT_EQ(63, s.ToInt32(call_instr->InputAt(3)));
- EXPECT_EQ(MachineType::Int32(), desc_before_outer->GetType(1));
// Context:
EXPECT_EQ(66, s.ToInt32(call_instr->InputAt(4)));
- EXPECT_EQ(MachineType::AnyTagged(), desc_before_outer->GetType(2));
EXPECT_EQ(64, s.ToInt32(call_instr->InputAt(5)));
- EXPECT_EQ(MachineType::Int32(), desc_before_outer->GetType(3));
EXPECT_EQ(65, s.ToInt32(call_instr->InputAt(6)));
- EXPECT_EQ(MachineType::Int32(), desc_before_outer->GetType(4));
// Values from the nested frame.
EXPECT_EQ(1u, desc_before->parameters_count());
EXPECT_EQ(1u, desc_before->locals_count());
EXPECT_EQ(2u, desc_before->stack_count());
- EXPECT_EQ(MachineType::AnyTagged(), desc_before->GetType(0));
EXPECT_EQ(43, s.ToInt32(call_instr->InputAt(8)));
- EXPECT_EQ(MachineType::Int32(), desc_before->GetType(1));
EXPECT_EQ(46, s.ToInt32(call_instr->InputAt(9)));
- EXPECT_EQ(MachineType::AnyTagged(), desc_before->GetType(2));
EXPECT_EQ(0.25, s.ToFloat64(call_instr->InputAt(10)));
- EXPECT_EQ(MachineType::Float64(), desc_before->GetType(3));
EXPECT_EQ(44, s.ToInt32(call_instr->InputAt(11)));
- EXPECT_EQ(MachineType::Int32(), desc_before->GetType(4));
EXPECT_EQ(45, s.ToInt32(call_instr->InputAt(12)));
- EXPECT_EQ(MachineType::Int32(), desc_before->GetType(5));
// Function.
EXPECT_EQ(s.ToVreg(function_node), s.ToVreg(call_instr->InputAt(13)));
diff --git a/deps/v8/test/unittests/compiler/instruction-sequence-unittest.cc b/deps/v8/test/unittests/compiler/instruction-sequence-unittest.cc
index 9d17c26a56..ee9f7914a6 100644
--- a/deps/v8/test/unittests/compiler/instruction-sequence-unittest.cc
+++ b/deps/v8/test/unittests/compiler/instruction-sequence-unittest.cc
@@ -19,6 +19,11 @@ static const char*
static char register_names_[10 * (RegisterConfiguration::kMaxGeneralRegisters +
RegisterConfiguration::kMaxFPRegisters)];
+namespace {
+static int allocatable_codes[InstructionSequenceTest::kDefaultNRegs] = {
+ 0, 1, 2, 3, 4, 5, 6, 7};
+}
+
static void InitializeRegisterNames() {
char* loc = register_names_;
for (int i = 0; i < RegisterConfiguration::kMaxGeneralRegisters; ++i) {
@@ -81,7 +86,18 @@ int InstructionSequenceTest::GetAllocatableCode(int index,
}
const RegisterConfiguration* InstructionSequenceTest::config() {
- return sequence()->GetRegisterConfigurationForTesting();
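+  // Lazily create a RegisterConfiguration matching the test's register
+  // counts instead of taking the one attached to the sequence.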
+ if (!config_) {
+ config_.reset(new RegisterConfiguration(
+ num_general_registers_, num_double_registers_, num_general_registers_,
+ num_double_registers_, allocatable_codes, allocatable_codes,
+ kSimpleFPAliasing ? RegisterConfiguration::OVERLAP
+ : RegisterConfiguration::COMBINE,
+ general_register_names_,
+ double_register_names_, // float register names
+ double_register_names_,
+ double_register_names_)); // SIMD 128 register names
+ }
+ return config_.get();
}
@@ -89,6 +105,8 @@ InstructionSequence* InstructionSequenceTest::sequence() {
if (sequence_ == nullptr) {
sequence_ = new (zone())
InstructionSequence(isolate(), zone(), &instruction_blocks_);
+ sequence_->SetRegisterConfigurationForTesting(
+ InstructionSequenceTest::config());
}
return sequence_;
}
diff --git a/deps/v8/test/unittests/compiler/int64-lowering-unittest.cc b/deps/v8/test/unittests/compiler/int64-lowering-unittest.cc
index 400eafb4dc..83c8d003db 100644
--- a/deps/v8/test/unittests/compiler/int64-lowering-unittest.cc
+++ b/deps/v8/test/unittests/compiler/int64-lowering-unittest.cc
@@ -570,8 +570,7 @@ TEST_F(Int64LoweringTest, F64ReinterpretI64) {
MachineRepresentation::kFloat64);
Capture<Node*> stack_slot_capture;
- Matcher<Node*> stack_slot_matcher =
- IsStackSlot(MachineRepresentation::kWord64);
+ Matcher<Node*> stack_slot_matcher = IsStackSlot(sizeof(int64_t));
Capture<Node*> store_capture;
Matcher<Node*> store_matcher =
@@ -602,8 +601,7 @@ TEST_F(Int64LoweringTest, I64ReinterpretF64) {
MachineRepresentation::kWord64);
Capture<Node*> stack_slot;
- Matcher<Node*> stack_slot_matcher =
- IsStackSlot(MachineRepresentation::kWord64);
+ Matcher<Node*> stack_slot_matcher = IsStackSlot(sizeof(int64_t));
Capture<Node*> store;
Matcher<Node*> store_matcher = IsStore(
@@ -875,6 +873,25 @@ TEST_F(Int64LoweringTest, EffectPhiLoop) {
LowerGraph(load, MachineRepresentation::kWord64);
}
+
+TEST_F(Int64LoweringTest, LoopCycle) {
+ // New node with two placeholders.
+ Node* compare = graph()->NewNode(machine()->Word64Equal(), Int64Constant(0),
+ Int64Constant(value(0)));
+
+ Node* load = graph()->NewNode(
+ machine()->Load(MachineType::Int64()), Int64Constant(value(1)),
+ Int64Constant(value(2)), graph()->start(),
+ graph()->NewNode(
+ common()->Loop(2), graph()->start(),
+ graph()->NewNode(common()->IfFalse(),
+ graph()->NewNode(common()->Branch(), compare,
+ graph()->start()))));
+
+ NodeProperties::ReplaceValueInput(compare, load, 0);
+
+ LowerGraph(load, MachineRepresentation::kWord64);
+}
} // namespace compiler
} // namespace internal
} // namespace v8
diff --git a/deps/v8/test/unittests/compiler/js-create-lowering-unittest.cc b/deps/v8/test/unittests/compiler/js-create-lowering-unittest.cc
index f4a1192abf..56516c9ed0 100644
--- a/deps/v8/test/unittests/compiler/js-create-lowering-unittest.cc
+++ b/deps/v8/test/unittests/compiler/js-create-lowering-unittest.cc
@@ -12,6 +12,7 @@
#include "src/compiler/node-properties.h"
#include "src/compiler/operator-properties.h"
#include "src/isolate-inl.h"
+#include "src/type-feedback-vector.h"
#include "test/unittests/compiler/compiler-test-utils.h"
#include "test/unittests/compiler/graph-unittest.h"
#include "test/unittests/compiler/node-test-utils.h"
@@ -46,7 +47,8 @@ class JSCreateLoweringTest : public TypedGraphTest {
}
Node* FrameState(Handle<SharedFunctionInfo> shared, Node* outer_frame_state) {
- Node* state_values = graph()->NewNode(common()->StateValues(0));
+ Node* state_values =
+ graph()->NewNode(common()->StateValues(0, SparseInputMask::Dense()));
return graph()->NewNode(
common()->FrameState(
BailoutId::None(), OutputFrameStateCombine::Ignore(),
@@ -138,13 +140,26 @@ TEST_F(JSCreateLoweringTest, JSCreateArgumentsInlinedRestArray) {
// JSCreateClosure
TEST_F(JSCreateLoweringTest, JSCreateClosureViaInlinedAllocation) {
+ if (!FLAG_turbo_lower_create_closure) return;
Node* const context = UndefinedConstant();
Node* const effect = graph()->start();
Node* const control = graph()->start();
Handle<SharedFunctionInfo> shared(isolate()->number_function()->shared());
- Reduction r =
- Reduce(graph()->NewNode(javascript()->CreateClosure(shared, NOT_TENURED),
- context, effect, control));
+
+ // Create a mock feedback vector. It just has to be an array with an array
+ // in slot 0.
+ Handle<FixedArray> array = isolate()->factory()->NewFixedArray(
+ TypeFeedbackVector::kReservedIndexCount + 1);
+ array->set_map_no_write_barrier(
+ isolate()->heap()->type_feedback_vector_map());
+ Handle<TypeFeedbackVector> vector = Handle<TypeFeedbackVector>::cast(array);
+ FeedbackVectorSlot slot(0);
+ vector->Set(slot, *vector);
+ VectorSlotPair pair(vector, slot);
+
+ Reduction r = Reduce(
+ graph()->NewNode(javascript()->CreateClosure(shared, pair, NOT_TENURED),
+ context, effect, control));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(),
IsFinishRegion(IsAllocate(IsNumberConstant(JSFunction::kSize),
@@ -160,9 +175,9 @@ TEST_F(JSCreateLoweringTest, JSCreateFunctionContextViaInlinedAllocation) {
Node* const context = Parameter(Type::Any());
Node* const effect = graph()->start();
Node* const control = graph()->start();
- Reduction const r =
- Reduce(graph()->NewNode(javascript()->CreateFunctionContext(8), closure,
- context, effect, control));
+ Reduction const r = Reduce(
+ graph()->NewNode(javascript()->CreateFunctionContext(8, FUNCTION_SCOPE),
+ closure, context, effect, control));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(),
IsFinishRegion(IsAllocate(IsNumberConstant(Context::SizeFor(
diff --git a/deps/v8/test/unittests/compiler/js-intrinsic-lowering-unittest.cc b/deps/v8/test/unittests/compiler/js-intrinsic-lowering-unittest.cc
index 780bf65df3..e8bbc33578 100644
--- a/deps/v8/test/unittests/compiler/js-intrinsic-lowering-unittest.cc
+++ b/deps/v8/test/unittests/compiler/js-intrinsic-lowering-unittest.cc
@@ -129,37 +129,6 @@ TEST_F(JSIntrinsicLoweringTest, InlineIsTypedArray) {
// -----------------------------------------------------------------------------
-// %_IsRegExp
-
-
-TEST_F(JSIntrinsicLoweringTest, InlineIsRegExp) {
- Node* const input = Parameter(0);
- Node* const context = Parameter(1);
- Node* const effect = graph()->start();
- Node* const control = graph()->start();
- Reduction const r = Reduce(
- graph()->NewNode(javascript()->CallRuntime(Runtime::kInlineIsRegExp, 1),
- input, context, effect, control));
- ASSERT_TRUE(r.Changed());
-
- Node* phi = r.replacement();
- Capture<Node*> branch, if_false;
- EXPECT_THAT(
- phi,
- IsPhi(
- MachineRepresentation::kTagged, IsFalseConstant(),
- IsNumberEqual(IsLoadField(AccessBuilder::ForMapInstanceType(),
- IsLoadField(AccessBuilder::ForMap(), input,
- effect, CaptureEq(&if_false)),
- effect, _),
- IsNumberConstant(JS_REGEXP_TYPE)),
- IsMerge(IsIfTrue(AllOf(CaptureEq(&branch),
- IsBranch(IsObjectIsSmi(input), control))),
- AllOf(CaptureEq(&if_false), IsIfFalse(CaptureEq(&branch))))));
-}
-
-
-// -----------------------------------------------------------------------------
// %_IsJSReceiver
diff --git a/deps/v8/test/unittests/compiler/js-typed-lowering-unittest.cc b/deps/v8/test/unittests/compiler/js-typed-lowering-unittest.cc
index 6883052abb..979d146164 100644
--- a/deps/v8/test/unittests/compiler/js-typed-lowering-unittest.cc
+++ b/deps/v8/test/unittests/compiler/js-typed-lowering-unittest.cc
@@ -111,6 +111,25 @@ TEST_F(JSTypedLoweringTest, JSToBooleanWithNumber) {
EXPECT_THAT(r.replacement(), IsNumberToBoolean(input));
}
+TEST_F(JSTypedLoweringTest, JSToBooleanWithDetectableReceiverOrNull) {
+ Node* input = Parameter(Type::DetectableReceiverOrNull(), 0);
+ Node* context = Parameter(Type::Any(), 1);
+ Reduction r = Reduce(graph()->NewNode(
+ javascript()->ToBoolean(ToBooleanHint::kAny), input, context));
+ ASSERT_TRUE(r.Changed());
+ EXPECT_THAT(r.replacement(),
+ IsBooleanNot(IsReferenceEqual(input, IsNullConstant())));
+}
+
+TEST_F(JSTypedLoweringTest, JSToBooleanWithReceiverOrNullOrUndefined) {
+ Node* input = Parameter(Type::ReceiverOrNullOrUndefined(), 0);
+ Node* context = Parameter(Type::Any(), 1);
+ Reduction r = Reduce(graph()->NewNode(
+ javascript()->ToBoolean(ToBooleanHint::kAny), input, context));
+ ASSERT_TRUE(r.Changed());
+ EXPECT_THAT(r.replacement(), IsBooleanNot(IsObjectIsUndetectable(input)));
+}
+
TEST_F(JSTypedLoweringTest, JSToBooleanWithAny) {
Node* input = Parameter(Type::Any(), 0);
Node* context = Parameter(Type::Any(), 1);
@@ -251,7 +270,7 @@ TEST_F(JSTypedLoweringTest, JSStrictEqualWithUnique) {
graph()->NewNode(javascript()->StrictEqual(CompareOperationHint::kAny),
lhs, rhs, context, effect, control));
ASSERT_TRUE(r.Changed());
- EXPECT_THAT(r.replacement(), IsReferenceEqual(Type::Unique(), lhs, rhs));
+ EXPECT_THAT(r.replacement(), IsReferenceEqual(lhs, rhs));
}
@@ -504,17 +523,15 @@ TEST_F(JSTypedLoweringTest, JSLoadContext) {
static bool kBooleans[] = {false, true};
TRACED_FOREACH(size_t, index, kIndices) {
TRACED_FOREACH(bool, immutable, kBooleans) {
- Reduction const r1 = Reduce(
- graph()->NewNode(javascript()->LoadContext(0, index, immutable),
- context, context, effect));
+ Reduction const r1 = Reduce(graph()->NewNode(
+ javascript()->LoadContext(0, index, immutable), context, effect));
ASSERT_TRUE(r1.Changed());
EXPECT_THAT(r1.replacement(),
IsLoadField(AccessBuilder::ForContextSlot(index), context,
effect, graph()->start()));
- Reduction const r2 = Reduce(
- graph()->NewNode(javascript()->LoadContext(1, index, immutable),
- context, context, effect));
+ Reduction const r2 = Reduce(graph()->NewNode(
+ javascript()->LoadContext(1, index, immutable), context, effect));
ASSERT_TRUE(r2.Changed());
EXPECT_THAT(r2.replacement(),
IsLoadField(AccessBuilder::ForContextSlot(index),
@@ -540,16 +557,16 @@ TEST_F(JSTypedLoweringTest, JSStoreContext) {
Node* const value = Parameter(type);
Reduction const r1 =
- Reduce(graph()->NewNode(javascript()->StoreContext(0, index), context,
- value, context, effect, control));
+ Reduce(graph()->NewNode(javascript()->StoreContext(0, index), value,
+ context, effect, control));
ASSERT_TRUE(r1.Changed());
EXPECT_THAT(r1.replacement(),
IsStoreField(AccessBuilder::ForContextSlot(index), context,
value, effect, control));
Reduction const r2 =
- Reduce(graph()->NewNode(javascript()->StoreContext(1, index), context,
- value, context, effect, control));
+ Reduce(graph()->NewNode(javascript()->StoreContext(1, index), value,
+ context, effect, control));
ASSERT_TRUE(r2.Changed());
EXPECT_THAT(r2.replacement(),
IsStoreField(AccessBuilder::ForContextSlot(index),
@@ -580,13 +597,12 @@ TEST_F(JSTypedLoweringTest, JSLoadPropertyFromExternalTypedArray) {
Node* key = Parameter(
Type::Range(kMinInt / element_size, kMaxInt / element_size, zone()));
Node* base = HeapConstant(array);
- Node* vector = UndefinedConstant();
Node* context = UndefinedConstant();
Node* effect = graph()->start();
Node* control = graph()->start();
- Reduction r = Reduce(graph()->NewNode(javascript()->LoadProperty(feedback),
- base, key, vector, context,
- EmptyFrameState(), effect, control));
+ Reduction r =
+ Reduce(graph()->NewNode(javascript()->LoadProperty(feedback), base, key,
+ context, EmptyFrameState(), effect, control));
Matcher<Node*> offset_matcher =
element_size == 1
@@ -622,13 +638,12 @@ TEST_F(JSTypedLoweringTest, JSLoadPropertyFromExternalTypedArrayWithSafeKey) {
if (min > max) std::swap(min, max);
Node* key = Parameter(Type::Range(min, max, zone()));
Node* base = HeapConstant(array);
- Node* vector = UndefinedConstant();
Node* context = UndefinedConstant();
Node* effect = graph()->start();
Node* control = graph()->start();
- Reduction r = Reduce(graph()->NewNode(javascript()->LoadProperty(feedback),
- base, key, vector, context,
- EmptyFrameState(), effect, control));
+ Reduction r =
+ Reduce(graph()->NewNode(javascript()->LoadProperty(feedback), base, key,
+ context, EmptyFrameState(), effect, control));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(
@@ -660,13 +675,12 @@ TEST_F(JSTypedLoweringTest, JSStorePropertyToExternalTypedArray) {
Node* base = HeapConstant(array);
Node* value =
Parameter(AccessBuilder::ForTypedArrayElement(type, true).type);
- Node* vector = UndefinedConstant();
Node* context = UndefinedConstant();
Node* effect = graph()->start();
Node* control = graph()->start();
VectorSlotPair feedback;
const Operator* op = javascript()->StoreProperty(language_mode, feedback);
- Node* node = graph()->NewNode(op, base, key, value, vector, context,
+ Node* node = graph()->NewNode(op, base, key, value, context,
EmptyFrameState(), effect, control);
Reduction r = Reduce(node);
@@ -703,8 +717,7 @@ TEST_F(JSTypedLoweringTest, JSStorePropertyToExternalTypedArrayWithConversion) {
Node* key = Parameter(
Type::Range(kMinInt / element_size, kMaxInt / element_size, zone()));
Node* base = HeapConstant(array);
- Node* value = Parameter(Type::Any());
- Node* vector = UndefinedConstant();
+ Node* value = Parameter(Type::PlainPrimitive());
Node* context = UndefinedConstant();
Node* effect = graph()->start();
Node* control = graph()->start();
@@ -714,7 +727,7 @@ TEST_F(JSTypedLoweringTest, JSStorePropertyToExternalTypedArrayWithConversion) {
EmptyFrameState(), effect, control);
VectorSlotPair feedback;
const Operator* op = javascript()->StoreProperty(language_mode, feedback);
- Node* node = graph()->NewNode(op, base, key, value, vector, context,
+ Node* node = graph()->NewNode(op, base, key, value, context,
EmptyFrameState(), checkpoint, control);
Reduction r = Reduce(node);
@@ -724,10 +737,7 @@ TEST_F(JSTypedLoweringTest, JSStorePropertyToExternalTypedArrayWithConversion) {
: IsNumberShiftLeft(
key, IsNumberConstant(WhichPowerOf2(element_size)));
- Matcher<Node*> value_matcher =
- IsToNumber(value, context, checkpoint, control);
- Matcher<Node*> effect_matcher = value_matcher;
- Matcher<Node*> control_matcher = IsIfSuccess(value_matcher);
+ Matcher<Node*> value_matcher = IsPlainPrimitiveToNumber(value);
ASSERT_TRUE(r.Changed());
EXPECT_THAT(
@@ -736,7 +746,7 @@ TEST_F(JSTypedLoweringTest, JSStorePropertyToExternalTypedArrayWithConversion) {
BufferAccess(type),
IsPointerConstant(bit_cast<intptr_t>(&backing_store[0])),
offset_matcher, IsNumberConstant(array->byte_length()->Number()),
- value_matcher, effect_matcher, control_matcher));
+ value_matcher, checkpoint, control));
}
}
}
@@ -759,13 +769,12 @@ TEST_F(JSTypedLoweringTest, JSStorePropertyToExternalTypedArrayWithSafeKey) {
Node* key = Parameter(Type::Range(min, max, zone()));
Node* base = HeapConstant(array);
Node* value = Parameter(access.type);
- Node* vector = UndefinedConstant();
Node* context = UndefinedConstant();
Node* effect = graph()->start();
Node* control = graph()->start();
VectorSlotPair feedback;
const Operator* op = javascript()->StoreProperty(language_mode, feedback);
- Node* node = graph()->NewNode(op, base, key, value, vector, context,
+ Node* node = graph()->NewNode(op, base, key, value, context,
EmptyFrameState(), effect, control);
Reduction r = Reduce(node);
@@ -788,13 +797,12 @@ TEST_F(JSTypedLoweringTest, JSLoadNamedStringLength) {
VectorSlotPair feedback;
Handle<Name> name = factory()->length_string();
Node* const receiver = Parameter(Type::String(), 0);
- Node* const vector = Parameter(Type::Internal(), 1);
Node* const context = UndefinedConstant();
Node* const effect = graph()->start();
Node* const control = graph()->start();
- Reduction const r = Reduce(
- graph()->NewNode(javascript()->LoadNamed(name, feedback), receiver,
- vector, context, EmptyFrameState(), effect, control));
+ Reduction const r =
+ Reduce(graph()->NewNode(javascript()->LoadNamed(name, feedback), receiver,
+ context, EmptyFrameState(), effect, control));
ASSERT_TRUE(r.Changed());
EXPECT_THAT(r.replacement(), IsLoadField(AccessBuilder::ForStringLength(),
receiver, effect, control));
diff --git a/deps/v8/test/unittests/compiler/liveness-analyzer-unittest.cc b/deps/v8/test/unittests/compiler/liveness-analyzer-unittest.cc
index f11d6dff18..5d2ec5fc98 100644
--- a/deps/v8/test/unittests/compiler/liveness-analyzer-unittest.cc
+++ b/deps/v8/test/unittests/compiler/liveness-analyzer-unittest.cc
@@ -28,7 +28,8 @@ class LivenessAnalysisTest : public GraphTest {
jsgraph_(isolate(), graph(), common(), &javascript_, nullptr,
&machine_),
analyzer_(locals_count, false, zone()),
- empty_values_(graph()->NewNode(common()->StateValues(0), 0, nullptr)),
+ empty_values_(graph()->NewNode(
+ common()->StateValues(0, SparseInputMask::Dense()), 0, nullptr)),
next_checkpoint_id_(0),
current_block_(nullptr) {}
@@ -48,7 +49,8 @@ class LivenessAnalysisTest : public GraphTest {
int ast_num = next_checkpoint_id_++;
int first_const = intconst_from_bailout_id(ast_num, locals_count_);
- const Operator* locals_op = common()->StateValues(locals_count_);
+ const Operator* locals_op =
+ common()->StateValues(locals_count_, SparseInputMask::Dense());
ZoneVector<Node*> local_inputs(locals_count_, nullptr, zone());
for (int i = 0; i < locals_count_; i++) {
diff --git a/deps/v8/test/unittests/compiler/load-elimination-unittest.cc b/deps/v8/test/unittests/compiler/load-elimination-unittest.cc
index 81393941bb..8d34fb9699 100644
--- a/deps/v8/test/unittests/compiler/load-elimination-unittest.cc
+++ b/deps/v8/test/unittests/compiler/load-elimination-unittest.cc
@@ -125,11 +125,9 @@ TEST_F(LoadEliminationTest, LoadFieldAndLoadField) {
Node* object = Parameter(Type::Any(), 0);
Node* effect = graph()->start();
Node* control = graph()->start();
- FieldAccess const access = {kTaggedBase,
- kPointerSize,
- MaybeHandle<Name>(),
- Type::Any(),
- MachineType::AnyTagged(),
+ FieldAccess const access = {kTaggedBase, kPointerSize,
+ MaybeHandle<Name>(), MaybeHandle<Map>(),
+ Type::Any(), MachineType::AnyTagged(),
kNoWriteBarrier};
StrictMock<MockAdvancedReducerEditor> editor;
@@ -154,11 +152,9 @@ TEST_F(LoadEliminationTest, StoreFieldAndLoadField) {
Node* value = Parameter(Type::Any(), 1);
Node* effect = graph()->start();
Node* control = graph()->start();
- FieldAccess access = {kTaggedBase,
- kPointerSize,
- MaybeHandle<Name>(),
- Type::Any(),
- MachineType::AnyTagged(),
+ FieldAccess access = {kTaggedBase, kPointerSize,
+ MaybeHandle<Name>(), MaybeHandle<Map>(),
+ Type::Any(), MachineType::AnyTagged(),
kNoWriteBarrier};
StrictMock<MockAdvancedReducerEditor> editor;
@@ -178,17 +174,55 @@ TEST_F(LoadEliminationTest, StoreFieldAndLoadField) {
EXPECT_EQ(value, r.replacement());
}
+TEST_F(LoadEliminationTest, StoreFieldAndKillFields) {
+ Node* object = Parameter(Type::Any(), 0);
+ Node* value = Parameter(Type::Any(), 1);
+ Node* effect = graph()->start();
+ Node* control = graph()->start();
+
+ FieldAccess access1 = {kTaggedBase, kPointerSize,
+ MaybeHandle<Name>(), MaybeHandle<Map>(),
+ Type::Any(), MachineType::AnyTagged(),
+ kNoWriteBarrier};
+
+  // An offset that is outside the field cache size.
+ FieldAccess access2 = {kTaggedBase, 2048 * kPointerSize,
+ MaybeHandle<Name>(), MaybeHandle<Map>(),
+ Type::Any(), MachineType::AnyTagged(),
+ kNoWriteBarrier};
+
+ StrictMock<MockAdvancedReducerEditor> editor;
+ LoadElimination load_elimination(&editor, jsgraph(), zone());
+
+ load_elimination.Reduce(graph()->start());
+
+ Node* store1 = effect = graph()->NewNode(simplified()->StoreField(access1),
+ object, value, effect, control);
+ load_elimination.Reduce(store1);
+
+  // Invalidate the caches for the object.
+ Node* store2 = effect = graph()->NewNode(simplified()->StoreField(access2),
+ object, value, effect, control);
+ load_elimination.Reduce(store2);
+
+ Node* store3 = graph()->NewNode(simplified()->StoreField(access1),
+ object, value, effect, control);
+
+ Reduction r = load_elimination.Reduce(store3);
+
+  // store3 should not be replaced, since the caches were invalidated.
+ EXPECT_EQ(store3, r.replacement());
+}
+
TEST_F(LoadEliminationTest, StoreFieldAndStoreElementAndLoadField) {
Node* object = Parameter(Type::Any(), 0);
Node* value = Parameter(Type::Any(), 1);
Node* index = Parameter(Type::UnsignedSmall(), 2);
Node* effect = graph()->start();
Node* control = graph()->start();
- FieldAccess access = {kTaggedBase,
- kPointerSize,
- MaybeHandle<Name>(),
- Type::Any(),
- MachineType::AnyTagged(),
+ FieldAccess access = {kTaggedBase, kPointerSize,
+ MaybeHandle<Name>(), MaybeHandle<Map>(),
+ Type::Any(), MachineType::AnyTagged(),
kNoWriteBarrier};
StrictMock<MockAdvancedReducerEditor> editor;
@@ -288,11 +322,9 @@ TEST_F(LoadEliminationTest, LoadFieldOnFalseBranchOfDiamond) {
Node* check = Parameter(Type::Boolean(), 1);
Node* effect = graph()->start();
Node* control = graph()->start();
- FieldAccess const access = {kTaggedBase,
- kPointerSize,
- MaybeHandle<Name>(),
- Type::Any(),
- MachineType::AnyTagged(),
+ FieldAccess const access = {kTaggedBase, kPointerSize,
+ MaybeHandle<Name>(), MaybeHandle<Map>(),
+ Type::Any(), MachineType::AnyTagged(),
kNoWriteBarrier};
StrictMock<MockAdvancedReducerEditor> editor;
@@ -326,11 +358,9 @@ TEST_F(LoadEliminationTest, LoadFieldOnTrueBranchOfDiamond) {
Node* check = Parameter(Type::Boolean(), 1);
Node* effect = graph()->start();
Node* control = graph()->start();
- FieldAccess const access = {kTaggedBase,
- kPointerSize,
- MaybeHandle<Name>(),
- Type::Any(),
- MachineType::AnyTagged(),
+ FieldAccess const access = {kTaggedBase, kPointerSize,
+ MaybeHandle<Name>(), MaybeHandle<Map>(),
+ Type::Any(), MachineType::AnyTagged(),
kNoWriteBarrier};
StrictMock<MockAdvancedReducerEditor> editor;
@@ -364,11 +394,9 @@ TEST_F(LoadEliminationTest, LoadFieldWithTypeMismatch) {
Node* value = Parameter(Type::Signed32(), 1);
Node* effect = graph()->start();
Node* control = graph()->start();
- FieldAccess const access = {kTaggedBase,
- kPointerSize,
- MaybeHandle<Name>(),
- Type::Unsigned31(),
- MachineType::AnyTagged(),
+ FieldAccess const access = {kTaggedBase, kPointerSize,
+ MaybeHandle<Name>(), MaybeHandle<Map>(),
+ Type::Unsigned31(), MachineType::AnyTagged(),
kNoWriteBarrier};
StrictMock<MockAdvancedReducerEditor> editor;
@@ -422,11 +450,9 @@ TEST_F(LoadEliminationTest, AliasAnalysisForFinishRegion) {
Node* value1 = Parameter(Type::Signed32(), 1);
Node* effect = graph()->start();
Node* control = graph()->start();
- FieldAccess const access = {kTaggedBase,
- kPointerSize,
- MaybeHandle<Name>(),
- Type::Signed32(),
- MachineType::AnyTagged(),
+ FieldAccess const access = {kTaggedBase, kPointerSize,
+ MaybeHandle<Name>(), MaybeHandle<Map>(),
+ Type::Signed32(), MachineType::AnyTagged(),
kNoWriteBarrier};
StrictMock<MockAdvancedReducerEditor> editor;
diff --git a/deps/v8/test/unittests/compiler/machine-operator-reducer-unittest.cc b/deps/v8/test/unittests/compiler/machine-operator-reducer-unittest.cc
index 1d29d9733f..4f1946c379 100644
--- a/deps/v8/test/unittests/compiler/machine-operator-reducer-unittest.cc
+++ b/deps/v8/test/unittests/compiler/machine-operator-reducer-unittest.cc
@@ -1183,12 +1183,16 @@ TEST_F(MachineOperatorReducerTest, Int32ModWithConstant) {
ASSERT_TRUE(r.Changed());
EXPECT_THAT(
r.replacement(),
- IsSelect(MachineRepresentation::kWord32,
- IsInt32LessThan(p0, IsInt32Constant(0)),
- IsInt32Sub(IsInt32Constant(0),
- IsWord32And(IsInt32Sub(IsInt32Constant(0), p0),
- IsInt32Constant(mask))),
- IsWord32And(p0, IsInt32Constant(mask))));
+ IsPhi(
+ MachineRepresentation::kWord32,
+ IsInt32Sub(IsInt32Constant(0),
+ IsWord32And(IsInt32Sub(IsInt32Constant(0), p0),
+ IsInt32Constant(mask))),
+ IsWord32And(p0, IsInt32Constant(mask)),
+ IsMerge(IsIfTrue(IsBranch(IsInt32LessThan(p0, IsInt32Constant(0)),
+ graph()->start())),
+ IsIfFalse(IsBranch(IsInt32LessThan(p0, IsInt32Constant(0)),
+ graph()->start())))));
}
TRACED_FORRANGE(int32_t, shift, 1, 31) {
Reduction const r = Reduce(graph()->NewNode(
@@ -1199,12 +1203,16 @@ TEST_F(MachineOperatorReducerTest, Int32ModWithConstant) {
ASSERT_TRUE(r.Changed());
EXPECT_THAT(
r.replacement(),
- IsSelect(MachineRepresentation::kWord32,
- IsInt32LessThan(p0, IsInt32Constant(0)),
- IsInt32Sub(IsInt32Constant(0),
- IsWord32And(IsInt32Sub(IsInt32Constant(0), p0),
- IsInt32Constant(mask))),
- IsWord32And(p0, IsInt32Constant(mask))));
+ IsPhi(
+ MachineRepresentation::kWord32,
+ IsInt32Sub(IsInt32Constant(0),
+ IsWord32And(IsInt32Sub(IsInt32Constant(0), p0),
+ IsInt32Constant(mask))),
+ IsWord32And(p0, IsInt32Constant(mask)),
+ IsMerge(IsIfTrue(IsBranch(IsInt32LessThan(p0, IsInt32Constant(0)),
+ graph()->start())),
+ IsIfFalse(IsBranch(IsInt32LessThan(p0, IsInt32Constant(0)),
+ graph()->start())))));
}
TRACED_FOREACH(int32_t, divisor, kInt32Values) {
if (divisor == 0 || base::bits::IsPowerOfTwo32(Abs(divisor))) continue;
@@ -2077,8 +2085,19 @@ TEST_F(MachineOperatorReducerTest, Float64LessThanOrEqualWithFloat32Constant) {
// -----------------------------------------------------------------------------
-// Store
+// Float64RoundDown
+TEST_F(MachineOperatorReducerTest, Float64RoundDownWithConstant) {
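+  // Float64RoundDown of a constant input should constant-fold to the floor
+  // of that constant.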
+ TRACED_FOREACH(double, x, kFloat64Values) {
+ Reduction r = Reduce(graph()->NewNode(
+ machine()->Float64RoundDown().placeholder(), Float64Constant(x)));
+ ASSERT_TRUE(r.Changed());
+ EXPECT_THAT(r.replacement(), IsFloat64Constant(Floor(x)));
+ }
+}
+
+// -----------------------------------------------------------------------------
+// Store
TEST_F(MachineOperatorReducerTest, StoreRepWord8WithWord32And) {
const StoreRepresentation rep(MachineRepresentation::kWord8, kNoWriteBarrier);
diff --git a/deps/v8/test/unittests/compiler/mips/instruction-selector-mips-unittest.cc b/deps/v8/test/unittests/compiler/mips/instruction-selector-mips-unittest.cc
index 1698614760..d1336940a3 100644
--- a/deps/v8/test/unittests/compiler/mips/instruction-selector-mips-unittest.cc
+++ b/deps/v8/test/unittests/compiler/mips/instruction-selector-mips-unittest.cc
@@ -1270,7 +1270,7 @@ TEST_F(InstructionSelectorTest, Float64Abs) {
}
TEST_F(InstructionSelectorTest, Float32AddWithFloat32Mul) {
- if (!IsMipsArchVariant(kMips32r2) && !IsMipsArchVariant(kMips32r6)) {
+ if (!IsMipsArchVariant(kMips32r2)) {
return;
}
{
@@ -1283,23 +1283,14 @@ TEST_F(InstructionSelectorTest, Float32AddWithFloat32Mul) {
m.Return(n);
Stream s = m.Build();
ASSERT_EQ(1U, s.size());
- if (IsMipsArchVariant(kMips32r2)) {
- EXPECT_EQ(kMipsMaddS, s[0]->arch_opcode());
- } else if (IsMipsArchVariant(kMips32r6)) {
- EXPECT_EQ(kMipsMaddfS, s[0]->arch_opcode());
- }
+ EXPECT_EQ(kMipsMaddS, s[0]->arch_opcode());
ASSERT_EQ(3U, s[0]->InputCount());
EXPECT_EQ(s.ToVreg(p2), s.ToVreg(s[0]->InputAt(0)));
EXPECT_EQ(s.ToVreg(p0), s.ToVreg(s[0]->InputAt(1)));
EXPECT_EQ(s.ToVreg(p1), s.ToVreg(s[0]->InputAt(2)));
ASSERT_EQ(1U, s[0]->OutputCount());
- if (IsMipsArchVariant(kMips32r2)) {
- EXPECT_FALSE(
- UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
- } else if (IsMipsArchVariant(kMips32r6)) {
- EXPECT_TRUE(
- UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
- }
+ EXPECT_FALSE(
+ UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
EXPECT_EQ(s.ToVreg(n), s.ToVreg(s[0]->Output()));
EXPECT_EQ(kFlags_none, s[0]->flags_mode());
}
@@ -1313,30 +1304,21 @@ TEST_F(InstructionSelectorTest, Float32AddWithFloat32Mul) {
m.Return(n);
Stream s = m.Build();
ASSERT_EQ(1U, s.size());
- if (IsMipsArchVariant(kMips32r2)) {
- EXPECT_EQ(kMipsMaddS, s[0]->arch_opcode());
- } else if (IsMipsArchVariant(kMips32r6)) {
- EXPECT_EQ(kMipsMaddfS, s[0]->arch_opcode());
- }
+ EXPECT_EQ(kMipsMaddS, s[0]->arch_opcode());
ASSERT_EQ(3U, s[0]->InputCount());
EXPECT_EQ(s.ToVreg(p0), s.ToVreg(s[0]->InputAt(0)));
EXPECT_EQ(s.ToVreg(p1), s.ToVreg(s[0]->InputAt(1)));
EXPECT_EQ(s.ToVreg(p2), s.ToVreg(s[0]->InputAt(2)));
ASSERT_EQ(1U, s[0]->OutputCount());
- if (IsMipsArchVariant(kMips32r2)) {
- EXPECT_FALSE(
- UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
- } else if (IsMipsArchVariant(kMips32r6)) {
- EXPECT_TRUE(
- UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
- }
+ EXPECT_FALSE(
+ UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
EXPECT_EQ(s.ToVreg(n), s.ToVreg(s[0]->Output()));
EXPECT_EQ(kFlags_none, s[0]->flags_mode());
}
}
TEST_F(InstructionSelectorTest, Float64AddWithFloat64Mul) {
- if (!IsMipsArchVariant(kMips32r2) && !IsMipsArchVariant(kMips32r6)) {
+ if (!IsMipsArchVariant(kMips32r2)) {
return;
}
{
@@ -1349,23 +1331,14 @@ TEST_F(InstructionSelectorTest, Float64AddWithFloat64Mul) {
m.Return(n);
Stream s = m.Build();
ASSERT_EQ(1U, s.size());
- if (IsMipsArchVariant(kMips32r2)) {
- EXPECT_EQ(kMipsMaddD, s[0]->arch_opcode());
- } else if (IsMipsArchVariant(kMips32r6)) {
- EXPECT_EQ(kMipsMaddfD, s[0]->arch_opcode());
- }
+ EXPECT_EQ(kMipsMaddD, s[0]->arch_opcode());
ASSERT_EQ(3U, s[0]->InputCount());
EXPECT_EQ(s.ToVreg(p2), s.ToVreg(s[0]->InputAt(0)));
EXPECT_EQ(s.ToVreg(p0), s.ToVreg(s[0]->InputAt(1)));
EXPECT_EQ(s.ToVreg(p1), s.ToVreg(s[0]->InputAt(2)));
ASSERT_EQ(1U, s[0]->OutputCount());
- if (IsMipsArchVariant(kMips32r2)) {
- EXPECT_FALSE(
- UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
- } else if (IsMipsArchVariant(kMips32r6)) {
- EXPECT_TRUE(
- UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
- }
+ EXPECT_FALSE(
+ UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
EXPECT_EQ(s.ToVreg(n), s.ToVreg(s[0]->Output()));
EXPECT_EQ(kFlags_none, s[0]->flags_mode());
}
@@ -1379,23 +1352,14 @@ TEST_F(InstructionSelectorTest, Float64AddWithFloat64Mul) {
m.Return(n);
Stream s = m.Build();
ASSERT_EQ(1U, s.size());
- if (IsMipsArchVariant(kMips32r2)) {
- EXPECT_EQ(kMipsMaddD, s[0]->arch_opcode());
- } else if (IsMipsArchVariant(kMips32r6)) {
- EXPECT_EQ(kMipsMaddfD, s[0]->arch_opcode());
- }
+ EXPECT_EQ(kMipsMaddD, s[0]->arch_opcode());
ASSERT_EQ(3U, s[0]->InputCount());
EXPECT_EQ(s.ToVreg(p0), s.ToVreg(s[0]->InputAt(0)));
EXPECT_EQ(s.ToVreg(p1), s.ToVreg(s[0]->InputAt(1)));
EXPECT_EQ(s.ToVreg(p2), s.ToVreg(s[0]->InputAt(2)));
ASSERT_EQ(1U, s[0]->OutputCount());
- if (IsMipsArchVariant(kMips32r2)) {
- EXPECT_FALSE(
- UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
- } else if (IsMipsArchVariant(kMips32r6)) {
- EXPECT_TRUE(
- UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
- }
+ EXPECT_FALSE(
+ UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
EXPECT_EQ(s.ToVreg(n), s.ToVreg(s[0]->Output()));
EXPECT_EQ(kFlags_none, s[0]->flags_mode());
}
@@ -1404,83 +1368,59 @@ TEST_F(InstructionSelectorTest, Float64AddWithFloat64Mul) {
TEST_F(InstructionSelectorTest, Float32SubWithFloat32Mul) {
StreamBuilder m(this, MachineType::Float32(), MachineType::Float32(),
MachineType::Float32(), MachineType::Float32());
- Node* const p0 = m.Parameter(0);
- Node* const p1 = m.Parameter(1);
- Node* const p2 = m.Parameter(2);
- Node* n = nullptr;
-
- if (!IsMipsArchVariant(kMips32r2) && !IsMipsArchVariant(kMips32r6)) {
+ if (!IsMipsArchVariant(kMips32r2)) {
return;
}
+ {
+ Node* const p0 = m.Parameter(0);
+ Node* const p1 = m.Parameter(1);
+ Node* const p2 = m.Parameter(2);
+ Node* n = nullptr;
- if (IsMipsArchVariant(kMips32r2)) {
n = m.Float32Sub(m.Float32Mul(p1, p2), p0);
- } else if (IsMipsArchVariant(kMips32r6)) {
- n = m.Float32Sub(p0, m.Float32Mul(p1, p2));
- }
- m.Return(n);
- Stream s = m.Build();
- ASSERT_EQ(1U, s.size());
- if (IsMipsArchVariant(kMips32r2)) {
+ m.Return(n);
+ Stream s = m.Build();
+ ASSERT_EQ(1U, s.size());
EXPECT_EQ(kMipsMsubS, s[0]->arch_opcode());
- } else if (IsMipsArchVariant(kMips32r6)) {
- EXPECT_EQ(kMipsMsubfS, s[0]->arch_opcode());
- }
- ASSERT_EQ(3U, s[0]->InputCount());
- EXPECT_EQ(s.ToVreg(p0), s.ToVreg(s[0]->InputAt(0)));
- EXPECT_EQ(s.ToVreg(p1), s.ToVreg(s[0]->InputAt(1)));
- EXPECT_EQ(s.ToVreg(p2), s.ToVreg(s[0]->InputAt(2)));
- ASSERT_EQ(1U, s[0]->OutputCount());
- if (IsMipsArchVariant(kMips32r2)) {
+ ASSERT_EQ(3U, s[0]->InputCount());
+ EXPECT_EQ(s.ToVreg(p0), s.ToVreg(s[0]->InputAt(0)));
+ EXPECT_EQ(s.ToVreg(p1), s.ToVreg(s[0]->InputAt(1)));
+ EXPECT_EQ(s.ToVreg(p2), s.ToVreg(s[0]->InputAt(2)));
+ ASSERT_EQ(1U, s[0]->OutputCount());
EXPECT_FALSE(
UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
- } else if (IsMipsArchVariant(kMips32r6)) {
- EXPECT_TRUE(
- UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
+ EXPECT_EQ(s.ToVreg(n), s.ToVreg(s[0]->Output()));
+ EXPECT_EQ(kFlags_none, s[0]->flags_mode());
}
- EXPECT_EQ(s.ToVreg(n), s.ToVreg(s[0]->Output()));
- EXPECT_EQ(kFlags_none, s[0]->flags_mode());
}
TEST_F(InstructionSelectorTest, Float64SubWithFloat64Mul) {
StreamBuilder m(this, MachineType::Float64(), MachineType::Float64(),
MachineType::Float64(), MachineType::Float64());
- Node* const p0 = m.Parameter(0);
- Node* const p1 = m.Parameter(1);
- Node* const p2 = m.Parameter(2);
- Node* n = nullptr;
-
- if (!IsMipsArchVariant(kMips32r2) && !IsMipsArchVariant(kMips32r6)) {
+ if (!IsMipsArchVariant(kMips32r2)) {
return;
}
+ {
+ Node* const p0 = m.Parameter(0);
+ Node* const p1 = m.Parameter(1);
+ Node* const p2 = m.Parameter(2);
+ Node* n = nullptr;
- if (IsMipsArchVariant(kMips32r2)) {
n = m.Float64Sub(m.Float64Mul(p1, p2), p0);
- } else if (IsMipsArchVariant(kMips32r6)) {
- n = m.Float64Sub(p0, m.Float64Mul(p1, p2));
- }
- m.Return(n);
- Stream s = m.Build();
- ASSERT_EQ(1U, s.size());
- if (IsMipsArchVariant(kMips32r2)) {
+ m.Return(n);
+ Stream s = m.Build();
+ ASSERT_EQ(1U, s.size());
EXPECT_EQ(kMipsMsubD, s[0]->arch_opcode());
- } else if (IsMipsArchVariant(kMips32r6)) {
- EXPECT_EQ(kMipsMsubfD, s[0]->arch_opcode());
- }
- ASSERT_EQ(3U, s[0]->InputCount());
- EXPECT_EQ(s.ToVreg(p0), s.ToVreg(s[0]->InputAt(0)));
- EXPECT_EQ(s.ToVreg(p1), s.ToVreg(s[0]->InputAt(1)));
- EXPECT_EQ(s.ToVreg(p2), s.ToVreg(s[0]->InputAt(2)));
- ASSERT_EQ(1U, s[0]->OutputCount());
- if (IsMipsArchVariant(kMips32r2)) {
+ ASSERT_EQ(3U, s[0]->InputCount());
+ EXPECT_EQ(s.ToVreg(p0), s.ToVreg(s[0]->InputAt(0)));
+ EXPECT_EQ(s.ToVreg(p1), s.ToVreg(s[0]->InputAt(1)));
+ EXPECT_EQ(s.ToVreg(p2), s.ToVreg(s[0]->InputAt(2)));
+ ASSERT_EQ(1U, s[0]->OutputCount());
EXPECT_FALSE(
UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
- } else if (IsMipsArchVariant(kMips32r6)) {
- EXPECT_TRUE(
- UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
+ EXPECT_EQ(s.ToVreg(n), s.ToVreg(s[0]->Output()));
+ EXPECT_EQ(kFlags_none, s[0]->flags_mode());
}
- EXPECT_EQ(s.ToVreg(n), s.ToVreg(s[0]->Output()));
- EXPECT_EQ(kFlags_none, s[0]->flags_mode());
}
TEST_F(InstructionSelectorTest, Float64Max) {
@@ -1514,6 +1454,18 @@ TEST_F(InstructionSelectorTest, Float64Min) {
EXPECT_EQ(s.ToVreg(n), s.ToVreg(s[0]->Output()));
}
+TEST_F(InstructionSelectorTest, Word32ReverseBytes) {
+ {
+ StreamBuilder m(this, MachineType::Int32(), MachineType::Int32());
+ m.Return(m.Word32ReverseBytes(m.Parameter(0)));
+ Stream s = m.Build();
+ ASSERT_EQ(1U, s.size());
+ EXPECT_EQ(kMipsByteSwap32, s[0]->arch_opcode());
+ EXPECT_EQ(1U, s[0]->InputCount());
+ EXPECT_EQ(1U, s[0]->OutputCount());
+ }
+}
+
} // namespace compiler
} // namespace internal
} // namespace v8
diff --git a/deps/v8/test/unittests/compiler/mips64/instruction-selector-mips64-unittest.cc b/deps/v8/test/unittests/compiler/mips64/instruction-selector-mips64-unittest.cc
index b0e82e4316..97fd7bfc37 100644
--- a/deps/v8/test/unittests/compiler/mips64/instruction-selector-mips64-unittest.cc
+++ b/deps/v8/test/unittests/compiler/mips64/instruction-selector-mips64-unittest.cc
@@ -1360,14 +1360,13 @@ const MemoryAccessImm kMemoryAccessesImm[] = {
-87, -86, -82, -44, -23, -3, 0, 7, 10, 39, 52, 69, 71, 91, 92, 107, 109,
115, 124, 286, 655, 1362, 1569, 2587, 3067, 3096, 3462, 3510, 4095}}};
-
const MemoryAccessImm1 kMemoryAccessImmMoreThan16bit[] = {
{MachineType::Int8(),
kMips64Lb,
kMips64Sb,
&InstructionSelectorTest::Stream::IsInteger,
{-65000, -55000, 32777, 55000, 65000}},
- {MachineType::Int8(),
+ {MachineType::Uint8(),
kMips64Lbu,
kMips64Sb,
&InstructionSelectorTest::Stream::IsInteger,
@@ -1377,7 +1376,7 @@ const MemoryAccessImm1 kMemoryAccessImmMoreThan16bit[] = {
kMips64Sh,
&InstructionSelectorTest::Stream::IsInteger,
{-65000, -55000, 32777, 55000, 65000}},
- {MachineType::Int16(),
+ {MachineType::Uint16(),
kMips64Lhu,
kMips64Sh,
&InstructionSelectorTest::Stream::IsInteger,
@@ -1601,11 +1600,9 @@ TEST_P(InstructionSelectorMemoryAccessImmMoreThan16bitTest,
StreamBuilder m(this, memacc.type, MachineType::Pointer());
m.Return(m.Load(memacc.type, m.Parameter(0), m.Int32Constant(index)));
Stream s = m.Build();
- ASSERT_EQ(2U, s.size());
- // kMips64Dadd is expected opcode
- // size more than 16 bits wide
- EXPECT_EQ(kMips64Dadd, s[0]->arch_opcode());
- EXPECT_EQ(kMode_None, s[0]->addressing_mode());
+ ASSERT_EQ(1U, s.size());
+ EXPECT_EQ(memacc.load_opcode, s[0]->arch_opcode());
+ EXPECT_EQ(kMode_MRI, s[0]->addressing_mode());
EXPECT_EQ(2U, s[0]->InputCount());
EXPECT_EQ(1U, s[0]->OutputCount());
}
@@ -1621,13 +1618,11 @@ TEST_P(InstructionSelectorMemoryAccessImmMoreThan16bitTest,
m.Int32Constant(index), m.Parameter(1), kNoWriteBarrier);
m.Return(m.Int32Constant(0));
Stream s = m.Build();
- ASSERT_EQ(2U, s.size());
- // kMips64Add is expected opcode
- // size more than 16 bits wide
- EXPECT_EQ(kMips64Dadd, s[0]->arch_opcode());
- EXPECT_EQ(kMode_None, s[0]->addressing_mode());
- EXPECT_EQ(2U, s[0]->InputCount());
- EXPECT_EQ(1U, s[0]->OutputCount());
+ ASSERT_EQ(1U, s.size());
+ EXPECT_EQ(memacc.store_opcode, s[0]->arch_opcode());
+ EXPECT_EQ(kMode_MRI, s[0]->addressing_mode());
+ EXPECT_EQ(3U, s[0]->InputCount());
+ EXPECT_EQ(0U, s[0]->OutputCount());
}
}
@@ -1757,6 +1752,9 @@ TEST_F(InstructionSelectorTest, Float64Abs) {
}
TEST_F(InstructionSelectorTest, Float32AddWithFloat32Mul) {
+ if (kArchVariant != kMips64r2) {
+ return;
+ }
{
StreamBuilder m(this, MachineType::Float32(), MachineType::Float32(),
MachineType::Float32(), MachineType::Float32());
@@ -1767,23 +1765,14 @@ TEST_F(InstructionSelectorTest, Float32AddWithFloat32Mul) {
m.Return(n);
Stream s = m.Build();
ASSERT_EQ(1U, s.size());
- if (kArchVariant == kMips64r2) {
- EXPECT_EQ(kMips64MaddS, s[0]->arch_opcode());
- } else if (kArchVariant == kMips64r6) {
- EXPECT_EQ(kMips64MaddfS, s[0]->arch_opcode());
- }
+ EXPECT_EQ(kMips64MaddS, s[0]->arch_opcode());
ASSERT_EQ(3U, s[0]->InputCount());
EXPECT_EQ(s.ToVreg(p2), s.ToVreg(s[0]->InputAt(0)));
EXPECT_EQ(s.ToVreg(p0), s.ToVreg(s[0]->InputAt(1)));
EXPECT_EQ(s.ToVreg(p1), s.ToVreg(s[0]->InputAt(2)));
ASSERT_EQ(1U, s[0]->OutputCount());
- if (kArchVariant == kMips64r2) {
- EXPECT_FALSE(
- UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
- } else if (kArchVariant == kMips64r6) {
- EXPECT_TRUE(
- UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
- }
+ EXPECT_FALSE(
+ UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
EXPECT_EQ(s.ToVreg(n), s.ToVreg(s[0]->Output()));
EXPECT_EQ(kFlags_none, s[0]->flags_mode());
}
@@ -1797,29 +1786,23 @@ TEST_F(InstructionSelectorTest, Float32AddWithFloat32Mul) {
m.Return(n);
Stream s = m.Build();
ASSERT_EQ(1U, s.size());
- if (kArchVariant == kMips64r2) {
- EXPECT_EQ(kMips64MaddS, s[0]->arch_opcode());
- } else if (kArchVariant == kMips64r6) {
- EXPECT_EQ(kMips64MaddfS, s[0]->arch_opcode());
- }
+ EXPECT_EQ(kMips64MaddS, s[0]->arch_opcode());
ASSERT_EQ(3U, s[0]->InputCount());
EXPECT_EQ(s.ToVreg(p0), s.ToVreg(s[0]->InputAt(0)));
EXPECT_EQ(s.ToVreg(p1), s.ToVreg(s[0]->InputAt(1)));
EXPECT_EQ(s.ToVreg(p2), s.ToVreg(s[0]->InputAt(2)));
ASSERT_EQ(1U, s[0]->OutputCount());
- if (kArchVariant == kMips64r2) {
- EXPECT_FALSE(
- UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
- } else if (kArchVariant == kMips64r6) {
- EXPECT_TRUE(
- UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
- }
+ EXPECT_FALSE(
+ UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
EXPECT_EQ(s.ToVreg(n), s.ToVreg(s[0]->Output()));
EXPECT_EQ(kFlags_none, s[0]->flags_mode());
}
}
TEST_F(InstructionSelectorTest, Float64AddWithFloat64Mul) {
+ if (kArchVariant != kMips64r2) {
+ return;
+ }
{
StreamBuilder m(this, MachineType::Float64(), MachineType::Float64(),
MachineType::Float64(), MachineType::Float64());
@@ -1830,23 +1813,14 @@ TEST_F(InstructionSelectorTest, Float64AddWithFloat64Mul) {
m.Return(n);
Stream s = m.Build();
ASSERT_EQ(1U, s.size());
- if (kArchVariant == kMips64r2) {
- EXPECT_EQ(kMips64MaddD, s[0]->arch_opcode());
- } else if (kArchVariant == kMips64r6) {
- EXPECT_EQ(kMips64MaddfD, s[0]->arch_opcode());
- }
+ EXPECT_EQ(kMips64MaddD, s[0]->arch_opcode());
ASSERT_EQ(3U, s[0]->InputCount());
EXPECT_EQ(s.ToVreg(p2), s.ToVreg(s[0]->InputAt(0)));
EXPECT_EQ(s.ToVreg(p0), s.ToVreg(s[0]->InputAt(1)));
EXPECT_EQ(s.ToVreg(p1), s.ToVreg(s[0]->InputAt(2)));
ASSERT_EQ(1U, s[0]->OutputCount());
- if (kArchVariant == kMips64r2) {
- EXPECT_FALSE(
- UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
- } else if (kArchVariant == kMips64r6) {
- EXPECT_TRUE(
- UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
- }
+ EXPECT_FALSE(
+ UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
EXPECT_EQ(s.ToVreg(n), s.ToVreg(s[0]->Output()));
EXPECT_EQ(kFlags_none, s[0]->flags_mode());
}
@@ -1860,23 +1834,14 @@ TEST_F(InstructionSelectorTest, Float64AddWithFloat64Mul) {
m.Return(n);
Stream s = m.Build();
ASSERT_EQ(1U, s.size());
- if (kArchVariant == kMips64r2) {
- EXPECT_EQ(kMips64MaddD, s[0]->arch_opcode());
- } else if (kArchVariant == kMips64r6) {
- EXPECT_EQ(kMips64MaddfD, s[0]->arch_opcode());
- }
+ EXPECT_EQ(kMips64MaddD, s[0]->arch_opcode());
ASSERT_EQ(3U, s[0]->InputCount());
EXPECT_EQ(s.ToVreg(p0), s.ToVreg(s[0]->InputAt(0)));
EXPECT_EQ(s.ToVreg(p1), s.ToVreg(s[0]->InputAt(1)));
EXPECT_EQ(s.ToVreg(p2), s.ToVreg(s[0]->InputAt(2)));
ASSERT_EQ(1U, s[0]->OutputCount());
- if (kArchVariant == kMips64r2) {
- EXPECT_FALSE(
- UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
- } else if (kArchVariant == kMips64r6) {
- EXPECT_TRUE(
- UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
- }
+ EXPECT_FALSE(
+ UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
EXPECT_EQ(s.ToVreg(n), s.ToVreg(s[0]->Output()));
EXPECT_EQ(kFlags_none, s[0]->flags_mode());
}
@@ -1885,73 +1850,57 @@ TEST_F(InstructionSelectorTest, Float64AddWithFloat64Mul) {
TEST_F(InstructionSelectorTest, Float32SubWithFloat32Mul) {
StreamBuilder m(this, MachineType::Float32(), MachineType::Float32(),
MachineType::Float32(), MachineType::Float32());
- Node* const p0 = m.Parameter(0);
- Node* const p1 = m.Parameter(1);
- Node* const p2 = m.Parameter(2);
- Node* n;
- if (kArchVariant == kMips64r2) {
- n = m.Float32Sub(m.Float32Mul(p1, p2), p0);
- } else if (kArchVariant == kMips64r6) {
- n = m.Float32Sub(p0, m.Float32Mul(p1, p2));
+ if (kArchVariant != kMips64r2) {
+ return;
}
- m.Return(n);
- Stream s = m.Build();
- ASSERT_EQ(1U, s.size());
- if (kArchVariant == kMips64r2) {
+ {
+ Node* const p0 = m.Parameter(0);
+ Node* const p1 = m.Parameter(1);
+ Node* const p2 = m.Parameter(2);
+ Node* n;
+ n = m.Float32Sub(m.Float32Mul(p1, p2), p0);
+ m.Return(n);
+ Stream s = m.Build();
+ ASSERT_EQ(1U, s.size());
EXPECT_EQ(kMips64MsubS, s[0]->arch_opcode());
- } else if (kArchVariant == kMips64r6) {
- EXPECT_EQ(kMips64MsubfS, s[0]->arch_opcode());
- }
- ASSERT_EQ(3U, s[0]->InputCount());
- EXPECT_EQ(s.ToVreg(p0), s.ToVreg(s[0]->InputAt(0)));
- EXPECT_EQ(s.ToVreg(p1), s.ToVreg(s[0]->InputAt(1)));
- EXPECT_EQ(s.ToVreg(p2), s.ToVreg(s[0]->InputAt(2)));
- ASSERT_EQ(1U, s[0]->OutputCount());
- if (kArchVariant == kMips64r2) {
+ ASSERT_EQ(3U, s[0]->InputCount());
+ EXPECT_EQ(s.ToVreg(p0), s.ToVreg(s[0]->InputAt(0)));
+ EXPECT_EQ(s.ToVreg(p1), s.ToVreg(s[0]->InputAt(1)));
+ EXPECT_EQ(s.ToVreg(p2), s.ToVreg(s[0]->InputAt(2)));
+ ASSERT_EQ(1U, s[0]->OutputCount());
EXPECT_FALSE(
UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
- } else if (kArchVariant == kMips64r6) {
- EXPECT_TRUE(
- UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
+ EXPECT_EQ(s.ToVreg(n), s.ToVreg(s[0]->Output()));
+ EXPECT_EQ(kFlags_none, s[0]->flags_mode());
}
- EXPECT_EQ(s.ToVreg(n), s.ToVreg(s[0]->Output()));
- EXPECT_EQ(kFlags_none, s[0]->flags_mode());
}
TEST_F(InstructionSelectorTest, Float64SubWithFloat64Mul) {
StreamBuilder m(this, MachineType::Float64(), MachineType::Float64(),
MachineType::Float64(), MachineType::Float64());
- Node* const p0 = m.Parameter(0);
- Node* const p1 = m.Parameter(1);
- Node* const p2 = m.Parameter(2);
- Node* n;
- if (kArchVariant == kMips64r2) {
- n = m.Float64Sub(m.Float64Mul(p1, p2), p0);
- } else if (kArchVariant == kMips64r6) {
- n = m.Float64Sub(p0, m.Float64Mul(p1, p2));
+ if (kArchVariant != kMips64r2) {
+ return;
}
- m.Return(n);
- Stream s = m.Build();
- ASSERT_EQ(1U, s.size());
- if (kArchVariant == kMips64r2) {
+ {
+ Node* const p0 = m.Parameter(0);
+ Node* const p1 = m.Parameter(1);
+ Node* const p2 = m.Parameter(2);
+ Node* n;
+ n = m.Float64Sub(m.Float64Mul(p1, p2), p0);
+ m.Return(n);
+ Stream s = m.Build();
+ ASSERT_EQ(1U, s.size());
EXPECT_EQ(kMips64MsubD, s[0]->arch_opcode());
- } else if (kArchVariant == kMips64r6) {
- EXPECT_EQ(kMips64MsubfD, s[0]->arch_opcode());
- }
- ASSERT_EQ(3U, s[0]->InputCount());
- EXPECT_EQ(s.ToVreg(p0), s.ToVreg(s[0]->InputAt(0)));
- EXPECT_EQ(s.ToVreg(p1), s.ToVreg(s[0]->InputAt(1)));
- EXPECT_EQ(s.ToVreg(p2), s.ToVreg(s[0]->InputAt(2)));
- ASSERT_EQ(1U, s[0]->OutputCount());
- if (kArchVariant == kMips64r2) {
+ ASSERT_EQ(3U, s[0]->InputCount());
+ EXPECT_EQ(s.ToVreg(p0), s.ToVreg(s[0]->InputAt(0)));
+ EXPECT_EQ(s.ToVreg(p1), s.ToVreg(s[0]->InputAt(1)));
+ EXPECT_EQ(s.ToVreg(p2), s.ToVreg(s[0]->InputAt(2)));
+ ASSERT_EQ(1U, s[0]->OutputCount());
EXPECT_FALSE(
UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
- } else if (kArchVariant == kMips64r6) {
- EXPECT_TRUE(
- UnallocatedOperand::cast(s[0]->Output())->HasSameAsInputPolicy());
+ EXPECT_EQ(s.ToVreg(n), s.ToVreg(s[0]->Output()));
+ EXPECT_EQ(kFlags_none, s[0]->flags_mode());
}
- EXPECT_EQ(s.ToVreg(n), s.ToVreg(s[0]->Output()));
- EXPECT_EQ(kFlags_none, s[0]->flags_mode());
}
TEST_F(InstructionSelectorTest, Float64Max) {
@@ -2015,6 +1964,30 @@ TEST_F(InstructionSelectorTest, LoadAndShiftRight) {
}
}
+TEST_F(InstructionSelectorTest, Word32ReverseBytes) {
+ {
+ StreamBuilder m(this, MachineType::Int32(), MachineType::Int32());
+ m.Return(m.Word32ReverseBytes(m.Parameter(0)));
+ Stream s = m.Build();
+ ASSERT_EQ(1U, s.size());
+ EXPECT_EQ(kMips64ByteSwap32, s[0]->arch_opcode());
+ EXPECT_EQ(1U, s[0]->InputCount());
+ EXPECT_EQ(1U, s[0]->OutputCount());
+ }
+}
+
+TEST_F(InstructionSelectorTest, Word64ReverseBytes) {
+ {
+ StreamBuilder m(this, MachineType::Int64(), MachineType::Int64());
+ m.Return(m.Word64ReverseBytes(m.Parameter(0)));
+ Stream s = m.Build();
+ ASSERT_EQ(1U, s.size());
+ EXPECT_EQ(kMips64ByteSwap64, s[0]->arch_opcode());
+ EXPECT_EQ(1U, s[0]->InputCount());
+ EXPECT_EQ(1U, s[0]->OutputCount());
+ }
+}
+
} // namespace compiler
} // namespace internal
} // namespace v8
diff --git a/deps/v8/test/unittests/compiler/node-test-utils.cc b/deps/v8/test/unittests/compiler/node-test-utils.cc
index 8352691644..8e8ccf0be6 100644
--- a/deps/v8/test/unittests/compiler/node-test-utils.cc
+++ b/deps/v8/test/unittests/compiler/node-test-utils.cc
@@ -12,6 +12,7 @@
#include "src/compiler/node-properties.h"
#include "src/compiler/simplified-operator.h"
#include "src/handles-inl.h"
+#include "src/objects-inl.h"
#include "src/objects.h"
using testing::_;
@@ -803,32 +804,6 @@ class IsTailCallMatcher final : public NodeMatcher {
const Matcher<Node*> control_matcher_;
};
-
-class IsReferenceEqualMatcher final : public NodeMatcher {
- public:
- IsReferenceEqualMatcher(const Matcher<Type*>& type_matcher,
- const Matcher<Node*>& lhs_matcher,
- const Matcher<Node*>& rhs_matcher)
- : NodeMatcher(IrOpcode::kReferenceEqual),
- type_matcher_(type_matcher),
- lhs_matcher_(lhs_matcher),
- rhs_matcher_(rhs_matcher) {}
-
- bool MatchAndExplain(Node* node, MatchResultListener* listener) const final {
- return (NodeMatcher::MatchAndExplain(node, listener) &&
- // TODO(bmeurer): The type parameter is currently ignored.
- PrintMatchAndExplain(NodeProperties::GetValueInput(node, 0), "lhs",
- lhs_matcher_, listener) &&
- PrintMatchAndExplain(NodeProperties::GetValueInput(node, 1), "rhs",
- rhs_matcher_, listener));
- }
-
- private:
- const Matcher<Type*> type_matcher_;
- const Matcher<Node*> lhs_matcher_;
- const Matcher<Node*> rhs_matcher_;
-};
-
class IsSpeculativeBinopMatcher final : public NodeMatcher {
public:
IsSpeculativeBinopMatcher(IrOpcode::Value opcode,
@@ -1364,24 +1339,24 @@ STORE_MATCHER(UnalignedStore)
class IsStackSlotMatcher final : public NodeMatcher {
public:
- explicit IsStackSlotMatcher(const Matcher<MachineRepresentation>& rep_matcher)
- : NodeMatcher(IrOpcode::kStackSlot), rep_matcher_(rep_matcher) {}
+ explicit IsStackSlotMatcher(const Matcher<int>& size_matcher)
+ : NodeMatcher(IrOpcode::kStackSlot), size_matcher_(size_matcher) {}
void DescribeTo(std::ostream* os) const final {
NodeMatcher::DescribeTo(os);
- *os << " whose rep (";
- rep_matcher_.DescribeTo(os);
+ *os << " whose size (";
+ size_matcher_.DescribeTo(os);
*os << ")";
}
bool MatchAndExplain(Node* node, MatchResultListener* listener) const final {
return (NodeMatcher::MatchAndExplain(node, listener) &&
- PrintMatchAndExplain(OpParameter<MachineRepresentation>(node),
- "rep", rep_matcher_, listener));
+ PrintMatchAndExplain(OpParameter<int>(node), "size", size_matcher_,
+ listener));
}
private:
- const Matcher<MachineRepresentation> rep_matcher_;
+ const Matcher<int> size_matcher_;
};
class IsToNumberMatcher final : public NodeMatcher {
@@ -2072,13 +2047,6 @@ Matcher<Node*> IsTailCall(
effect_matcher, control_matcher));
}
-Matcher<Node*> IsReferenceEqual(const Matcher<Type*>& type_matcher,
- const Matcher<Node*>& lhs_matcher,
- const Matcher<Node*>& rhs_matcher) {
- return MakeMatcher(
- new IsReferenceEqualMatcher(type_matcher, lhs_matcher, rhs_matcher));
-}
-
#define DEFINE_SPECULATIVE_BINOP_MATCHER(opcode) \
Matcher<Node*> Is##opcode(const Matcher<NumberOperationHint>& hint_matcher, \
const Matcher<Node*>& lhs_matcher, \
@@ -2207,8 +2175,8 @@ Matcher<Node*> IsUnalignedStore(
control_matcher));
}
-Matcher<Node*> IsStackSlot(const Matcher<MachineRepresentation>& rep_matcher) {
- return MakeMatcher(new IsStackSlotMatcher(rep_matcher));
+Matcher<Node*> IsStackSlot(const Matcher<int>& size_matcher) {
+ return MakeMatcher(new IsStackSlotMatcher(size_matcher));
}
Matcher<Node*> IsToNumber(const Matcher<Node*>& base_matcher,
@@ -2281,6 +2249,7 @@ IS_BINOP_MATCHER(NumberAtan2)
IS_BINOP_MATCHER(NumberMax)
IS_BINOP_MATCHER(NumberMin)
IS_BINOP_MATCHER(NumberPow)
+IS_BINOP_MATCHER(ReferenceEqual)
IS_BINOP_MATCHER(Word32And)
IS_BINOP_MATCHER(Word32Or)
IS_BINOP_MATCHER(Word32Xor)
@@ -2305,6 +2274,7 @@ IS_BINOP_MATCHER(Uint32LessThan)
IS_BINOP_MATCHER(Uint32LessThanOrEqual)
IS_BINOP_MATCHER(Int64Add)
IS_BINOP_MATCHER(Int64Sub)
+IS_BINOP_MATCHER(Int64Mul)
IS_BINOP_MATCHER(JSAdd)
IS_BINOP_MATCHER(Float32Equal)
IS_BINOP_MATCHER(Float32LessThan)
@@ -2380,6 +2350,7 @@ IS_UNOP_MATCHER(NumberToUint32)
IS_UNOP_MATCHER(PlainPrimitiveToNumber)
IS_UNOP_MATCHER(ObjectIsReceiver)
IS_UNOP_MATCHER(ObjectIsSmi)
+IS_UNOP_MATCHER(ObjectIsUndetectable)
IS_UNOP_MATCHER(StringFromCharCode)
IS_UNOP_MATCHER(Word32Clz)
IS_UNOP_MATCHER(Word32Ctz)
diff --git a/deps/v8/test/unittests/compiler/node-test-utils.h b/deps/v8/test/unittests/compiler/node-test-utils.h
index fa5ae02dea..11e2704a74 100644
--- a/deps/v8/test/unittests/compiler/node-test-utils.h
+++ b/deps/v8/test/unittests/compiler/node-test-utils.h
@@ -205,8 +205,7 @@ Matcher<Node*> IsTailCall(
Matcher<Node*> IsBooleanNot(const Matcher<Node*>& value_matcher);
-Matcher<Node*> IsReferenceEqual(const Matcher<Type*>& type_matcher,
- const Matcher<Node*>& lhs_matcher,
+Matcher<Node*> IsReferenceEqual(const Matcher<Node*>& lhs_matcher,
const Matcher<Node*>& rhs_matcher);
Matcher<Node*> IsNumberEqual(const Matcher<Node*>& lhs_matcher,
const Matcher<Node*>& rhs_matcher);
@@ -311,6 +310,7 @@ Matcher<Node*> IsStoreElement(const Matcher<ElementAccess>& access_matcher,
const Matcher<Node*>& control_matcher);
Matcher<Node*> IsObjectIsReceiver(const Matcher<Node*>& value_matcher);
Matcher<Node*> IsObjectIsSmi(const Matcher<Node*>& value_matcher);
+Matcher<Node*> IsObjectIsUndetectable(const Matcher<Node*>& value_matcher);
Matcher<Node*> IsLoad(const Matcher<LoadRepresentation>& rep_matcher,
const Matcher<Node*>& base_matcher,
@@ -333,7 +333,7 @@ Matcher<Node*> IsUnalignedStore(
const Matcher<Node*>& base_matcher, const Matcher<Node*>& index_matcher,
const Matcher<Node*>& value_matcher, const Matcher<Node*>& effect_matcher,
const Matcher<Node*>& control_matcher);
-Matcher<Node*> IsStackSlot(const Matcher<MachineRepresentation>& rep_matcher);
+Matcher<Node*> IsStackSlot(const Matcher<int>& size_matcher);
Matcher<Node*> IsWord32And(const Matcher<Node*>& lhs_matcher,
const Matcher<Node*>& rhs_matcher);
Matcher<Node*> IsWord32Or(const Matcher<Node*>& lhs_matcher,
@@ -385,6 +385,8 @@ Matcher<Node*> IsInt64Add(const Matcher<Node*>& lhs_matcher,
const Matcher<Node*>& rhs_matcher);
Matcher<Node*> IsInt64Sub(const Matcher<Node*>& lhs_matcher,
const Matcher<Node*>& rhs_matcher);
+Matcher<Node*> IsInt64Mul(const Matcher<Node*>& lhs_matcher,
+ const Matcher<Node*>& rhs_matcher);
Matcher<Node*> IsJSAdd(const Matcher<Node*>& lhs_matcher,
const Matcher<Node*>& rhs_matcher);
Matcher<Node*> IsBitcastTaggedToWord(const Matcher<Node*>& input_matcher);
diff --git a/deps/v8/test/unittests/compiler/regalloc/OWNERS b/deps/v8/test/unittests/compiler/regalloc/OWNERS
new file mode 100644
index 0000000000..88646a1206
--- /dev/null
+++ b/deps/v8/test/unittests/compiler/regalloc/OWNERS
@@ -0,0 +1,5 @@
+set noparent
+
+bmeurer@chromium.org
+jarin@chromium.org
+mtrofin@chromium.org
\ No newline at end of file
diff --git a/deps/v8/test/unittests/compiler/live-range-unittest.cc b/deps/v8/test/unittests/compiler/regalloc/live-range-unittest.cc
index e4fc2ca151..fc7b268b44 100644
--- a/deps/v8/test/unittests/compiler/live-range-unittest.cc
+++ b/deps/v8/test/unittests/compiler/regalloc/live-range-unittest.cc
@@ -2,11 +2,9 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-
#include "test/unittests/compiler/live-range-builder.h"
#include "test/unittests/test-utils.h"
-
// TODO(mtrofin): would we want to centralize this definition?
#ifdef DEBUG
#define V8_ASSERT_DEBUG_DEATH(statement, regex) \
@@ -29,7 +27,6 @@ class LiveRangeUnitTest : public TestWithZone {
return range->SplitAt(LifetimePosition::FromInt(pos), zone());
}
-
TopLevelLiveRange* Splinter(TopLevelLiveRange* top, int start, int end,
int new_id = 0) {
if (top->splinter() == nullptr) {
@@ -70,7 +67,6 @@ class LiveRangeUnitTest : public TestWithZone {
}
};
-
TEST_F(LiveRangeUnitTest, InvalidConstruction) {
// Build a range manually, because the builder guards against empty cases.
TopLevelLiveRange* range =
@@ -81,31 +77,26 @@ TEST_F(LiveRangeUnitTest, InvalidConstruction) {
".*");
}
-
TEST_F(LiveRangeUnitTest, SplitInvalidStart) {
TopLevelLiveRange* range = TestRangeBuilder(zone()).Build(0, 1);
V8_ASSERT_DEBUG_DEATH(Split(range, 0), ".*");
}
-
TEST_F(LiveRangeUnitTest, DISABLE_IN_RELEASE(InvalidSplitEnd)) {
TopLevelLiveRange* range = TestRangeBuilder(zone()).Build(0, 1);
ASSERT_DEATH_IF_SUPPORTED(Split(range, 1), ".*");
}
-
TEST_F(LiveRangeUnitTest, DISABLE_IN_RELEASE(SplitInvalidPreStart)) {
TopLevelLiveRange* range = TestRangeBuilder(zone()).Build(1, 2);
ASSERT_DEATH_IF_SUPPORTED(Split(range, 0), ".*");
}
-
TEST_F(LiveRangeUnitTest, DISABLE_IN_RELEASE(SplitInvalidPostEnd)) {
TopLevelLiveRange* range = TestRangeBuilder(zone()).Build(0, 1);
ASSERT_DEATH_IF_SUPPORTED(Split(range, 2), ".*");
}
-
TEST_F(LiveRangeUnitTest, SplitSingleIntervalNoUsePositions) {
TopLevelLiveRange* range = TestRangeBuilder(zone()).Build(0, 2);
LiveRange* child = Split(range, 1);
@@ -119,7 +110,6 @@ TEST_F(LiveRangeUnitTest, SplitSingleIntervalNoUsePositions) {
EXPECT_TRUE(RangesMatch(expected_bottom, child));
}
-
TEST_F(LiveRangeUnitTest, SplitManyIntervalNoUsePositionsBetween) {
TopLevelLiveRange* range =
TestRangeBuilder(zone()).Add(0, 2).Add(4, 6).Build();
@@ -134,7 +124,6 @@ TEST_F(LiveRangeUnitTest, SplitManyIntervalNoUsePositionsBetween) {
EXPECT_TRUE(RangesMatch(expected_bottom, child));
}
-
TEST_F(LiveRangeUnitTest, SplitManyIntervalNoUsePositionsFront) {
TopLevelLiveRange* range =
TestRangeBuilder(zone()).Add(0, 2).Add(4, 6).Build();
@@ -150,7 +139,6 @@ TEST_F(LiveRangeUnitTest, SplitManyIntervalNoUsePositionsFront) {
EXPECT_TRUE(RangesMatch(expected_bottom, child));
}
-
TEST_F(LiveRangeUnitTest, SplitManyIntervalNoUsePositionsAfter) {
TopLevelLiveRange* range =
TestRangeBuilder(zone()).Add(0, 2).Add(4, 6).Build();
@@ -166,7 +154,6 @@ TEST_F(LiveRangeUnitTest, SplitManyIntervalNoUsePositionsAfter) {
EXPECT_TRUE(RangesMatch(expected_bottom, child));
}
-
TEST_F(LiveRangeUnitTest, SplitSingleIntervalUsePositions) {
TopLevelLiveRange* range =
TestRangeBuilder(zone()).Add(0, 3).AddUse(0).AddUse(2).Build();
@@ -184,7 +171,6 @@ TEST_F(LiveRangeUnitTest, SplitSingleIntervalUsePositions) {
EXPECT_TRUE(RangesMatch(expected_bottom, child));
}
-
TEST_F(LiveRangeUnitTest, SplitSingleIntervalUsePositionsAtPos) {
TopLevelLiveRange* range =
TestRangeBuilder(zone()).Add(0, 3).AddUse(0).AddUse(2).Build();
@@ -201,7 +187,6 @@ TEST_F(LiveRangeUnitTest, SplitSingleIntervalUsePositionsAtPos) {
EXPECT_TRUE(RangesMatch(expected_bottom, child));
}
-
TEST_F(LiveRangeUnitTest, SplitManyIntervalUsePositionsBetween) {
TopLevelLiveRange* range =
TestRangeBuilder(zone()).Add(0, 2).Add(4, 6).AddUse(1).AddUse(5).Build();
@@ -218,7 +203,6 @@ TEST_F(LiveRangeUnitTest, SplitManyIntervalUsePositionsBetween) {
EXPECT_TRUE(RangesMatch(expected_bottom, child));
}
-
TEST_F(LiveRangeUnitTest, SplitManyIntervalUsePositionsAtInterval) {
TopLevelLiveRange* range =
TestRangeBuilder(zone()).Add(0, 2).Add(4, 6).AddUse(1).AddUse(4).Build();
@@ -235,7 +219,6 @@ TEST_F(LiveRangeUnitTest, SplitManyIntervalUsePositionsAtInterval) {
EXPECT_TRUE(RangesMatch(expected_bottom, child));
}
-
TEST_F(LiveRangeUnitTest, SplitManyIntervalUsePositionsFront) {
TopLevelLiveRange* range =
TestRangeBuilder(zone()).Add(0, 2).Add(4, 6).AddUse(1).AddUse(5).Build();
@@ -252,7 +235,6 @@ TEST_F(LiveRangeUnitTest, SplitManyIntervalUsePositionsFront) {
EXPECT_TRUE(RangesMatch(expected_bottom, child));
}
-
TEST_F(LiveRangeUnitTest, SplitManyIntervalUsePositionsAfter) {
TopLevelLiveRange* range =
TestRangeBuilder(zone()).Add(0, 2).Add(4, 6).AddUse(1).AddUse(5).Build();
@@ -268,7 +250,6 @@ TEST_F(LiveRangeUnitTest, SplitManyIntervalUsePositionsAfter) {
EXPECT_TRUE(RangesMatch(expected_bottom, child));
}
-
TEST_F(LiveRangeUnitTest, SplinterSingleInterval) {
TopLevelLiveRange* range = TestRangeBuilder(zone()).Build(0, 6);
TopLevelLiveRange* splinter = Splinter(range, 3, 5);
@@ -283,7 +264,6 @@ TEST_F(LiveRangeUnitTest, SplinterSingleInterval) {
EXPECT_TRUE(RangesMatch(expected_splinter, splinter));
}
-
TEST_F(LiveRangeUnitTest, MergeSingleInterval) {
TopLevelLiveRange* original = TestRangeBuilder(zone()).Build(0, 6);
TopLevelLiveRange* splinter = Splinter(original, 3, 5);
@@ -296,7 +276,6 @@ TEST_F(LiveRangeUnitTest, MergeSingleInterval) {
EXPECT_TRUE(RangesMatch(result, original));
}
-
TEST_F(LiveRangeUnitTest, SplinterMultipleIntervalsOutside) {
TopLevelLiveRange* range =
TestRangeBuilder(zone()).Add(0, 3).Add(5, 8).Build();
@@ -313,7 +292,6 @@ TEST_F(LiveRangeUnitTest, SplinterMultipleIntervalsOutside) {
EXPECT_TRUE(RangesMatch(expected_splinter, splinter));
}
-
TEST_F(LiveRangeUnitTest, MergeMultipleIntervalsOutside) {
TopLevelLiveRange* original =
TestRangeBuilder(zone()).Add(0, 3).Add(5, 8).Build();
@@ -327,14 +305,12 @@ TEST_F(LiveRangeUnitTest, MergeMultipleIntervalsOutside) {
EXPECT_TRUE(RangesMatch(result, original));
}
-
TEST_F(LiveRangeUnitTest, SplinterMultipleIntervalsInside) {
TopLevelLiveRange* range =
TestRangeBuilder(zone()).Add(0, 3).Add(5, 8).Build();
V8_ASSERT_DEBUG_DEATH(Splinter(range, 3, 5), ".*");
}
-
TEST_F(LiveRangeUnitTest, SplinterMultipleIntervalsLeft) {
TopLevelLiveRange* range =
TestRangeBuilder(zone()).Add(0, 3).Add(5, 8).Build();
@@ -350,7 +326,6 @@ TEST_F(LiveRangeUnitTest, SplinterMultipleIntervalsLeft) {
EXPECT_TRUE(RangesMatch(expected_splinter, splinter));
}
-
TEST_F(LiveRangeUnitTest, MergeMultipleIntervalsLeft) {
TopLevelLiveRange* original =
TestRangeBuilder(zone()).Add(0, 3).Add(5, 8).Build();
@@ -363,7 +338,6 @@ TEST_F(LiveRangeUnitTest, MergeMultipleIntervalsLeft) {
EXPECT_TRUE(RangesMatch(result, original));
}
-
TEST_F(LiveRangeUnitTest, SplinterMultipleIntervalsRight) {
TopLevelLiveRange* range =
TestRangeBuilder(zone()).Add(0, 3).Add(5, 8).Build();
@@ -379,7 +353,6 @@ TEST_F(LiveRangeUnitTest, SplinterMultipleIntervalsRight) {
EXPECT_TRUE(RangesMatch(expected_splinter, splinter));
}
-
TEST_F(LiveRangeUnitTest, SplinterMergeMultipleTimes) {
TopLevelLiveRange* range =
TestRangeBuilder(zone()).Add(0, 3).Add(5, 10).Add(12, 16).Build();
@@ -398,7 +371,6 @@ TEST_F(LiveRangeUnitTest, SplinterMergeMultipleTimes) {
EXPECT_TRUE(RangesMatch(expected_splinter, splinter));
}
-
TEST_F(LiveRangeUnitTest, MergeMultipleIntervalsRight) {
TopLevelLiveRange* original =
TestRangeBuilder(zone()).Add(0, 3).Add(5, 8).Build();
@@ -413,7 +385,6 @@ TEST_F(LiveRangeUnitTest, MergeMultipleIntervalsRight) {
EXPECT_TRUE(RangesMatch(result, original));
}
-
TEST_F(LiveRangeUnitTest, MergeAfterSplitting) {
TopLevelLiveRange* original = TestRangeBuilder(zone()).Build(0, 8);
TopLevelLiveRange* splinter = Splinter(original, 4, 6);
@@ -430,7 +401,6 @@ TEST_F(LiveRangeUnitTest, MergeAfterSplitting) {
EXPECT_TRUE(RangesMatch(result, original));
}
-
TEST_F(LiveRangeUnitTest, IDGeneration) {
TopLevelLiveRange* vreg = TestRangeBuilder(zone()).Id(2).Build(0, 100);
EXPECT_EQ(2, vreg->vreg());
diff --git a/deps/v8/test/unittests/compiler/move-optimizer-unittest.cc b/deps/v8/test/unittests/compiler/regalloc/move-optimizer-unittest.cc
index 71571488e1..d61543a252 100644
--- a/deps/v8/test/unittests/compiler/move-optimizer-unittest.cc
+++ b/deps/v8/test/unittests/compiler/regalloc/move-optimizer-unittest.cc
@@ -98,7 +98,6 @@ class MoveOptimizerTest : public InstructionSequenceTest {
}
};
-
TEST_F(MoveOptimizerTest, RemovesRedundant) {
StartBlock();
auto first_instr = EmitNop();
@@ -127,7 +126,6 @@ TEST_F(MoveOptimizerTest, RemovesRedundant) {
CHECK(Contains(move, FPReg(kF32_1, kFloat32), FPReg(kF32_2, kFloat32)));
}
-
TEST_F(MoveOptimizerTest, RemovesRedundantExplicit) {
int index1 = GetAllocatableCode(0);
int index2 = GetAllocatableCode(1);
@@ -167,7 +165,6 @@ TEST_F(MoveOptimizerTest, RemovesRedundantExplicit) {
CHECK(Contains(move, FPReg(f32_1, kFloat32), ExplicitFPReg(f32_2, kFloat32)));
}
-
TEST_F(MoveOptimizerTest, SplitsConstants) {
StartBlock();
EndBlock(Last());
@@ -191,7 +188,6 @@ TEST_F(MoveOptimizerTest, SplitsConstants) {
CHECK(Contains(move, Reg(0), Slot(2)));
}
-
TEST_F(MoveOptimizerTest, SimpleMerge) {
StartBlock();
EndBlock(Branch(Imm(), 1, 2));
@@ -227,7 +223,6 @@ TEST_F(MoveOptimizerTest, SimpleMerge) {
CHECK(Contains(move, FPReg(kF32_1, kFloat32), FPReg(kF32_2, kFloat32)));
}
-
TEST_F(MoveOptimizerTest, SimpleMergeCycle) {
StartBlock();
EndBlock(Branch(Imm(), 1, 2));
@@ -279,7 +274,6 @@ TEST_F(MoveOptimizerTest, SimpleMergeCycle) {
CHECK(Contains(move, FPReg(kF32_2, kFloat32), FPReg(kF32_1, kFloat32)));
}
-
TEST_F(MoveOptimizerTest, GapsCanMoveOverInstruction) {
StartBlock();
int const_index = 1;
@@ -317,7 +311,6 @@ TEST_F(MoveOptimizerTest, GapsCanMoveOverInstruction) {
CHECK_EQ(1, assignment);
}
-
TEST_F(MoveOptimizerTest, SubsetMovesMerge) {
StartBlock();
EndBlock(Branch(Imm(), 1, 2));
@@ -354,7 +347,6 @@ TEST_F(MoveOptimizerTest, SubsetMovesMerge) {
CHECK(Contains(b2_move, Reg(4), Reg(5)));
}
-
TEST_F(MoveOptimizerTest, GapConflictSubsetMovesDoNotMerge) {
StartBlock();
EndBlock(Branch(Imm(), 1, 2));
diff --git a/deps/v8/test/unittests/compiler/register-allocator-unittest.cc b/deps/v8/test/unittests/compiler/regalloc/register-allocator-unittest.cc
index 0533ee7406..4ae2ee9acd 100644
--- a/deps/v8/test/unittests/compiler/register-allocator-unittest.cc
+++ b/deps/v8/test/unittests/compiler/regalloc/register-allocator-unittest.cc
@@ -9,7 +9,6 @@ namespace v8 {
namespace internal {
namespace compiler {
-
namespace {
// We can't just use the size of the moves collection, because of
@@ -23,7 +22,6 @@ int GetMoveCount(const ParallelMove& moves) {
return move_count;
}
-
bool AreOperandsOfSameType(
const AllocatedOperand& op,
const InstructionSequenceTest::TestOperand& test_op) {
@@ -36,7 +34,6 @@ bool AreOperandsOfSameType(
(op.IsStackSlot() && !test_op_is_reg);
}
-
bool AllocatedOperandMatches(
const AllocatedOperand& op,
const InstructionSequenceTest::TestOperand& test_op) {
@@ -46,7 +43,6 @@ bool AllocatedOperandMatches(
test_op.value_ == InstructionSequenceTest::kNoValue);
}
-
int GetParallelMoveCount(int instr_index, Instruction::GapPosition gap_pos,
const InstructionSequence* sequence) {
const ParallelMove* moves =
@@ -55,7 +51,6 @@ int GetParallelMoveCount(int instr_index, Instruction::GapPosition gap_pos,
return GetMoveCount(*moves);
}
-
bool IsParallelMovePresent(int instr_index, Instruction::GapPosition gap_pos,
const InstructionSequence* sequence,
const InstructionSequenceTest::TestOperand& src,
@@ -79,7 +74,6 @@ bool IsParallelMovePresent(int instr_index, Instruction::GapPosition gap_pos,
} // namespace
-
class RegisterAllocatorTest : public InstructionSequenceTest {
public:
void Allocate() {
@@ -88,7 +82,6 @@ class RegisterAllocatorTest : public InstructionSequenceTest {
}
};
-
TEST_F(RegisterAllocatorTest, CanAllocateThreeRegisters) {
// return p0 + p1;
StartBlock();
@@ -136,7 +129,6 @@ TEST_F(RegisterAllocatorTest, SimpleLoop) {
Allocate();
}
-
TEST_F(RegisterAllocatorTest, SimpleBranch) {
// return i ? K1 : K2
StartBlock();
@@ -154,7 +146,6 @@ TEST_F(RegisterAllocatorTest, SimpleBranch) {
Allocate();
}
-
TEST_F(RegisterAllocatorTest, SimpleDiamond) {
// return p0 ? p0 : p0
StartBlock();
@@ -174,7 +165,6 @@ TEST_F(RegisterAllocatorTest, SimpleDiamond) {
Allocate();
}
-
TEST_F(RegisterAllocatorTest, SimpleDiamondPhi) {
// return i ? K1 : K2
StartBlock();
@@ -195,7 +185,6 @@ TEST_F(RegisterAllocatorTest, SimpleDiamondPhi) {
Allocate();
}
-
TEST_F(RegisterAllocatorTest, DiamondManyPhis) {
const int kPhis = kDefaultNRegs * 2;
@@ -227,7 +216,6 @@ TEST_F(RegisterAllocatorTest, DiamondManyPhis) {
Allocate();
}
-
TEST_F(RegisterAllocatorTest, DoubleDiamondManyRedundantPhis) {
const int kPhis = kDefaultNRegs * 2;
@@ -266,7 +254,6 @@ TEST_F(RegisterAllocatorTest, DoubleDiamondManyRedundantPhis) {
Allocate();
}
-
TEST_F(RegisterAllocatorTest, RegressionPhisNeedTooManyRegisters) {
const size_t kNumRegs = 3;
const size_t kParams = kNumRegs + 1;
@@ -315,17 +302,16 @@ TEST_F(RegisterAllocatorTest, RegressionPhisNeedTooManyRegisters) {
Allocate();
}
-
TEST_F(RegisterAllocatorTest, SpillPhi) {
StartBlock();
EndBlock(Branch(Imm(), 1, 2));
StartBlock();
- auto left = Define(Reg(GetAllocatableCode(0)));
+ auto left = Define(Reg(0));
EndBlock(Jump(2));
StartBlock();
- auto right = Define(Reg(GetAllocatableCode(0)));
+ auto right = Define(Reg(0));
EndBlock();
StartBlock();
@@ -337,7 +323,6 @@ TEST_F(RegisterAllocatorTest, SpillPhi) {
Allocate();
}
-
TEST_F(RegisterAllocatorTest, MoveLotsOfConstants) {
StartBlock();
VReg constants[kDefaultNRegs];
@@ -357,7 +342,6 @@ TEST_F(RegisterAllocatorTest, MoveLotsOfConstants) {
Allocate();
}
-
TEST_F(RegisterAllocatorTest, SplitBeforeInstruction) {
const int kNumRegs = 6;
SetNumRegs(kNumRegs, kNumRegs);
@@ -383,7 +367,6 @@ TEST_F(RegisterAllocatorTest, SplitBeforeInstruction) {
Allocate();
}
-
TEST_F(RegisterAllocatorTest, SplitBeforeInstruction2) {
const int kNumRegs = 6;
SetNumRegs(kNumRegs, kNumRegs);
@@ -408,7 +391,6 @@ TEST_F(RegisterAllocatorTest, SplitBeforeInstruction2) {
Allocate();
}
-
TEST_F(RegisterAllocatorTest, NestedDiamondPhiMerge) {
// Outer diamond.
StartBlock();
@@ -455,7 +437,6 @@ TEST_F(RegisterAllocatorTest, NestedDiamondPhiMerge) {
Allocate();
}
-
TEST_F(RegisterAllocatorTest, NestedDiamondPhiMergeDifferent) {
// Outer diamond.
StartBlock();
@@ -502,7 +483,6 @@ TEST_F(RegisterAllocatorTest, NestedDiamondPhiMergeDifferent) {
Allocate();
}
-
TEST_F(RegisterAllocatorTest, RegressionSplitBeforeAndMove) {
StartBlock();
@@ -529,7 +509,6 @@ TEST_F(RegisterAllocatorTest, RegressionSplitBeforeAndMove) {
Allocate();
}
-
TEST_F(RegisterAllocatorTest, RegressionSpillTwice) {
StartBlock();
auto p_0 = Parameter(Reg(1));
@@ -539,7 +518,6 @@ TEST_F(RegisterAllocatorTest, RegressionSpillTwice) {
Allocate();
}
-
TEST_F(RegisterAllocatorTest, RegressionLoadConstantBeforeSpill) {
StartBlock();
// Fill registers.
@@ -574,7 +552,6 @@ TEST_F(RegisterAllocatorTest, RegressionLoadConstantBeforeSpill) {
Allocate();
}
-
TEST_F(RegisterAllocatorTest, DiamondWithCallFirstBlock) {
StartBlock();
auto x = EmitOI(Reg(0));
@@ -595,7 +572,6 @@ TEST_F(RegisterAllocatorTest, DiamondWithCallFirstBlock) {
Allocate();
}
-
TEST_F(RegisterAllocatorTest, DiamondWithCallSecondBlock) {
StartBlock();
auto x = EmitOI(Reg(0));
@@ -616,7 +592,6 @@ TEST_F(RegisterAllocatorTest, DiamondWithCallSecondBlock) {
Allocate();
}
-
TEST_F(RegisterAllocatorTest, SingleDeferredBlockSpill) {
StartBlock(); // B0
auto var = EmitOI(Reg(0));
@@ -655,7 +630,6 @@ TEST_F(RegisterAllocatorTest, SingleDeferredBlockSpill) {
sequence(), Reg(0), Slot(0)));
}
-
TEST_F(RegisterAllocatorTest, MultipleDeferredBlockSpills) {
if (!FLAG_turbo_preprocess_ranges) return;
@@ -706,12 +680,10 @@ TEST_F(RegisterAllocatorTest, MultipleDeferredBlockSpills) {
EXPECT_TRUE(IsParallelMovePresent(end_of_b2, Instruction::START, sequence(),
Slot(var3_slot), Reg()));
-
EXPECT_EQ(0,
GetParallelMoveCount(start_of_b3, Instruction::START, sequence()));
}
-
namespace {
enum class ParameterType { kFixedSlot, kSlot, kRegister, kFixedRegister };
@@ -738,7 +710,6 @@ class SlotConstraintTest : public RegisterAllocatorTest,
} // namespace
-
#if GTEST_HAS_COMBINE
TEST_P(SlotConstraintTest, SlotConstraint) {
@@ -785,7 +756,6 @@ TEST_P(SlotConstraintTest, SlotConstraint) {
Allocate();
}
-
INSTANTIATE_TEST_CASE_P(
RegisterAllocatorTest, SlotConstraintTest,
::testing::Combine(::testing::ValuesIn(kParameterTypes),
diff --git a/deps/v8/test/unittests/compiler/simplified-operator-reducer-unittest.cc b/deps/v8/test/unittests/compiler/simplified-operator-reducer-unittest.cc
index 6f37609f3a..f5ae91d7c1 100644
--- a/deps/v8/test/unittests/compiler/simplified-operator-reducer-unittest.cc
+++ b/deps/v8/test/unittests/compiler/simplified-operator-reducer-unittest.cc
@@ -336,6 +336,22 @@ TEST_F(SimplifiedOperatorReducerTest, TruncateTaggedToWord32WithConstant) {
}
// -----------------------------------------------------------------------------
+// CheckedFloat64ToInt32
+
+TEST_F(SimplifiedOperatorReducerTest, CheckedFloat64ToInt32WithConstant) {
+ Node* effect = graph()->start();
+ Node* control = graph()->start();
+ TRACED_FOREACH(int32_t, n, kInt32Values) {
+ Reduction r = Reduce(
+ graph()->NewNode(simplified()->CheckedFloat64ToInt32(
+ CheckForMinusZeroMode::kDontCheckForMinusZero),
+ Float64Constant(n), effect, control));
+ ASSERT_TRUE(r.Changed());
+ EXPECT_THAT(r.replacement(), IsInt32Constant(n));
+ }
+}
+
+// -----------------------------------------------------------------------------
// CheckHeapObject
TEST_F(SimplifiedOperatorReducerTest, CheckHeapObjectWithChangeBitToTagged) {
diff --git a/deps/v8/test/unittests/compiler/state-values-utils-unittest.cc b/deps/v8/test/unittests/compiler/state-values-utils-unittest.cc
index 311b90a8d1..388dd56247 100644
--- a/deps/v8/test/unittests/compiler/state-values-utils-unittest.cc
+++ b/deps/v8/test/unittests/compiler/state-values-utils-unittest.cc
@@ -3,6 +3,7 @@
// found in the LICENSE file.
#include "src/compiler/state-values-utils.h"
+#include "src/bit-vector.h"
#include "test/unittests/compiler/graph-unittest.h"
#include "test/unittests/compiler/node-test-utils.h"
#include "test/unittests/test-utils.h"
@@ -18,8 +19,9 @@ class StateValuesIteratorTest : public GraphTest {
Node* StateValuesFromVector(NodeVector* nodes) {
int count = static_cast<int>(nodes->size());
- return graph()->NewNode(common()->StateValues(count), count,
- count == 0 ? nullptr : &(nodes->front()));
+ return graph()->NewNode(
+ common()->StateValues(count, SparseInputMask::Dense()), count,
+ count == 0 ? nullptr : &(nodes->front()));
}
};
@@ -107,7 +109,8 @@ TEST_F(StateValuesIteratorTest, TreeFromVector) {
// Build the tree.
StateValuesCache builder(&jsgraph);
Node* values_node = builder.GetNodeForValues(
- inputs.size() == 0 ? nullptr : &(inputs.front()), inputs.size());
+ inputs.size() == 0 ? nullptr : &(inputs.front()), inputs.size(),
+ nullptr);
// Check the tree contents with vector.
int i = 0;
@@ -119,6 +122,46 @@ TEST_F(StateValuesIteratorTest, TreeFromVector) {
}
}
+TEST_F(StateValuesIteratorTest, TreeFromVectorWithLiveness) {
+ int sizes[] = {0, 1, 2, 100, 5000, 30000};
+ TRACED_FOREACH(int, count, sizes) {
+ JSOperatorBuilder javascript(zone());
+ MachineOperatorBuilder machine(zone());
+ JSGraph jsgraph(isolate(), graph(), common(), &javascript, nullptr,
+ &machine);
+
+ // Generate the input vector.
+ NodeVector inputs(zone());
+ for (int i = 0; i < count; i++) {
+ inputs.push_back(Int32Constant(i));
+ }
+ // Generate the input liveness.
+ BitVector liveness(count, zone());
+ for (int i = 0; i < count; i++) {
+ if (i % 3 == 0) {
+ liveness.Add(i);
+ }
+ }
+
+ // Build the tree.
+ StateValuesCache builder(&jsgraph);
+ Node* values_node = builder.GetNodeForValues(
+ inputs.size() == 0 ? nullptr : &(inputs.front()), inputs.size(),
+ &liveness);
+
+ // Check the tree contents with vector.
+ int i = 0;
+ for (StateValuesAccess::TypedNode node : StateValuesAccess(values_node)) {
+ if (liveness.Contains(i)) {
+ EXPECT_THAT(node.node, IsInt32Constant(i));
+ } else {
+ EXPECT_EQ(node.node, nullptr);
+ }
+ i++;
+ }
+ EXPECT_EQ(inputs.size(), static_cast<size_t>(i));
+ }
+}
TEST_F(StateValuesIteratorTest, BuildTreeIdentical) {
int sizes[] = {0, 1, 2, 100, 5000, 30000};
@@ -137,9 +180,46 @@ TEST_F(StateValuesIteratorTest, BuildTreeIdentical) {
// Build two trees from the same data.
StateValuesCache builder(&jsgraph);
Node* node1 = builder.GetNodeForValues(
- inputs.size() == 0 ? nullptr : &(inputs.front()), inputs.size());
+ inputs.size() == 0 ? nullptr : &(inputs.front()), inputs.size(),
+ nullptr);
+ Node* node2 = builder.GetNodeForValues(
+ inputs.size() == 0 ? nullptr : &(inputs.front()), inputs.size(),
+ nullptr);
+
+ // The trees should be equal since the data was the same.
+ EXPECT_EQ(node1, node2);
+ }
+}
+
+TEST_F(StateValuesIteratorTest, BuildTreeWithLivenessIdentical) {
+ int sizes[] = {0, 1, 2, 100, 5000, 30000};
+ TRACED_FOREACH(int, count, sizes) {
+ JSOperatorBuilder javascript(zone());
+ MachineOperatorBuilder machine(zone());
+ JSGraph jsgraph(isolate(), graph(), common(), &javascript, nullptr,
+ &machine);
+
+ // Generate the input vector.
+ NodeVector inputs(zone());
+ for (int i = 0; i < count; i++) {
+ inputs.push_back(Int32Constant(i));
+ }
+ // Generate the input liveness.
+ BitVector liveness(count, zone());
+ for (int i = 0; i < count; i++) {
+ if (i % 3 == 0) {
+ liveness.Add(i);
+ }
+ }
+
+ // Build two trees from the same data.
+ StateValuesCache builder(&jsgraph);
+ Node* node1 = builder.GetNodeForValues(
+ inputs.size() == 0 ? nullptr : &(inputs.front()), inputs.size(),
+ &liveness);
Node* node2 = builder.GetNodeForValues(
- inputs.size() == 0 ? nullptr : &(inputs.front()), inputs.size());
+ inputs.size() == 0 ? nullptr : &(inputs.front()), inputs.size(),
+ &liveness);
// The trees should be equal since the data was the same.
EXPECT_EQ(node1, node2);
diff --git a/deps/v8/test/unittests/compiler/typer-unittest.cc b/deps/v8/test/unittests/compiler/typer-unittest.cc
index 3ef436e5c8..c9b5c9db34 100644
--- a/deps/v8/test/unittests/compiler/typer-unittest.cc
+++ b/deps/v8/test/unittests/compiler/typer-unittest.cc
@@ -223,8 +223,8 @@ class TyperTest : public TypedGraphTest {
namespace {
-int32_t shift_left(int32_t x, int32_t y) { return x << y; }
-int32_t shift_right(int32_t x, int32_t y) { return x >> y; }
+int32_t shift_left(int32_t x, int32_t y) { return x << (y & 0x1f); }
+int32_t shift_right(int32_t x, int32_t y) { return x >> (y & 0x1f); }
int32_t bit_or(int32_t x, int32_t y) { return x | y; }
int32_t bit_and(int32_t x, int32_t y) { return x & y; }
int32_t bit_xor(int32_t x, int32_t y) { return x ^ y; }
diff --git a/deps/v8/test/unittests/counters-unittest.cc b/deps/v8/test/unittests/counters-unittest.cc
index 822a5c552e..79678d5a08 100644
--- a/deps/v8/test/unittests/counters-unittest.cc
+++ b/deps/v8/test/unittests/counters-unittest.cc
@@ -4,9 +4,11 @@
#include <vector>
+#include "src/counters-inl.h"
#include "src/counters.h"
#include "src/handles-inl.h"
#include "src/objects-inl.h"
+#include "src/tracing/tracing-category-observer.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace v8 {
@@ -42,6 +44,42 @@ class AggregatedMemoryHistogramTest : public ::testing::Test {
MockHistogram mock_;
};
+class RuntimeCallStatsTest : public ::testing::Test {
+ public:
+ RuntimeCallStatsTest() {
+ FLAG_runtime_stats =
+ v8::tracing::TracingCategoryObserver::ENABLED_BY_NATIVE;
+ }
+ virtual ~RuntimeCallStatsTest() {}
+
+ RuntimeCallStats* stats() { return &stats_; }
+ RuntimeCallStats::CounterId counter_id() {
+ return &RuntimeCallStats::TestCounter1;
+ }
+ RuntimeCallStats::CounterId counter_id2() {
+ return &RuntimeCallStats::TestCounter2;
+ }
+ RuntimeCallStats::CounterId counter_id3() {
+ return &RuntimeCallStats::TestCounter3;
+ }
+ RuntimeCallCounter* counter() { return &(stats()->*counter_id()); }
+ RuntimeCallCounter* counter2() { return &(stats()->*counter_id2()); }
+ RuntimeCallCounter* counter3() { return &(stats()->*counter_id3()); }
+ void Sleep(int32_t milliseconds) {
+ base::ElapsedTimer timer;
+ base::TimeDelta delta = base::TimeDelta::FromMilliseconds(milliseconds);
+ timer.Start();
+ while (!timer.HasExpired(delta)) {
+ base::OS::Sleep(base::TimeDelta::FromMicroseconds(0));
+ }
+ }
+
+ const uint32_t kEpsilonMs = 20;
+
+ private:
+ RuntimeCallStats stats_;
+};
+
} // namespace
@@ -195,6 +233,278 @@ TEST_F(AggregatedMemoryHistogramTest, ManySamples2) {
}
}
+#define EXPECT_IN_RANGE(start, value, end) \
+ EXPECT_LE(start, value); \
+ EXPECT_GE(end, value)
+
+TEST_F(RuntimeCallStatsTest, RuntimeCallTimer) {
+ RuntimeCallTimer timer;
+
+ Sleep(50);
+ RuntimeCallStats::Enter(stats(), &timer, counter_id());
+ EXPECT_EQ(counter(), timer.counter());
+ EXPECT_EQ(nullptr, timer.parent());
+ EXPECT_TRUE(timer.IsStarted());
+ EXPECT_EQ(&timer, stats()->current_timer());
+
+ Sleep(100);
+
+ RuntimeCallStats::Leave(stats(), &timer);
+ Sleep(50);
+ EXPECT_FALSE(timer.IsStarted());
+ EXPECT_EQ(1, counter()->count());
+ EXPECT_IN_RANGE(100, counter()->time().InMilliseconds(), 100 + kEpsilonMs);
+}
+
+TEST_F(RuntimeCallStatsTest, RuntimeCallTimerSubTimer) {
+ RuntimeCallTimer timer;
+ RuntimeCallTimer timer2;
+
+ RuntimeCallStats::Enter(stats(), &timer, counter_id());
+ EXPECT_TRUE(timer.IsStarted());
+ EXPECT_FALSE(timer2.IsStarted());
+ EXPECT_EQ(counter(), timer.counter());
+ EXPECT_EQ(nullptr, timer.parent());
+ EXPECT_EQ(&timer, stats()->current_timer());
+
+ Sleep(50);
+
+ RuntimeCallStats::Enter(stats(), &timer2, counter_id2());
+ // timer 1 is paused, while timer 2 is active.
+ EXPECT_TRUE(timer2.IsStarted());
+ EXPECT_EQ(counter(), timer.counter());
+ EXPECT_EQ(counter2(), timer2.counter());
+ EXPECT_EQ(nullptr, timer.parent());
+ EXPECT_EQ(&timer, timer2.parent());
+ EXPECT_EQ(&timer2, stats()->current_timer());
+
+ Sleep(100);
+ RuntimeCallStats::Leave(stats(), &timer2);
+
+ // The subtimer subtracts its time from the parent timer.
+ EXPECT_TRUE(timer.IsStarted());
+ EXPECT_FALSE(timer2.IsStarted());
+ EXPECT_EQ(0, counter()->count());
+ EXPECT_EQ(1, counter2()->count());
+ EXPECT_EQ(0, counter()->time().InMilliseconds());
+ EXPECT_IN_RANGE(100, counter2()->time().InMilliseconds(), 100 + kEpsilonMs);
+ EXPECT_EQ(&timer, stats()->current_timer());
+
+ Sleep(100);
+
+ RuntimeCallStats::Leave(stats(), &timer);
+ EXPECT_FALSE(timer.IsStarted());
+ EXPECT_EQ(1, counter()->count());
+ EXPECT_EQ(1, counter2()->count());
+ EXPECT_IN_RANGE(150, counter()->time().InMilliseconds(), 150 + kEpsilonMs);
+ EXPECT_IN_RANGE(100, counter2()->time().InMilliseconds(), 100 + kEpsilonMs);
+ EXPECT_EQ(nullptr, stats()->current_timer());
+}
+
+TEST_F(RuntimeCallStatsTest, RuntimeCallTimerRecursive) {
+ RuntimeCallTimer timer;
+ RuntimeCallTimer timer2;
+
+ RuntimeCallStats::Enter(stats(), &timer, counter_id());
+ EXPECT_EQ(counter(), timer.counter());
+ EXPECT_EQ(nullptr, timer.parent());
+ EXPECT_TRUE(timer.IsStarted());
+ EXPECT_EQ(&timer, stats()->current_timer());
+
+ RuntimeCallStats::Enter(stats(), &timer2, counter_id());
+ EXPECT_EQ(counter(), timer2.counter());
+ EXPECT_EQ(nullptr, timer.parent());
+ EXPECT_EQ(&timer, timer2.parent());
+ EXPECT_TRUE(timer2.IsStarted());
+ EXPECT_EQ(&timer2, stats()->current_timer());
+
+ Sleep(50);
+
+ RuntimeCallStats::Leave(stats(), &timer2);
+ EXPECT_EQ(nullptr, timer.parent());
+ EXPECT_FALSE(timer2.IsStarted());
+ EXPECT_TRUE(timer.IsStarted());
+ EXPECT_EQ(1, counter()->count());
+ EXPECT_IN_RANGE(50, counter()->time().InMilliseconds(), 50 + kEpsilonMs);
+
+ Sleep(100);
+
+ RuntimeCallStats::Leave(stats(), &timer);
+ EXPECT_FALSE(timer.IsStarted());
+ EXPECT_EQ(2, counter()->count());
+ EXPECT_IN_RANGE(150, counter()->time().InMilliseconds(),
+ 150 + 2 * kEpsilonMs);
+}
+
+TEST_F(RuntimeCallStatsTest, RuntimeCallTimerScope) {
+ {
+ RuntimeCallTimerScope scope(stats(), counter_id());
+ Sleep(50);
+ }
+ Sleep(100);
+ EXPECT_EQ(1, counter()->count());
+ EXPECT_IN_RANGE(50, counter()->time().InMilliseconds(), 50 + kEpsilonMs);
+ {
+ RuntimeCallTimerScope scope(stats(), counter_id());
+ Sleep(50);
+ }
+ EXPECT_EQ(2, counter()->count());
+ EXPECT_IN_RANGE(100, counter()->time().InMilliseconds(),
+ 100 + 2 * kEpsilonMs);
+}
+
+TEST_F(RuntimeCallStatsTest, RuntimeCallTimerScopeRecursive) {
+ {
+ RuntimeCallTimerScope scope(stats(), counter_id());
+ Sleep(50);
+ EXPECT_EQ(0, counter()->count());
+ EXPECT_EQ(0, counter()->time().InMilliseconds());
+ {
+ RuntimeCallTimerScope scope(stats(), counter_id());
+ Sleep(50);
+ }
+ EXPECT_EQ(1, counter()->count());
+ EXPECT_IN_RANGE(50, counter()->time().InMilliseconds(), 50 + kEpsilonMs);
+ }
+ EXPECT_EQ(2, counter()->count());
+ EXPECT_IN_RANGE(100, counter()->time().InMilliseconds(),
+ 100 + 2 * kEpsilonMs);
+}
+
+TEST_F(RuntimeCallStatsTest, RenameTimer) {
+ {
+ RuntimeCallTimerScope scope(stats(), counter_id());
+ Sleep(50);
+ EXPECT_EQ(0, counter()->count());
+ EXPECT_EQ(0, counter2()->count());
+ EXPECT_EQ(0, counter()->time().InMilliseconds());
+ EXPECT_EQ(0, counter2()->time().InMilliseconds());
+ {
+ RuntimeCallTimerScope scope(stats(), counter_id());
+ Sleep(100);
+ }
+ CHANGE_CURRENT_RUNTIME_COUNTER(stats(), TestCounter2);
+ EXPECT_EQ(1, counter()->count());
+ EXPECT_EQ(0, counter2()->count());
+ EXPECT_IN_RANGE(100, counter()->time().InMilliseconds(), 100 + kEpsilonMs);
+ EXPECT_IN_RANGE(0, counter2()->time().InMilliseconds(), 0);
+ }
+ EXPECT_EQ(1, counter()->count());
+ EXPECT_EQ(1, counter2()->count());
+ EXPECT_IN_RANGE(100, counter()->time().InMilliseconds(), 100 + kEpsilonMs);
+ EXPECT_IN_RANGE(50, counter2()->time().InMilliseconds(), 50 + kEpsilonMs);
+}
+
+TEST_F(RuntimeCallStatsTest, BasicPrintAndSnapshot) {
+ std::ostringstream out;
+ stats()->Print(out);
+ EXPECT_EQ(0, counter()->count());
+ EXPECT_EQ(0, counter2()->count());
+ EXPECT_EQ(0, counter3()->count());
+ EXPECT_EQ(0, counter()->time().InMilliseconds());
+ EXPECT_EQ(0, counter2()->time().InMilliseconds());
+ EXPECT_EQ(0, counter3()->time().InMilliseconds());
+
+ {
+ RuntimeCallTimerScope scope(stats(), counter_id());
+ Sleep(50);
+ stats()->Print(out);
+ }
+ stats()->Print(out);
+ EXPECT_EQ(1, counter()->count());
+ EXPECT_EQ(0, counter2()->count());
+ EXPECT_EQ(0, counter3()->count());
+ EXPECT_IN_RANGE(50, counter()->time().InMilliseconds(), 50 + kEpsilonMs);
+ EXPECT_EQ(0, counter2()->time().InMilliseconds());
+ EXPECT_EQ(0, counter3()->time().InMilliseconds());
+}
+
+TEST_F(RuntimeCallStatsTest, PrintAndSnapshot) {
+ {
+ RuntimeCallTimerScope scope(stats(), counter_id());
+ Sleep(100);
+ EXPECT_EQ(0, counter()->count());
+ EXPECT_EQ(0, counter()->time().InMilliseconds());
+ {
+ RuntimeCallTimerScope scope(stats(), counter_id2());
+ EXPECT_EQ(0, counter2()->count());
+ EXPECT_EQ(0, counter2()->time().InMilliseconds());
+ Sleep(50);
+
+ // This calls Snapshot on the currently active timer and synchronizes and
+ // commits the whole timer stack.
+ std::ostringstream out;
+ stats()->Print(out);
+ EXPECT_EQ(0, counter()->count());
+ EXPECT_EQ(0, counter2()->count());
+ EXPECT_IN_RANGE(100, counter()->time().InMilliseconds(),
+ 100 + kEpsilonMs);
+ EXPECT_IN_RANGE(50, counter2()->time().InMilliseconds(), 50 + kEpsilonMs);
+ // Calling Print several times shouldn't have a (big) impact on the
+ // measured times.
+ stats()->Print(out);
+ EXPECT_EQ(0, counter()->count());
+ EXPECT_EQ(0, counter2()->count());
+ EXPECT_IN_RANGE(100, counter()->time().InMilliseconds(),
+ 100 + kEpsilonMs);
+ EXPECT_IN_RANGE(50, counter2()->time().InMilliseconds(), 50 + kEpsilonMs);
+
+ Sleep(50);
+ stats()->Print(out);
+ EXPECT_EQ(0, counter()->count());
+ EXPECT_EQ(0, counter2()->count());
+ EXPECT_IN_RANGE(100, counter()->time().InMilliseconds(),
+ 100 + kEpsilonMs);
+ EXPECT_IN_RANGE(100, counter2()->time().InMilliseconds(),
+ 100 + kEpsilonMs);
+ Sleep(50);
+ }
+ Sleep(50);
+ EXPECT_EQ(0, counter()->count());
+ EXPECT_EQ(1, counter2()->count());
+ EXPECT_IN_RANGE(100, counter()->time().InMilliseconds(), 100 + kEpsilonMs);
+ EXPECT_IN_RANGE(150, counter2()->time().InMilliseconds(), 150 + kEpsilonMs);
+ Sleep(50);
+ }
+ EXPECT_EQ(1, counter()->count());
+ EXPECT_EQ(1, counter2()->count());
+ EXPECT_IN_RANGE(200, counter()->time().InMilliseconds(), 200 + kEpsilonMs);
+ EXPECT_IN_RANGE(150, counter2()->time().InMilliseconds(),
+ 150 + 2 * kEpsilonMs);
+}
+
+TEST_F(RuntimeCallStatsTest, NestedScopes) {
+ {
+ RuntimeCallTimerScope scope(stats(), counter_id());
+ Sleep(100);
+ {
+ RuntimeCallTimerScope scope(stats(), counter_id2());
+ Sleep(100);
+ {
+ RuntimeCallTimerScope scope(stats(), counter_id3());
+ Sleep(50);
+ }
+ Sleep(50);
+ {
+ RuntimeCallTimerScope scope(stats(), counter_id3());
+ Sleep(50);
+ }
+ Sleep(50);
+ }
+ Sleep(100);
+ {
+ RuntimeCallTimerScope scope(stats(), counter_id2());
+ Sleep(100);
+ }
+ Sleep(50);
+ }
+ EXPECT_EQ(1, counter()->count());
+ EXPECT_EQ(2, counter2()->count());
+ EXPECT_EQ(2, counter3()->count());
+ EXPECT_IN_RANGE(250, counter()->time().InMilliseconds(), 250 + kEpsilonMs);
+ EXPECT_IN_RANGE(300, counter2()->time().InMilliseconds(), 300 + kEpsilonMs);
+ EXPECT_IN_RANGE(100, counter3()->time().InMilliseconds(), 100 + kEpsilonMs);
+}
} // namespace internal
} // namespace v8
diff --git a/deps/v8/test/unittests/heap/embedder-tracing-unittest.cc b/deps/v8/test/unittests/heap/embedder-tracing-unittest.cc
new file mode 100644
index 0000000000..e6a6f2e250
--- /dev/null
+++ b/deps/v8/test/unittests/heap/embedder-tracing-unittest.cc
@@ -0,0 +1,163 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/heap/embedder-tracing.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace v8 {
+namespace internal {
+namespace heap {
+
+using testing::StrictMock;
+using testing::_;
+using testing::Return;
+using v8::EmbedderHeapTracer;
+using v8::internal::LocalEmbedderHeapTracer;
+
+namespace {
+
+LocalEmbedderHeapTracer::WrapperInfo CreateWrapperInfo() {
+ return LocalEmbedderHeapTracer::WrapperInfo(nullptr, nullptr);
+}
+
+} // namespace
+
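+ // gmock-based fake of the embedder's heap tracer; the tests below use it to
+ // verify that LocalEmbedderHeapTracer forwards calls to an attached remote
+ // tracer.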
+class MockEmbedderHeapTracer : public EmbedderHeapTracer {
+ public:
+ MOCK_METHOD0(TracePrologue, void());
+ MOCK_METHOD0(TraceEpilogue, void());
+ MOCK_METHOD0(AbortTracing, void());
+ MOCK_METHOD0(EnterFinalPause, void());
+ MOCK_METHOD0(NumberOfWrappersToTrace, size_t());
+ MOCK_METHOD1(RegisterV8References,
+ void(const std::vector<std::pair<void*, void*> >&));
+ MOCK_METHOD2(AdvanceTracing,
+ bool(double deadline_in_ms, AdvanceTracingActions actions));
+};
+
+TEST(LocalEmbedderHeapTracer, InUse) {
+ LocalEmbedderHeapTracer local_tracer;
+ MockEmbedderHeapTracer mock_remote_tracer;
+ local_tracer.SetRemoteTracer(&mock_remote_tracer);
+ EXPECT_TRUE(local_tracer.InUse());
+}
+
+TEST(LocalEmbedderHeapTracer, NoRemoteTracer) {
+ LocalEmbedderHeapTracer local_tracer;
+ // We should be able to call all functions without a remote tracer being
+ // attached.
+ EXPECT_FALSE(local_tracer.InUse());
+ local_tracer.TracePrologue();
+ local_tracer.EnterFinalPause();
+ bool more_work = local_tracer.Trace(
+ 0, EmbedderHeapTracer::AdvanceTracingActions(
+ EmbedderHeapTracer::ForceCompletionAction::FORCE_COMPLETION));
+ EXPECT_FALSE(more_work);
+ local_tracer.TraceEpilogue();
+}
+
+TEST(LocalEmbedderHeapTracer, TracePrologueForwards) {
+ LocalEmbedderHeapTracer local_tracer;
+ StrictMock<MockEmbedderHeapTracer> remote_tracer;
+ local_tracer.SetRemoteTracer(&remote_tracer);
+ EXPECT_CALL(remote_tracer, TracePrologue());
+ local_tracer.TracePrologue();
+}
+
+TEST(LocalEmbedderHeapTracer, TraceEpilogueForwards) {
+ LocalEmbedderHeapTracer local_tracer;
+ StrictMock<MockEmbedderHeapTracer> remote_tracer;
+ local_tracer.SetRemoteTracer(&remote_tracer);
+ EXPECT_CALL(remote_tracer, TraceEpilogue());
+ local_tracer.TraceEpilogue();
+}
+
+TEST(LocalEmbedderHeapTracer, AbortTracingForwards) {
+ LocalEmbedderHeapTracer local_tracer;
+ StrictMock<MockEmbedderHeapTracer> remote_tracer;
+ local_tracer.SetRemoteTracer(&remote_tracer);
+ EXPECT_CALL(remote_tracer, AbortTracing());
+ local_tracer.AbortTracing();
+}
+
+TEST(LocalEmbedderHeapTracer, AbortTracingClearsCachedWrappers) {
+ LocalEmbedderHeapTracer local_tracer;
+ StrictMock<MockEmbedderHeapTracer> remote_tracer;
+ local_tracer.SetRemoteTracer(&remote_tracer);
+ local_tracer.AddWrapperToTrace(CreateWrapperInfo());
+ EXPECT_CALL(remote_tracer, AbortTracing());
+ local_tracer.AbortTracing();
+ EXPECT_EQ(0u, local_tracer.NumberOfCachedWrappersToTrace());
+}
+
+TEST(LocalEmbedderHeapTracer, EnterFinalPauseForwards) {
+ LocalEmbedderHeapTracer local_tracer;
+ StrictMock<MockEmbedderHeapTracer> remote_tracer;
+ local_tracer.SetRemoteTracer(&remote_tracer);
+ EXPECT_CALL(remote_tracer, EnterFinalPause());
+ local_tracer.EnterFinalPause();
+}
+
+TEST(LocalEmbedderHeapTracer, NumberOfWrappersToTraceIncludesRemote) {
+ LocalEmbedderHeapTracer local_tracer;
+ StrictMock<MockEmbedderHeapTracer> remote_tracer;
+ local_tracer.SetRemoteTracer(&remote_tracer);
+ EXPECT_CALL(remote_tracer, NumberOfWrappersToTrace());
+ local_tracer.NumberOfWrappersToTrace();
+}
+
+TEST(LocalEmbedderHeapTracer, NumberOfCachedWrappersToTraceExcludesRemote) {
+ LocalEmbedderHeapTracer local_tracer;
+ StrictMock<MockEmbedderHeapTracer> remote_tracer;
+ local_tracer.SetRemoteTracer(&remote_tracer);
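+ // With a StrictMock and no EXPECT_CALL set up, any forwarding to the remote
+ // tracer would fail the test, so the cached count must be computed locally.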
+ local_tracer.NumberOfCachedWrappersToTrace();
+}
+
+TEST(LocalEmbedderHeapTracer, RegisterWrappersWithRemoteTracer) {
+ LocalEmbedderHeapTracer local_tracer;
+ StrictMock<MockEmbedderHeapTracer> remote_tracer;
+ local_tracer.SetRemoteTracer(&remote_tracer);
+ local_tracer.AddWrapperToTrace(CreateWrapperInfo());
+ EXPECT_EQ(1u, local_tracer.NumberOfCachedWrappersToTrace());
+ EXPECT_CALL(remote_tracer, RegisterV8References(_));
+ local_tracer.RegisterWrappersWithRemoteTracer();
+ EXPECT_EQ(0u, local_tracer.NumberOfCachedWrappersToTrace());
+ EXPECT_CALL(remote_tracer, NumberOfWrappersToTrace()).WillOnce(Return(1));
+ EXPECT_EQ(1u, local_tracer.NumberOfWrappersToTrace());
+}
+
+TEST(LocalEmbedderHeapTracer, TraceFinishes) {
+ LocalEmbedderHeapTracer local_tracer;
+ StrictMock<MockEmbedderHeapTracer> remote_tracer;
+ local_tracer.SetRemoteTracer(&remote_tracer);
+ local_tracer.AddWrapperToTrace(CreateWrapperInfo());
+ EXPECT_EQ(1u, local_tracer.NumberOfCachedWrappersToTrace());
+ EXPECT_CALL(remote_tracer, RegisterV8References(_));
+ local_tracer.RegisterWrappersWithRemoteTracer();
+ EXPECT_CALL(remote_tracer, AdvanceTracing(0, _)).WillOnce(Return(false));
+ EXPECT_FALSE(local_tracer.Trace(
+ 0, EmbedderHeapTracer::AdvanceTracingActions(
+ EmbedderHeapTracer::ForceCompletionAction::FORCE_COMPLETION)));
+ EXPECT_EQ(0u, local_tracer.NumberOfCachedWrappersToTrace());
+}
+
+TEST(LocalEmbedderHeapTracer, TraceDoesNotFinish) {
+ LocalEmbedderHeapTracer local_tracer;
+ StrictMock<MockEmbedderHeapTracer> remote_tracer;
+ local_tracer.SetRemoteTracer(&remote_tracer);
+ local_tracer.AddWrapperToTrace(CreateWrapperInfo());
+ EXPECT_EQ(1u, local_tracer.NumberOfCachedWrappersToTrace());
+ EXPECT_CALL(remote_tracer, RegisterV8References(_));
+ local_tracer.RegisterWrappersWithRemoteTracer();
+ EXPECT_CALL(remote_tracer, AdvanceTracing(0, _)).WillOnce(Return(true));
+ EXPECT_TRUE(local_tracer.Trace(
+ 0, EmbedderHeapTracer::AdvanceTracingActions(
+ EmbedderHeapTracer::ForceCompletionAction::FORCE_COMPLETION)));
+ EXPECT_EQ(0u, local_tracer.NumberOfCachedWrappersToTrace());
+}
+
+} // namespace heap
+} // namespace internal
+} // namespace v8
diff --git a/deps/v8/test/unittests/heap/gc-idle-time-handler-unittest.cc b/deps/v8/test/unittests/heap/gc-idle-time-handler-unittest.cc
index 99351b5a5e..2cd4ad939d 100644
--- a/deps/v8/test/unittests/heap/gc-idle-time-handler-unittest.cc
+++ b/deps/v8/test/unittests/heap/gc-idle-time-handler-unittest.cc
@@ -89,6 +89,7 @@ TEST_F(GCIdleTimeHandlerTest, DontDoFinalIncrementalMarkCompact) {
TEST_F(GCIdleTimeHandlerTest, ContextDisposeLowRate) {
+ if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
heap_state.contexts_disposed = 1;
heap_state.incremental_marking_stopped = true;
@@ -99,6 +100,7 @@ TEST_F(GCIdleTimeHandlerTest, ContextDisposeLowRate) {
TEST_F(GCIdleTimeHandlerTest, ContextDisposeHighRate) {
+ if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
heap_state.contexts_disposed = 1;
heap_state.contexts_disposal_rate =
@@ -111,6 +113,7 @@ TEST_F(GCIdleTimeHandlerTest, ContextDisposeHighRate) {
TEST_F(GCIdleTimeHandlerTest, AfterContextDisposeZeroIdleTime) {
+ if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
heap_state.contexts_disposed = 1;
heap_state.contexts_disposal_rate = 1.0;
@@ -122,6 +125,7 @@ TEST_F(GCIdleTimeHandlerTest, AfterContextDisposeZeroIdleTime) {
TEST_F(GCIdleTimeHandlerTest, AfterContextDisposeSmallIdleTime1) {
+ if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
heap_state.contexts_disposed = 1;
heap_state.contexts_disposal_rate =
@@ -134,6 +138,7 @@ TEST_F(GCIdleTimeHandlerTest, AfterContextDisposeSmallIdleTime1) {
TEST_F(GCIdleTimeHandlerTest, AfterContextDisposeSmallIdleTime2) {
+ if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
heap_state.contexts_disposed = 1;
heap_state.contexts_disposal_rate =
@@ -146,6 +151,7 @@ TEST_F(GCIdleTimeHandlerTest, AfterContextDisposeSmallIdleTime2) {
TEST_F(GCIdleTimeHandlerTest, IncrementalMarking1) {
+ if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
double idle_time_ms = 10;
GCIdleTimeAction action = handler()->Compute(idle_time_ms, heap_state);
@@ -154,6 +160,7 @@ TEST_F(GCIdleTimeHandlerTest, IncrementalMarking1) {
TEST_F(GCIdleTimeHandlerTest, NotEnoughTime) {
+ if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
heap_state.incremental_marking_stopped = true;
size_t speed = kMarkCompactSpeed;
@@ -164,6 +171,7 @@ TEST_F(GCIdleTimeHandlerTest, NotEnoughTime) {
TEST_F(GCIdleTimeHandlerTest, DoNotStartIncrementalMarking) {
+ if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
heap_state.incremental_marking_stopped = true;
double idle_time_ms = 10.0;
@@ -173,6 +181,7 @@ TEST_F(GCIdleTimeHandlerTest, DoNotStartIncrementalMarking) {
TEST_F(GCIdleTimeHandlerTest, ContinueAfterStop) {
+ if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
heap_state.incremental_marking_stopped = true;
double idle_time_ms = 10.0;
@@ -185,6 +194,7 @@ TEST_F(GCIdleTimeHandlerTest, ContinueAfterStop) {
TEST_F(GCIdleTimeHandlerTest, ZeroIdleTimeNothingToDo) {
+ if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
for (int i = 0; i < kMaxNotifications; i++) {
GCIdleTimeAction action = handler()->Compute(0, heap_state);
@@ -194,6 +204,7 @@ TEST_F(GCIdleTimeHandlerTest, ZeroIdleTimeNothingToDo) {
TEST_F(GCIdleTimeHandlerTest, SmallIdleTimeNothingToDo) {
+ if (!handler()->Enabled()) return;
GCIdleTimeHeapState heap_state = DefaultHeapState();
heap_state.incremental_marking_stopped = true;
for (int i = 0; i < kMaxNotifications; i++) {
@@ -204,6 +215,8 @@ TEST_F(GCIdleTimeHandlerTest, SmallIdleTimeNothingToDo) {
TEST_F(GCIdleTimeHandlerTest, DoneIfNotMakingProgressOnIncrementalMarking) {
+ if (!handler()->Enabled()) return;
+
// Regression test for crbug.com/489323.
GCIdleTimeHeapState heap_state = DefaultHeapState();
diff --git a/deps/v8/test/unittests/heap/heap-unittest.cc b/deps/v8/test/unittests/heap/heap-unittest.cc
index 9492faf9f3..b738b44e83 100644
--- a/deps/v8/test/unittests/heap/heap-unittest.cc
+++ b/deps/v8/test/unittests/heap/heap-unittest.cc
@@ -3,6 +3,7 @@
// found in the LICENSE file.
#include <cmath>
+#include <iostream>
#include <limits>
#include "src/objects.h"
diff --git a/deps/v8/test/unittests/heap/memory-reducer-unittest.cc b/deps/v8/test/unittests/heap/memory-reducer-unittest.cc
index 9aa05e2ebd..27585dc78d 100644
--- a/deps/v8/test/unittests/heap/memory-reducer-unittest.cc
+++ b/deps/v8/test/unittests/heap/memory-reducer-unittest.cc
@@ -12,36 +12,44 @@ namespace v8 {
namespace internal {
MemoryReducer::State DoneState() {
- return MemoryReducer::State(MemoryReducer::kDone, 0, 0.0, 1.0);
+ return MemoryReducer::State(MemoryReducer::kDone, 0, 0.0, 1.0, 0);
}
+MemoryReducer::State DoneState(size_t committed_memory) {
+ return MemoryReducer::State(MemoryReducer::kDone, 0, 0.0, 1.0,
+ committed_memory);
+}
MemoryReducer::State WaitState(int started_gcs, double next_gc_start_ms) {
return MemoryReducer::State(MemoryReducer::kWait, started_gcs,
- next_gc_start_ms, 1.0);
+ next_gc_start_ms, 1.0, 0);
}
MemoryReducer::State RunState(int started_gcs, double next_gc_start_ms) {
return MemoryReducer::State(MemoryReducer::kRun, started_gcs,
- next_gc_start_ms, 1.0);
+ next_gc_start_ms, 1.0, 0);
}
MemoryReducer::Event MarkCompactEvent(double time_ms,
- bool next_gc_likely_to_collect_more) {
+ bool next_gc_likely_to_collect_more,
+ size_t committed_memory) {
MemoryReducer::Event event;
event.type = MemoryReducer::kMarkCompact;
event.time_ms = time_ms;
event.next_gc_likely_to_collect_more = next_gc_likely_to_collect_more;
+ event.committed_memory = committed_memory;
return event;
}
-MemoryReducer::Event MarkCompactEventGarbageLeft(double time_ms) {
- return MarkCompactEvent(time_ms, true);
+MemoryReducer::Event MarkCompactEventGarbageLeft(double time_ms,
+ size_t committed_memory) {
+ return MarkCompactEvent(time_ms, true, committed_memory);
}
-MemoryReducer::Event MarkCompactEventNoGarbageLeft(double time_ms) {
- return MarkCompactEvent(time_ms, false);
+MemoryReducer::Event MarkCompactEventNoGarbageLeft(double time_ms,
+ size_t committed_memory) {
+ return MarkCompactEvent(time_ms, false, committed_memory);
}
@@ -90,6 +98,19 @@ TEST(MemoryReducer, FromDoneToDone) {
state1 = MemoryReducer::Step(state0, TimerEventPendingGC(0));
EXPECT_EQ(MemoryReducer::kDone, state1.action);
+
+ state1 = MemoryReducer::Step(
+ state0,
+ MarkCompactEventGarbageLeft(0, MemoryReducer::kCommittedMemoryDelta - 1));
+ EXPECT_EQ(MemoryReducer::kDone, state1.action);
+
+ state0 = DoneState(1000 * MB);
+ state1 = MemoryReducer::Step(
+ state0, MarkCompactEventGarbageLeft(
+ 0, static_cast<size_t>(
+ 1000 * MB * MemoryReducer::kCommittedMemoryFactor) -
+ 1));
+ EXPECT_EQ(MemoryReducer::kDone, state1.action);
}
@@ -98,13 +119,17 @@ TEST(MemoryReducer, FromDoneToWait) {
MemoryReducer::State state0(DoneState()), state1(DoneState());
- state1 = MemoryReducer::Step(state0, MarkCompactEventGarbageLeft(2));
+ state1 = MemoryReducer::Step(
+ state0,
+ MarkCompactEventGarbageLeft(2, MemoryReducer::kCommittedMemoryDelta));
EXPECT_EQ(MemoryReducer::kWait, state1.action);
EXPECT_EQ(MemoryReducer::kLongDelayMs + 2, state1.next_gc_start_ms);
EXPECT_EQ(0, state1.started_gcs);
EXPECT_EQ(2, state1.last_gc_time_ms);
- state1 = MemoryReducer::Step(state0, MarkCompactEventNoGarbageLeft(2));
+ state1 = MemoryReducer::Step(
+ state0,
+ MarkCompactEventNoGarbageLeft(2, MemoryReducer::kCommittedMemoryDelta));
EXPECT_EQ(MemoryReducer::kWait, state1.action);
EXPECT_EQ(MemoryReducer::kLongDelayMs + 2, state1.next_gc_start_ms);
EXPECT_EQ(0, state1.started_gcs);
@@ -115,6 +140,16 @@ TEST(MemoryReducer, FromDoneToWait) {
EXPECT_EQ(MemoryReducer::kLongDelayMs, state1.next_gc_start_ms);
EXPECT_EQ(0, state1.started_gcs);
EXPECT_EQ(state0.last_gc_time_ms, state1.last_gc_time_ms);
+
+ state0 = DoneState(1000 * MB);
+ state1 = MemoryReducer::Step(
+ state0, MarkCompactEventGarbageLeft(
+ 2, static_cast<size_t>(
+ 1000 * MB * MemoryReducer::kCommittedMemoryFactor)));
+ EXPECT_EQ(MemoryReducer::kWait, state1.action);
+ EXPECT_EQ(MemoryReducer::kLongDelayMs + 2, state1.next_gc_start_ms);
+ EXPECT_EQ(0, state1.started_gcs);
+ EXPECT_EQ(2, state1.last_gc_time_ms);
}
@@ -144,13 +179,13 @@ TEST(MemoryReducer, FromWaitToWait) {
EXPECT_EQ(2000 + MemoryReducer::kLongDelayMs, state1.next_gc_start_ms);
EXPECT_EQ(state0.started_gcs, state1.started_gcs);
- state1 = MemoryReducer::Step(state0, MarkCompactEventGarbageLeft(2000));
+ state1 = MemoryReducer::Step(state0, MarkCompactEventGarbageLeft(2000, 0));
EXPECT_EQ(MemoryReducer::kWait, state1.action);
EXPECT_EQ(2000 + MemoryReducer::kLongDelayMs, state1.next_gc_start_ms);
EXPECT_EQ(state0.started_gcs, state1.started_gcs);
EXPECT_EQ(2000, state1.last_gc_time_ms);
- state1 = MemoryReducer::Step(state0, MarkCompactEventNoGarbageLeft(2000));
+ state1 = MemoryReducer::Step(state0, MarkCompactEventNoGarbageLeft(2000, 0));
EXPECT_EQ(MemoryReducer::kWait, state1.action);
EXPECT_EQ(2000 + MemoryReducer::kLongDelayMs, state1.next_gc_start_ms);
EXPECT_EQ(state0.started_gcs, state1.started_gcs);
@@ -259,7 +294,7 @@ TEST(MemoryReducer, FromRunToDone) {
MemoryReducer::State state0(RunState(2, 0.0)), state1(DoneState());
- state1 = MemoryReducer::Step(state0, MarkCompactEventNoGarbageLeft(2000));
+ state1 = MemoryReducer::Step(state0, MarkCompactEventNoGarbageLeft(2000, 0));
EXPECT_EQ(MemoryReducer::kDone, state1.action);
EXPECT_EQ(0, state1.next_gc_start_ms);
EXPECT_EQ(MemoryReducer::kMaxNumberOfGCs, state1.started_gcs);
@@ -267,7 +302,7 @@ TEST(MemoryReducer, FromRunToDone) {
state0.started_gcs = MemoryReducer::kMaxNumberOfGCs;
- state1 = MemoryReducer::Step(state0, MarkCompactEventGarbageLeft(2000));
+ state1 = MemoryReducer::Step(state0, MarkCompactEventGarbageLeft(2000, 0));
EXPECT_EQ(MemoryReducer::kDone, state1.action);
EXPECT_EQ(0, state1.next_gc_start_ms);
EXPECT_EQ(2000, state1.last_gc_time_ms);
@@ -279,7 +314,7 @@ TEST(MemoryReducer, FromRunToWait) {
MemoryReducer::State state0(RunState(2, 0.0)), state1(DoneState());
- state1 = MemoryReducer::Step(state0, MarkCompactEventGarbageLeft(2000));
+ state1 = MemoryReducer::Step(state0, MarkCompactEventGarbageLeft(2000, 0));
EXPECT_EQ(MemoryReducer::kWait, state1.action);
EXPECT_EQ(2000 + MemoryReducer::kShortDelayMs, state1.next_gc_start_ms);
EXPECT_EQ(state0.started_gcs, state1.started_gcs);
@@ -287,7 +322,7 @@ TEST(MemoryReducer, FromRunToWait) {
state0.started_gcs = 1;
- state1 = MemoryReducer::Step(state0, MarkCompactEventNoGarbageLeft(2000));
+ state1 = MemoryReducer::Step(state0, MarkCompactEventNoGarbageLeft(2000, 0));
EXPECT_EQ(MemoryReducer::kWait, state1.action);
EXPECT_EQ(2000 + MemoryReducer::kShortDelayMs, state1.next_gc_start_ms);
EXPECT_EQ(state0.started_gcs, state1.started_gcs);
diff --git a/deps/v8/test/unittests/heap/unmapper-unittest.cc b/deps/v8/test/unittests/heap/unmapper-unittest.cc
new file mode 100644
index 0000000000..5d791a5cb3
--- /dev/null
+++ b/deps/v8/test/unittests/heap/unmapper-unittest.cc
@@ -0,0 +1,88 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifdef __linux__
+#include <sys/mman.h>
+#undef MAP_TYPE
+#endif // __linux__
+
+#include "src/heap/heap-inl.h"
+#include "src/heap/spaces-inl.h"
+#include "src/isolate.h"
+#include "test/unittests/test-utils.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace v8 {
+namespace internal {
+
+class SequentialUnmapperTest : public TestWithIsolate {
+ public:
+ SequentialUnmapperTest() = default;
+ ~SequentialUnmapperTest() override = default;
+
+ static void SetUpTestCase() {
+ old_flag_ = i::FLAG_concurrent_sweeping;
+ i::FLAG_concurrent_sweeping = false;
+ TestWithIsolate::SetUpTestCase();
+ }
+
+ static void TearDownTestCase() {
+ TestWithIsolate::TearDownTestCase();
+ i::FLAG_concurrent_sweeping = old_flag_;
+ }
+
+ Heap* heap() { return isolate()->heap(); }
+ MemoryAllocator* allocator() { return heap()->memory_allocator(); }
+ MemoryAllocator::Unmapper* unmapper() { return allocator()->unmapper(); }
+
+ private:
+ static bool old_flag_;
+
+ DISALLOW_COPY_AND_ASSIGN(SequentialUnmapperTest);
+};
+
+bool SequentialUnmapperTest::old_flag_;
+
+#ifdef __linux__
+
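+ // The msync() calls below probe whether a page is still mapped: msync()
+ // returns 0 while the backing mapping exists and fails with -1 (ENOMEM) once
+ // the unmapper has released it.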
+// See v8:5945.
+TEST_F(SequentialUnmapperTest, UnmapOnTeardownAfterAlreadyFreeingPooled) {
+ Page* page =
+ allocator()->AllocatePage(MemoryAllocator::PageAreaSize(OLD_SPACE),
+ static_cast<PagedSpace*>(heap()->old_space()),
+ Executability::NOT_EXECUTABLE);
+ heap()->old_space()->UnlinkFreeListCategories(page);
+ EXPECT_NE(nullptr, page);
+ const int page_size = getpagesize();
+ void* start_address = static_cast<void*>(page->address());
+ EXPECT_EQ(0, msync(start_address, page_size, MS_SYNC));
+ allocator()->Free<MemoryAllocator::kPooledAndQueue>(page);
+ EXPECT_EQ(0, msync(start_address, page_size, MS_SYNC));
+ unmapper()->FreeQueuedChunks();
+ EXPECT_EQ(0, msync(start_address, page_size, MS_SYNC));
+ unmapper()->TearDown();
+ EXPECT_EQ(-1, msync(start_address, page_size, MS_SYNC));
+}
+
+// See v8:5945.
+TEST_F(SequentialUnmapperTest, UnmapOnTeardown) {
+ Page* page =
+ allocator()->AllocatePage(MemoryAllocator::PageAreaSize(OLD_SPACE),
+ static_cast<PagedSpace*>(heap()->old_space()),
+ Executability::NOT_EXECUTABLE);
+ heap()->old_space()->UnlinkFreeListCategories(page);
+ EXPECT_NE(nullptr, page);
+ const int page_size = getpagesize();
+ void* start_address = static_cast<void*>(page->address());
+ EXPECT_EQ(0, msync(start_address, page_size, MS_SYNC));
+ allocator()->Free<MemoryAllocator::kPooledAndQueue>(page);
+ EXPECT_EQ(0, msync(start_address, page_size, MS_SYNC));
+ unmapper()->TearDown();
+ EXPECT_EQ(-1, msync(start_address, page_size, MS_SYNC));
+}
+
+#endif // __linux__
+
+} // namespace internal
+} // namespace v8
diff --git a/deps/v8/test/unittests/interpreter/bytecode-array-builder-unittest.cc b/deps/v8/test/unittests/interpreter/bytecode-array-builder-unittest.cc
index 999490518e..e9b996ea09 100644
--- a/deps/v8/test/unittests/interpreter/bytecode-array-builder-unittest.cc
+++ b/deps/v8/test/unittests/interpreter/bytecode-array-builder-unittest.cc
@@ -8,6 +8,7 @@
#include "src/interpreter/bytecode-array-iterator.h"
#include "src/interpreter/bytecode-label.h"
#include "src/interpreter/bytecode-register-allocator.h"
+#include "src/objects-inl.h"
#include "test/unittests/test-utils.h"
namespace v8 {
@@ -77,8 +78,8 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
// Emit global load / store operations.
Handle<String> name = factory->NewStringFromStaticChars("var_name");
- builder.LoadGlobal(1, TypeofMode::NOT_INSIDE_TYPEOF)
- .LoadGlobal(1, TypeofMode::INSIDE_TYPEOF)
+ builder.LoadGlobal(name, 1, TypeofMode::NOT_INSIDE_TYPEOF)
+ .LoadGlobal(name, 1, TypeofMode::INSIDE_TYPEOF)
.StoreGlobal(name, 1, LanguageMode::SLOPPY)
.StoreGlobal(name, 1, LanguageMode::STRICT);
@@ -115,18 +116,19 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
.LoadLookupGlobalSlot(name, TypeofMode::INSIDE_TYPEOF, 1, 0);
// Emit closure operations.
- builder.CreateClosure(0, NOT_TENURED);
+ builder.CreateClosure(0, 1, NOT_TENURED);
// Emit create context operation.
builder.CreateBlockContext(factory->NewScopeInfo(1));
builder.CreateCatchContext(reg, name, factory->NewScopeInfo(1));
builder.CreateFunctionContext(1);
+ builder.CreateEvalContext(1);
builder.CreateWithContext(reg, factory->NewScopeInfo(1));
// Emit literal creation operations.
- builder.CreateRegExpLiteral(factory->NewStringFromStaticChars("a"), 0, 0)
- .CreateArrayLiteral(factory->NewFixedArray(1), 0, 0)
- .CreateObjectLiteral(factory->NewFixedArray(1), 0, 0, reg);
+ builder.CreateRegExpLiteral(factory->NewStringFromStaticChars("a"), 0, 0);
+ builder.CreateArrayLiteral(0, 0, 0);
+ builder.CreateObjectLiteral(0, 0, 0, reg);
// Call operations.
builder.Call(reg, reg_list, 1, Call::GLOBAL_CALL)
@@ -135,7 +137,8 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
.Call(reg, reg_list, 1, Call::GLOBAL_CALL, TailCallMode::kAllow)
.CallRuntime(Runtime::kIsArray, reg)
.CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, reg_list, pair)
- .CallJSRuntime(Context::SPREAD_ITERABLE_INDEX, reg_list);
+ .CallJSRuntime(Context::SPREAD_ITERABLE_INDEX, reg_list)
+ .NewWithSpread(reg_list);
// Emit binary operator invocations.
builder.BinaryOperation(Token::Value::ADD, reg, 1)
@@ -195,14 +198,28 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
.CompareOperation(Token::Value::INSTANCEOF, reg, 8)
.CompareOperation(Token::Value::IN, reg, 9);
+ // Emit peephole optimizations of equality with Null or Undefined.
+ builder.LoadUndefined()
+ .CompareOperation(Token::Value::EQ, reg, 1)
+ .LoadNull()
+ .CompareOperation(Token::Value::EQ, reg, 1)
+ .LoadUndefined()
+ .CompareOperation(Token::Value::EQ_STRICT, reg, 1)
+ .LoadNull()
+ .CompareOperation(Token::Value::EQ_STRICT, reg, 1);
+
// Emit conversion operator invocations.
builder.ConvertAccumulatorToNumber(reg)
.ConvertAccumulatorToObject(reg)
.ConvertAccumulatorToName(reg);
+ // Emit GetSuperConstructor.
+ builder.GetSuperConstructor(reg);
+
// Short jumps with Imm8 operands
{
- BytecodeLabel start, after_jump1, after_jump2, after_jump3, after_jump4;
+ BytecodeLabel start, after_jump1, after_jump2, after_jump3, after_jump4,
+ after_jump5;
builder.Bind(&start)
.Jump(&after_jump1)
.Bind(&after_jump1)
@@ -212,11 +229,13 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
.Bind(&after_jump3)
.JumpIfNotHole(&after_jump4)
.Bind(&after_jump4)
+ .JumpIfJSReceiver(&after_jump5)
+ .Bind(&after_jump5)
.JumpLoop(&start, 0);
}
// Longer jumps with constant operands
- BytecodeLabel end[8];
+ BytecodeLabel end[9];
{
BytecodeLabel after_jump;
builder.Jump(&end[0])
@@ -231,7 +250,9 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
.JumpIfFalse(&end[4])
.JumpIfNull(&end[5])
.JumpIfUndefined(&end[6])
- .JumpIfNotHole(&end[7]);
+ .JumpIfNotHole(&end[7])
+ .LoadLiteral(factory->prototype_string())
+ .JumpIfJSReceiver(&end[8]);
}
// Perform an operation that returns boolean value to
@@ -258,6 +279,9 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
.Bind(&after_jump2);
}
+ // Emit set pending message bytecode.
+ builder.SetPendingMessage();
+
// Emit stack check bytecode.
builder.StackCheck(0);
@@ -282,14 +306,14 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
Handle<String> wide_name = factory->NewStringFromStaticChars("var_wide_name");
// Emit wide global load / store operations.
- builder.LoadGlobal(1024, TypeofMode::NOT_INSIDE_TYPEOF)
- .LoadGlobal(1024, TypeofMode::INSIDE_TYPEOF)
- .LoadGlobal(1024, TypeofMode::INSIDE_TYPEOF)
+ builder.LoadGlobal(name, 1024, TypeofMode::NOT_INSIDE_TYPEOF)
+ .LoadGlobal(name, 1024, TypeofMode::INSIDE_TYPEOF)
+ .LoadGlobal(name, 1024, TypeofMode::INSIDE_TYPEOF)
.StoreGlobal(name, 1024, LanguageMode::SLOPPY)
.StoreGlobal(wide_name, 1, LanguageMode::STRICT);
// Emit extra wide global load.
- builder.LoadGlobal(1024 * 1024, TypeofMode::NOT_INSIDE_TYPEOF);
+ builder.LoadGlobal(name, 1024 * 1024, TypeofMode::NOT_INSIDE_TYPEOF);
// Emit wide load / store property operations.
builder.LoadNamedProperty(reg, wide_name, 0)
@@ -299,6 +323,9 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
.StoreNamedProperty(reg, wide_name, 0, LanguageMode::STRICT)
.StoreKeyedProperty(reg, reg, 2056, LanguageMode::STRICT);
+ builder.StoreDataPropertyInLiteral(reg, reg,
+ DataPropertyInLiteralFlag::kNoFlags, 0);
+
// Emit wide context operations.
builder.LoadContextSlot(reg, 1024, 0).StoreContextSlot(reg, 1024, 0);
@@ -309,13 +336,14 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
.StoreLookupSlot(wide_name, LanguageMode::STRICT);
// CreateClosureWide
- builder.CreateClosure(1000, NOT_TENURED);
+ builder.CreateClosure(1000, 321, NOT_TENURED);
// Emit wide variant of literal creation operations.
- builder.CreateRegExpLiteral(factory->NewStringFromStaticChars("wide_literal"),
- 0, 0)
- .CreateArrayLiteral(factory->NewFixedArray(2), 0, 0)
- .CreateObjectLiteral(factory->NewFixedArray(2), 0, 0, reg);
+ builder
+ .CreateRegExpLiteral(factory->NewStringFromStaticChars("wide_literal"), 0,
+ 0)
+ .CreateArrayLiteral(0, 0, 0)
+ .CreateObjectLiteral(0, 0, 0, reg);
// Emit load and store operations for module variables.
builder.LoadModuleVariable(-1, 42)
@@ -393,6 +421,9 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
scorecard[Bytecodes::ToByte(Bytecode::kBitwiseOrSmi)] = 1;
scorecard[Bytecodes::ToByte(Bytecode::kShiftLeftSmi)] = 1;
scorecard[Bytecodes::ToByte(Bytecode::kShiftRightSmi)] = 1;
+ scorecard[Bytecodes::ToByte(Bytecode::kTestUndetectable)] = 1;
+ scorecard[Bytecodes::ToByte(Bytecode::kTestUndefined)] = 1;
+ scorecard[Bytecodes::ToByte(Bytecode::kTestNull)] = 1;
}
// Check return occurs at the end and only once in the BytecodeArray.
diff --git a/deps/v8/test/unittests/interpreter/bytecode-array-iterator-unittest.cc b/deps/v8/test/unittests/interpreter/bytecode-array-iterator-unittest.cc
index 894134a959..cd7d764ee5 100644
--- a/deps/v8/test/unittests/interpreter/bytecode-array-iterator-unittest.cc
+++ b/deps/v8/test/unittests/interpreter/bytecode-array-iterator-unittest.cc
@@ -6,6 +6,7 @@
#include "src/interpreter/bytecode-array-builder.h"
#include "src/interpreter/bytecode-array-iterator.h"
+#include "src/objects-inl.h"
#include "test/unittests/test-utils.h"
namespace v8 {
@@ -60,7 +61,7 @@ TEST_F(BytecodeArrayIteratorTest, IteratesBytecodeArray) {
.ForInPrepare(reg_0, triple)
.CallRuntime(Runtime::kLoadIC_Miss, reg_0)
.Debugger()
- .LoadGlobal(0x10000000, TypeofMode::NOT_INSIDE_TYPEOF)
+ .LoadGlobal(name, 0x10000000, TypeofMode::NOT_INSIDE_TYPEOF)
.Return();
// Test iterator sees the expected output from the builder.
@@ -268,8 +269,8 @@ TEST_F(BytecodeArrayIteratorTest, IteratesBytecodeArray) {
CHECK_EQ(iterator.current_bytecode(), Bytecode::kLdaGlobal);
CHECK_EQ(iterator.current_offset(), offset);
CHECK_EQ(iterator.current_operand_scale(), OperandScale::kQuadruple);
- CHECK_EQ(iterator.current_bytecode_size(), 6);
- CHECK_EQ(iterator.GetIndexOperand(0), 0x10000000u);
+ CHECK_EQ(iterator.current_bytecode_size(), 10);
+ CHECK_EQ(iterator.GetIndexOperand(1), 0x10000000u);
offset += Bytecodes::Size(Bytecode::kLdaGlobal, OperandScale::kQuadruple) +
kPrefixByteSize;
iterator.Advance();
diff --git a/deps/v8/test/unittests/interpreter/bytecode-array-random-iterator-unittest.cc b/deps/v8/test/unittests/interpreter/bytecode-array-random-iterator-unittest.cc
new file mode 100644
index 0000000000..2209dc9219
--- /dev/null
+++ b/deps/v8/test/unittests/interpreter/bytecode-array-random-iterator-unittest.cc
@@ -0,0 +1,1011 @@
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/v8.h"
+
+#include "src/interpreter/bytecode-array-builder.h"
+#include "src/interpreter/bytecode-array-random-iterator.h"
+#include "src/objects-inl.h"
+#include "test/unittests/test-utils.h"
+
+namespace v8 {
+namespace internal {
+namespace interpreter {
+
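+ // The tests below build the same fixed bytecode sequence with a
+ // BytecodeArrayBuilder and then exercise BytecodeArrayRandomIterator's
+ // random access (GoToStart, GoToEnd, GoToIndex, +=, -=) against the
+ // expected bytecodes, indices and offsets.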
+class BytecodeArrayRandomIteratorTest : public TestWithIsolateAndZone {
+ public:
+ BytecodeArrayRandomIteratorTest() {}
+ ~BytecodeArrayRandomIteratorTest() override {}
+};
+
+TEST_F(BytecodeArrayRandomIteratorTest, InvalidBeforeStart) {
+ // Use a builder to create an array containing multiple bytecodes
+ // with 0, 1 and 2 operands.
+ BytecodeArrayBuilder builder(isolate(), zone(), 3, 3, 0);
+ Factory* factory = isolate()->factory();
+ Handle<HeapObject> heap_num_0 = factory->NewHeapNumber(2.718);
+ Handle<HeapObject> heap_num_1 = factory->NewHeapNumber(2147483647);
+ Smi* zero = Smi::kZero;
+ Smi* smi_0 = Smi::FromInt(64);
+ Smi* smi_1 = Smi::FromInt(-65536);
+ Register reg_0(0);
+ Register reg_1(1);
+ RegisterList pair(0, 2);
+ RegisterList triple(0, 3);
+ Register param = Register::FromParameterIndex(2, builder.parameter_count());
+ Handle<String> name = factory->NewStringFromStaticChars("abc");
+ uint32_t feedback_slot = 97;
+
+ builder.LoadLiteral(heap_num_0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(heap_num_1)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(zero)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_0)
+ .StackCheck(0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_1)
+ .StackCheck(1)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadAccumulatorWithRegister(reg_0)
+ .BinaryOperation(Token::Value::ADD, reg_0, 2)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadNamedProperty(reg_1, name, feedback_slot)
+ .BinaryOperation(Token::Value::ADD, reg_0, 3)
+ .StoreAccumulatorInRegister(param)
+ .CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, param, pair)
+ .ForInPrepare(reg_0, triple)
+ .CallRuntime(Runtime::kLoadIC_Miss, reg_0)
+ .Debugger()
+ .LoadGlobal(name, 0x10000000, TypeofMode::NOT_INSIDE_TYPEOF)
+ .Return();
+
+ Handle<BytecodeArray> bytecodeArray = builder.ToBytecodeArray(isolate());
+ BytecodeArrayRandomIterator iterator(bytecodeArray, zone());
+
+ iterator.GoToStart();
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+ ASSERT_FALSE(iterator.IsValid());
+}
+
+TEST_F(BytecodeArrayRandomIteratorTest, InvalidAfterEnd) {
+ // Use a builder to create an array containing multiple bytecodes
+ // with 0, 1 and 2 operands.
+ BytecodeArrayBuilder builder(isolate(), zone(), 3, 3, 0);
+ Factory* factory = isolate()->factory();
+ Handle<HeapObject> heap_num_0 = factory->NewHeapNumber(2.718);
+ Handle<HeapObject> heap_num_1 = factory->NewHeapNumber(2147483647);
+ Smi* zero = Smi::kZero;
+ Smi* smi_0 = Smi::FromInt(64);
+ Smi* smi_1 = Smi::FromInt(-65536);
+ Register reg_0(0);
+ Register reg_1(1);
+ RegisterList pair(0, 2);
+ RegisterList triple(0, 3);
+ Register param = Register::FromParameterIndex(2, builder.parameter_count());
+ Handle<String> name = factory->NewStringFromStaticChars("abc");
+ uint32_t feedback_slot = 97;
+
+ builder.LoadLiteral(heap_num_0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(heap_num_1)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(zero)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_0)
+ .StackCheck(0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_1)
+ .StackCheck(1)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadAccumulatorWithRegister(reg_0)
+ .BinaryOperation(Token::Value::ADD, reg_0, 2)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadNamedProperty(reg_1, name, feedback_slot)
+ .BinaryOperation(Token::Value::ADD, reg_0, 3)
+ .StoreAccumulatorInRegister(param)
+ .CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, param, pair)
+ .ForInPrepare(reg_0, triple)
+ .CallRuntime(Runtime::kLoadIC_Miss, reg_0)
+ .Debugger()
+ .LoadGlobal(name, 0x10000000, TypeofMode::NOT_INSIDE_TYPEOF)
+ .Return();
+
+ Handle<BytecodeArray> bytecodeArray = builder.ToBytecodeArray(isolate());
+ BytecodeArrayRandomIterator iterator(bytecodeArray, zone());
+
+ iterator.GoToEnd();
+ ASSERT_TRUE(iterator.IsValid());
+ ++iterator;
+ ASSERT_FALSE(iterator.IsValid());
+}
+
+TEST_F(BytecodeArrayRandomIteratorTest, AccessesFirst) {
+ // Use a builder to create an array containing multiple bytecodes
+ // with 0, 1 and 2 operands.
+ BytecodeArrayBuilder builder(isolate(), zone(), 3, 3, 0);
+ Factory* factory = isolate()->factory();
+ Handle<HeapObject> heap_num_0 = factory->NewHeapNumber(2.718);
+ Handle<HeapObject> heap_num_1 = factory->NewHeapNumber(2147483647);
+ Smi* zero = Smi::kZero;
+ Smi* smi_0 = Smi::FromInt(64);
+ Smi* smi_1 = Smi::FromInt(-65536);
+ Register reg_0(0);
+ Register reg_1(1);
+ RegisterList pair(0, 2);
+ RegisterList triple(0, 3);
+ Register param = Register::FromParameterIndex(2, builder.parameter_count());
+ Handle<String> name = factory->NewStringFromStaticChars("abc");
+ uint32_t feedback_slot = 97;
+
+ builder.LoadLiteral(heap_num_0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(heap_num_1)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(zero)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_0)
+ .StackCheck(0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_1)
+ .StackCheck(1)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadAccumulatorWithRegister(reg_0)
+ .BinaryOperation(Token::Value::ADD, reg_0, 2)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadNamedProperty(reg_1, name, feedback_slot)
+ .BinaryOperation(Token::Value::ADD, reg_0, 3)
+ .StoreAccumulatorInRegister(param)
+ .CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, param, pair)
+ .ForInPrepare(reg_0, triple)
+ .CallRuntime(Runtime::kLoadIC_Miss, reg_0)
+ .Debugger()
+ .LoadGlobal(name, 0x10000000, TypeofMode::NOT_INSIDE_TYPEOF)
+ .Return();
+
+ Handle<BytecodeArray> bytecodeArray = builder.ToBytecodeArray(isolate());
+ BytecodeArrayRandomIterator iterator(bytecodeArray, zone());
+
+ iterator.GoToStart();
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaConstant);
+ EXPECT_EQ(iterator.current_index(), 0);
+ EXPECT_EQ(iterator.current_offset(), 0);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_TRUE(
+ iterator.GetConstantForIndexOperand(0).is_identical_to(heap_num_0));
+ ASSERT_TRUE(iterator.IsValid());
+}
+
+TEST_F(BytecodeArrayRandomIteratorTest, AccessesLast) {
+ // Use a builder to create an array containing multiple bytecodes
+ // with 0, 1 and 2 operands.
+ BytecodeArrayBuilder builder(isolate(), zone(), 3, 3, 0);
+ Factory* factory = isolate()->factory();
+ Handle<HeapObject> heap_num_0 = factory->NewHeapNumber(2.718);
+ Handle<HeapObject> heap_num_1 = factory->NewHeapNumber(2147483647);
+ Smi* zero = Smi::kZero;
+ Smi* smi_0 = Smi::FromInt(64);
+ Smi* smi_1 = Smi::FromInt(-65536);
+ Register reg_0(0);
+ Register reg_1(1);
+ RegisterList pair(0, 2);
+ RegisterList triple(0, 3);
+ Register param = Register::FromParameterIndex(2, builder.parameter_count());
+ Handle<String> name = factory->NewStringFromStaticChars("abc");
+ uint32_t feedback_slot = 97;
+
+ builder.LoadLiteral(heap_num_0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(heap_num_1)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(zero)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_0)
+ .StackCheck(0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_1)
+ .StackCheck(1)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadAccumulatorWithRegister(reg_0)
+ .BinaryOperation(Token::Value::ADD, reg_0, 2)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadNamedProperty(reg_1, name, feedback_slot)
+ .BinaryOperation(Token::Value::ADD, reg_0, 3)
+ .StoreAccumulatorInRegister(param)
+ .CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, param, pair)
+ .ForInPrepare(reg_0, triple)
+ .CallRuntime(Runtime::kLoadIC_Miss, reg_0)
+ .Debugger()
+ .LoadGlobal(name, 0x10000000, TypeofMode::NOT_INSIDE_TYPEOF)
+ .Return();
+
+ Handle<BytecodeArray> bytecodeArray = builder.ToBytecodeArray(isolate());
+ BytecodeArrayRandomIterator iterator(bytecodeArray, zone());
+
+ iterator.GoToEnd();
+
+ int offset = bytecodeArray->length() -
+ Bytecodes::Size(Bytecode::kReturn, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kReturn);
+ EXPECT_EQ(iterator.current_index(), 23);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ ASSERT_TRUE(iterator.IsValid());
+}
+
+TEST_F(BytecodeArrayRandomIteratorTest, RandomAccessValid) {
+ // Use a builder to create an array containing multiple bytecodes
+ // with 0, 1 and 2 operands.
+ BytecodeArrayBuilder builder(isolate(), zone(), 3, 3, 0);
+ Factory* factory = isolate()->factory();
+ Handle<HeapObject> heap_num_0 = factory->NewHeapNumber(2.718);
+ Handle<HeapObject> heap_num_1 = factory->NewHeapNumber(2147483647);
+ Smi* zero = Smi::kZero;
+ Smi* smi_0 = Smi::FromInt(64);
+ Smi* smi_1 = Smi::FromInt(-65536);
+ Register reg_0(0);
+ Register reg_1(1);
+ RegisterList pair(0, 2);
+ RegisterList triple(0, 3);
+ Register param = Register::FromParameterIndex(2, builder.parameter_count());
+ Handle<String> name = factory->NewStringFromStaticChars("abc");
+ uint32_t name_index = 2;
+ uint32_t feedback_slot = 97;
+
+ builder.LoadLiteral(heap_num_0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(heap_num_1)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(zero)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_0)
+ .StackCheck(0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_1)
+ .StackCheck(1)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadAccumulatorWithRegister(reg_0)
+ .BinaryOperation(Token::Value::ADD, reg_0, 2)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadNamedProperty(reg_1, name, feedback_slot)
+ .BinaryOperation(Token::Value::ADD, reg_0, 3)
+ .StoreAccumulatorInRegister(param)
+ .CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, param, pair)
+ .ForInPrepare(reg_0, triple)
+ .CallRuntime(Runtime::kLoadIC_Miss, reg_0)
+ .Debugger()
+ .LoadGlobal(name, 0x10000000, TypeofMode::NOT_INSIDE_TYPEOF)
+ .Return();
+
+ // Test iterator sees the expected output from the builder.
+ BytecodeArrayRandomIterator iterator(builder.ToBytecodeArray(isolate()),
+ zone());
+ const int kPrefixByteSize = 1;
+ int offset = 0;
+
+ iterator.GoToIndex(13);
+ offset = Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaZero, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaSmi, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStackCheck, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaSmi, OperandScale::kQuadruple) +
+ kPrefixByteSize;
+ offset += Bytecodes::Size(Bytecode::kStackCheck, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdar, OperandScale::kSingle);
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kAdd);
+ EXPECT_EQ(iterator.current_index(), 13);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+
+ iterator.GoToIndex(2);
+ offset = Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaConstant);
+ EXPECT_EQ(iterator.current_index(), 2);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_TRUE(
+ iterator.GetConstantForIndexOperand(0).is_identical_to(heap_num_1));
+ ASSERT_TRUE(iterator.IsValid());
+
+ iterator.GoToIndex(18);
+ offset = Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaZero, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaSmi, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStackCheck, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaSmi, OperandScale::kQuadruple) +
+ kPrefixByteSize;
+ offset += Bytecodes::Size(Bytecode::kStackCheck, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kAdd, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaNamedProperty, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kAdd, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kCallRuntimeForPair);
+ EXPECT_EQ(iterator.current_index(), 18);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRuntimeIdOperand(0), Runtime::kLoadLookupSlotForCall);
+ EXPECT_EQ(iterator.GetRegisterOperand(1).index(), param.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(1), 1);
+ EXPECT_EQ(iterator.GetRegisterCountOperand(2), 1u);
+ EXPECT_EQ(iterator.GetRegisterOperand(3).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(3), 2);
+ ASSERT_TRUE(iterator.IsValid());
+
+ iterator -= 3;
+ offset -= Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset -= Bytecodes::Size(Bytecode::kAdd, OperandScale::kSingle);
+ offset -= Bytecodes::Size(Bytecode::kLdaNamedProperty, OperandScale::kSingle);
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaNamedProperty);
+ EXPECT_EQ(iterator.current_index(), 15);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_1.index());
+ EXPECT_EQ(iterator.GetIndexOperand(1), name_index);
+ EXPECT_EQ(iterator.GetIndexOperand(2), feedback_slot);
+ ASSERT_TRUE(iterator.IsValid());
+
+ iterator += 2;
+ offset += Bytecodes::Size(Bytecode::kLdaNamedProperty, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kAdd, OperandScale::kSingle);
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 17);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), param.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+
+ iterator.GoToIndex(23);
+ offset = Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaZero, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaSmi, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStackCheck, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaSmi, OperandScale::kQuadruple) +
+ kPrefixByteSize;
+ offset += Bytecodes::Size(Bytecode::kStackCheck, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kAdd, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaNamedProperty, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kAdd, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ offset +=
+ Bytecodes::Size(Bytecode::kCallRuntimeForPair, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kForInPrepare, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kCallRuntime, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kDebugger, OperandScale::kSingle);
+ offset += Bytecodes::Size(Bytecode::kLdaGlobal, OperandScale::kQuadruple) +
+ kPrefixByteSize;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kReturn);
+ EXPECT_EQ(iterator.current_index(), 23);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ ASSERT_TRUE(iterator.IsValid());
+
+ iterator.GoToIndex(24);
+ EXPECT_FALSE(iterator.IsValid());
+
+ iterator.GoToIndex(-5);
+ EXPECT_FALSE(iterator.IsValid());
+}
+
+TEST_F(BytecodeArrayRandomIteratorTest, IteratesBytecodeArray) {
+ // Use a builder to create an array containing multiple bytecodes
+ // with 0, 1 and 2 operands.
+ BytecodeArrayBuilder builder(isolate(), zone(), 3, 3, 0);
+ Factory* factory = isolate()->factory();
+ Handle<HeapObject> heap_num_0 = factory->NewHeapNumber(2.718);
+ Handle<HeapObject> heap_num_1 = factory->NewHeapNumber(2147483647);
+ Smi* zero = Smi::kZero;
+ Smi* smi_0 = Smi::FromInt(64);
+ Smi* smi_1 = Smi::FromInt(-65536);
+ Register reg_0(0);
+ Register reg_1(1);
+ RegisterList pair(0, 2);
+ RegisterList triple(0, 3);
+ Register param = Register::FromParameterIndex(2, builder.parameter_count());
+ Handle<String> name = factory->NewStringFromStaticChars("abc");
+ uint32_t name_index = 2;
+ uint32_t feedback_slot = 97;
+
+ builder.LoadLiteral(heap_num_0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(heap_num_1)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(zero)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_0)
+ .StackCheck(0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_1)
+ .StackCheck(1)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadAccumulatorWithRegister(reg_0)
+ .BinaryOperation(Token::Value::ADD, reg_0, 2)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadNamedProperty(reg_1, name, feedback_slot)
+ .BinaryOperation(Token::Value::ADD, reg_0, 3)
+ .StoreAccumulatorInRegister(param)
+ .CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, param, pair)
+ .ForInPrepare(reg_0, triple)
+ .CallRuntime(Runtime::kLoadIC_Miss, reg_0)
+ .Debugger()
+ .LoadGlobal(name, 0x10000000, TypeofMode::NOT_INSIDE_TYPEOF)
+ .Return();
+
+ // Test iterator sees the expected output from the builder.
+ BytecodeArrayRandomIterator iterator(builder.ToBytecodeArray(isolate()),
+ zone());
+ const int kPrefixByteSize = 1;
+ int offset = 0;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaConstant);
+ EXPECT_EQ(iterator.current_index(), 0);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_TRUE(
+ iterator.GetConstantForIndexOperand(0).is_identical_to(heap_num_0));
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 1);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaConstant);
+ EXPECT_EQ(iterator.current_index(), 2);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_TRUE(
+ iterator.GetConstantForIndexOperand(0).is_identical_to(heap_num_1));
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 3);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaZero);
+ EXPECT_EQ(iterator.current_index(), 4);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kLdaZero, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 5);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaSmi);
+ EXPECT_EQ(iterator.current_index(), 6);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(Smi::FromInt(iterator.GetImmediateOperand(0)), smi_0);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kLdaSmi, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStackCheck);
+ EXPECT_EQ(iterator.current_index(), 7);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(Bytecodes::NumberOfOperands(iterator.current_bytecode()), 0);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kStackCheck, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 8);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaSmi);
+ EXPECT_EQ(iterator.current_index(), 9);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kQuadruple);
+ EXPECT_EQ(Smi::FromInt(iterator.GetImmediateOperand(0)), smi_1);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kLdaSmi, OperandScale::kQuadruple) +
+ kPrefixByteSize;
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStackCheck);
+ EXPECT_EQ(iterator.current_index(), 10);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(Bytecodes::NumberOfOperands(iterator.current_bytecode()), 0);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kStackCheck, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 11);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_1.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdar);
+ EXPECT_EQ(iterator.current_index(), 12);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kLdar, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kAdd);
+ EXPECT_EQ(iterator.current_index(), 13);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kAdd, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 14);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_1.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaNamedProperty);
+ EXPECT_EQ(iterator.current_index(), 15);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_1.index());
+ EXPECT_EQ(iterator.GetIndexOperand(1), name_index);
+ EXPECT_EQ(iterator.GetIndexOperand(2), feedback_slot);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kLdaNamedProperty, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kAdd);
+ EXPECT_EQ(iterator.current_index(), 16);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kAdd, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 17);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), param.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kCallRuntimeForPair);
+ EXPECT_EQ(iterator.current_index(), 18);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRuntimeIdOperand(0), Runtime::kLoadLookupSlotForCall);
+ EXPECT_EQ(iterator.GetRegisterOperand(1).index(), param.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(1), 1);
+ EXPECT_EQ(iterator.GetRegisterCountOperand(2), 1u);
+ EXPECT_EQ(iterator.GetRegisterOperand(3).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(3), 2);
+ ASSERT_TRUE(iterator.IsValid());
+ offset +=
+ Bytecodes::Size(Bytecode::kCallRuntimeForPair, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kForInPrepare);
+ EXPECT_EQ(iterator.current_index(), 19);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ EXPECT_EQ(iterator.GetRegisterOperand(1).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(1), 3);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kForInPrepare, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kCallRuntime);
+ EXPECT_EQ(iterator.current_index(), 20);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRuntimeIdOperand(0), Runtime::kLoadIC_Miss);
+ EXPECT_EQ(iterator.GetRegisterOperand(1).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterCountOperand(2), 1u);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kCallRuntime, OperandScale::kSingle);
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kDebugger);
+ EXPECT_EQ(iterator.current_index(), 21);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ ASSERT_TRUE(iterator.IsValid());
+ offset += Bytecodes::Size(Bytecode::kDebugger, OperandScale::kSingle);
+ ++iterator;
+
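+ // The 0x10000000 feedback slot forces LdaGlobal to OperandScale::kQuadruple,
+ // so it is preceded by an ExtraWide prefix byte: 1 prefix + 1 bytecode +
+ // 2 * 4-byte operands = 10 bytes.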
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaGlobal);
+ EXPECT_EQ(iterator.current_index(), 22);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kQuadruple);
+ EXPECT_EQ(iterator.current_bytecode_size(), 10);
+ EXPECT_EQ(iterator.GetIndexOperand(1), 0x10000000u);
+ offset += Bytecodes::Size(Bytecode::kLdaGlobal, OperandScale::kQuadruple) +
+ kPrefixByteSize;
+ ++iterator;
+
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kReturn);
+ EXPECT_EQ(iterator.current_index(), 23);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ ASSERT_TRUE(iterator.IsValid());
+ ++iterator;
+ ASSERT_TRUE(!iterator.IsValid());
+}
+
+TEST_F(BytecodeArrayRandomIteratorTest, IteratesBytecodeArrayBackwards) {
+ // Use a builder to create an array containing multiple bytecodes with 0, 1
+ // and 2 operands.
+ BytecodeArrayBuilder builder(isolate(), zone(), 3, 3, 0);
+ Factory* factory = isolate()->factory();
+ Handle<HeapObject> heap_num_0 = factory->NewHeapNumber(2.718);
+ Handle<HeapObject> heap_num_1 = factory->NewHeapNumber(2147483647);
+ Smi* zero = Smi::kZero;
+ Smi* smi_0 = Smi::FromInt(64);
+ Smi* smi_1 = Smi::FromInt(-65536);
+ Register reg_0(0);
+ Register reg_1(1);
+ RegisterList pair(0, 2);
+ RegisterList triple(0, 3);
+ Register param = Register::FromParameterIndex(2, builder.parameter_count());
+ Handle<String> name = factory->NewStringFromStaticChars("abc");
+ uint32_t name_index = 2;
+ uint32_t feedback_slot = 97;
+
+ builder.LoadLiteral(heap_num_0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(heap_num_1)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(zero)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_0)
+ .StackCheck(0)
+ .StoreAccumulatorInRegister(reg_0)
+ .LoadLiteral(smi_1)
+ .StackCheck(1)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadAccumulatorWithRegister(reg_0)
+ .BinaryOperation(Token::Value::ADD, reg_0, 2)
+ .StoreAccumulatorInRegister(reg_1)
+ .LoadNamedProperty(reg_1, name, feedback_slot)
+ .BinaryOperation(Token::Value::ADD, reg_0, 3)
+ .StoreAccumulatorInRegister(param)
+ .CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, param, pair)
+ .ForInPrepare(reg_0, triple)
+ .CallRuntime(Runtime::kLoadIC_Miss, reg_0)
+ .Debugger()
+ .LoadGlobal(name, 0x10000000, TypeofMode::NOT_INSIDE_TYPEOF)
+ .Return();
+
+ // Test iterator sees the expected output from the builder.
+ Handle<BytecodeArray> bytecodeArray = builder.ToBytecodeArray(isolate());
+ BytecodeArrayRandomIterator iterator(bytecodeArray, zone());
+ const int kPrefixByteSize = 1;
+ int offset = bytecodeArray->length();
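+ // Offsets are checked while walking backwards: the size of each bytecode is
+ // subtracted from the running offset before its fields are verified.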
+
+ iterator.GoToEnd();
+
+ offset -= Bytecodes::Size(Bytecode::kReturn, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kReturn);
+ EXPECT_EQ(iterator.current_index(), 23);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kLdaGlobal, OperandScale::kQuadruple) +
+ kPrefixByteSize;
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaGlobal);
+ EXPECT_EQ(iterator.current_index(), 22);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kQuadruple);
+ EXPECT_EQ(iterator.current_bytecode_size(), 10);
+ EXPECT_EQ(iterator.GetIndexOperand(1), 0x10000000u);
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kDebugger, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kDebugger);
+ EXPECT_EQ(iterator.current_index(), 21);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kCallRuntime, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kCallRuntime);
+ EXPECT_EQ(iterator.current_index(), 20);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRuntimeIdOperand(0), Runtime::kLoadIC_Miss);
+ EXPECT_EQ(iterator.GetRegisterOperand(1).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterCountOperand(2), 1u);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kForInPrepare, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kForInPrepare);
+ EXPECT_EQ(iterator.current_index(), 19);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ EXPECT_EQ(iterator.GetRegisterOperand(1).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(1), 3);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -=
+ Bytecodes::Size(Bytecode::kCallRuntimeForPair, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kCallRuntimeForPair);
+ EXPECT_EQ(iterator.current_index(), 18);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRuntimeIdOperand(0), Runtime::kLoadLookupSlotForCall);
+ EXPECT_EQ(iterator.GetRegisterOperand(1).index(), param.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(1), 1);
+ EXPECT_EQ(iterator.GetRegisterCountOperand(2), 1u);
+ EXPECT_EQ(iterator.GetRegisterOperand(3).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(3), 2);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 17);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), param.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kAdd, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kAdd);
+ EXPECT_EQ(iterator.current_index(), 16);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kLdaNamedProperty, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaNamedProperty);
+ EXPECT_EQ(iterator.current_index(), 15);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_1.index());
+ EXPECT_EQ(iterator.GetIndexOperand(1), name_index);
+ EXPECT_EQ(iterator.GetIndexOperand(2), feedback_slot);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 14);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_1.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kAdd, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kAdd);
+ EXPECT_EQ(iterator.current_index(), 13);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kLdar, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdar);
+ EXPECT_EQ(iterator.current_index(), 12);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 11);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_1.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kStackCheck, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStackCheck);
+ EXPECT_EQ(iterator.current_index(), 10);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(Bytecodes::NumberOfOperands(iterator.current_bytecode()), 0);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kLdaSmi, OperandScale::kQuadruple) +
+ kPrefixByteSize;
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaSmi);
+ EXPECT_EQ(iterator.current_index(), 9);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kQuadruple);
+ EXPECT_EQ(Smi::FromInt(iterator.GetImmediateOperand(0)), smi_1);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 8);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kStackCheck, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStackCheck);
+ EXPECT_EQ(iterator.current_index(), 7);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(Bytecodes::NumberOfOperands(iterator.current_bytecode()), 0);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kLdaSmi, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaSmi);
+ EXPECT_EQ(iterator.current_index(), 6);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(Smi::FromInt(iterator.GetImmediateOperand(0)), smi_0);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 5);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kLdaZero, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaZero);
+ EXPECT_EQ(iterator.current_index(), 4);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 3);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaConstant);
+ EXPECT_EQ(iterator.current_index(), 2);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_TRUE(
+ iterator.GetConstantForIndexOperand(0).is_identical_to(heap_num_1));
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kStar, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kStar);
+ EXPECT_EQ(iterator.current_index(), 1);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_EQ(iterator.GetRegisterOperand(0).index(), reg_0.index());
+ EXPECT_EQ(iterator.GetRegisterOperandRange(0), 1);
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+
+ offset -= Bytecodes::Size(Bytecode::kLdaConstant, OperandScale::kSingle);
+ EXPECT_EQ(iterator.current_bytecode(), Bytecode::kLdaConstant);
+ EXPECT_EQ(iterator.current_index(), 0);
+ EXPECT_EQ(iterator.current_offset(), offset);
+ EXPECT_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
+ EXPECT_TRUE(
+ iterator.GetConstantForIndexOperand(0).is_identical_to(heap_num_0));
+ ASSERT_TRUE(iterator.IsValid());
+ --iterator;
+ ASSERT_FALSE(iterator.IsValid());
+}
+
+} // namespace interpreter
+} // namespace internal
+} // namespace v8
diff --git a/deps/v8/test/unittests/interpreter/bytecode-array-writer-unittest.cc b/deps/v8/test/unittests/interpreter/bytecode-array-writer-unittest.cc
index bc865ef7d1..91b3a7554b 100644
--- a/deps/v8/test/unittests/interpreter/bytecode-array-writer-unittest.cc
+++ b/deps/v8/test/unittests/interpreter/bytecode-array-writer-unittest.cc
@@ -10,6 +10,7 @@
#include "src/interpreter/bytecode-label.h"
#include "src/interpreter/constant-array-builder.h"
#include "src/isolate.h"
+#include "src/objects-inl.h"
#include "src/source-position-table.h"
#include "src/utils.h"
#include "test/unittests/interpreter/bytecode-utils.h"
diff --git a/deps/v8/test/unittests/interpreter/bytecode-operands-unittest.cc b/deps/v8/test/unittests/interpreter/bytecode-operands-unittest.cc
new file mode 100644
index 0000000000..a02d7f01c6
--- /dev/null
+++ b/deps/v8/test/unittests/interpreter/bytecode-operands-unittest.cc
@@ -0,0 +1,47 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/v8.h"
+
+#include "src/interpreter/bytecode-operands.h"
+#include "src/isolate.h"
+#include "test/unittests/test-utils.h"
+
+namespace v8 {
+namespace internal {
+namespace interpreter {
+
+using BytecodeOperandsTest = TestWithIsolateAndZone;
+
+TEST(BytecodeOperandsTest, IsScalableSignedByte) {
+#define SCALABLE_SIGNED_OPERAND(Name, ...) \
+ CHECK(BytecodeOperands::IsScalableSignedByte(OperandType::k##Name));
+ REGISTER_OPERAND_TYPE_LIST(SCALABLE_SIGNED_OPERAND)
+ SIGNED_SCALABLE_SCALAR_OPERAND_TYPE_LIST(SCALABLE_SIGNED_OPERAND)
+#undef SCALABLE_SIGNED_OPERAND
+#define NOT_SCALABLE_SIGNED_OPERAND(Name, ...) \
+ CHECK(!BytecodeOperands::IsScalableSignedByte(OperandType::k##Name));
+ INVALID_OPERAND_TYPE_LIST(NOT_SCALABLE_SIGNED_OPERAND)
+ UNSIGNED_FIXED_SCALAR_OPERAND_TYPE_LIST(NOT_SCALABLE_SIGNED_OPERAND)
+ UNSIGNED_SCALABLE_SCALAR_OPERAND_TYPE_LIST(NOT_SCALABLE_SIGNED_OPERAND)
+#undef NOT_SCALABLE_SIGNED_OPERAND
+}
+
+TEST(BytecodeOperandsTest, IsScalableUnsignedByte) {
+#define SCALABLE_UNSIGNED_OPERAND(Name, ...) \
+ CHECK(BytecodeOperands::IsScalableUnsignedByte(OperandType::k##Name));
+ UNSIGNED_SCALABLE_SCALAR_OPERAND_TYPE_LIST(SCALABLE_UNSIGNED_OPERAND)
+#undef SCALABLE_UNSIGNED_OPERAND
+#define NOT_SCALABLE_UNSIGNED_OPERAND(Name, ...) \
+ CHECK(!BytecodeOperands::IsScalableUnsignedByte(OperandType::k##Name));
+ INVALID_OPERAND_TYPE_LIST(NOT_SCALABLE_UNSIGNED_OPERAND)
+ REGISTER_OPERAND_TYPE_LIST(NOT_SCALABLE_UNSIGNED_OPERAND)
+ SIGNED_SCALABLE_SCALAR_OPERAND_TYPE_LIST(NOT_SCALABLE_UNSIGNED_OPERAND)
+ UNSIGNED_FIXED_SCALAR_OPERAND_TYPE_LIST(NOT_SCALABLE_UNSIGNED_OPERAND)
+#undef NOT_SCALABLE_UNSIGNED_OPERAND
+}
+
+} // namespace interpreter
+} // namespace internal
+} // namespace v8
diff --git a/deps/v8/test/unittests/interpreter/bytecode-peephole-optimizer-unittest.cc b/deps/v8/test/unittests/interpreter/bytecode-peephole-optimizer-unittest.cc
index d1c570d421..7d139f4b56 100644
--- a/deps/v8/test/unittests/interpreter/bytecode-peephole-optimizer-unittest.cc
+++ b/deps/v8/test/unittests/interpreter/bytecode-peephole-optimizer-unittest.cc
@@ -19,11 +19,12 @@ class BytecodePeepholeOptimizerTest : public BytecodePipelineStage,
public TestWithIsolateAndZone {
public:
BytecodePeepholeOptimizerTest()
- : peephole_optimizer_(this), last_written_(Bytecode::kIllegal) {}
+ : peephole_optimizer_(this),
+ last_written_(BytecodeNode::Illegal(BytecodeSourceInfo())) {}
~BytecodePeepholeOptimizerTest() override {}
void Reset() {
- last_written_.set_bytecode(Bytecode::kIllegal);
+ last_written_ = BytecodeNode::Illegal(BytecodeSourceInfo());
write_count_ = 0;
}
@@ -403,6 +404,25 @@ TEST_F(BytecodePeepholeOptimizerTest, MergeLdaZeroWithBinaryOp) {
}
}
+TEST_F(BytecodePeepholeOptimizerTest, MergeLdaNullOrUndefinedWithCompareOp) {
+ Bytecode first_bytecodes[] = {Bytecode::kLdaUndefined, Bytecode::kLdaNull};
+
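+ // The peephole optimizer should fold LdaNull/LdaUndefined followed by
+ // TestEqual into a single TestUndetectable that keeps only the register
+ // operand.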
+ for (auto first_bytecode : first_bytecodes) {
+ uint32_t reg_operand = Register(0).ToOperand();
+ uint32_t idx_operand = 1;
+ BytecodeNode first(first_bytecode);
+ BytecodeNode second(Bytecode::kTestEqual, reg_operand, idx_operand);
+ optimizer()->Write(&first);
+ optimizer()->Write(&second);
+ Flush();
+ CHECK_EQ(write_count(), 1);
+ CHECK_EQ(last_written().bytecode(), Bytecode::kTestUndetectable);
+ CHECK_EQ(last_written().operand_count(), 1);
+ CHECK_EQ(last_written().operand(0), reg_operand);
+ Reset();
+ }
+}
+
} // namespace interpreter
} // namespace internal
} // namespace v8
diff --git a/deps/v8/test/unittests/interpreter/bytecode-pipeline-unittest.cc b/deps/v8/test/unittests/interpreter/bytecode-pipeline-unittest.cc
index c4388e8fed..45366196f4 100644
--- a/deps/v8/test/unittests/interpreter/bytecode-pipeline-unittest.cc
+++ b/deps/v8/test/unittests/interpreter/bytecode-pipeline-unittest.cc
@@ -68,11 +68,12 @@ TEST_F(BytecodeNodeTest, Constructor2) {
}
TEST_F(BytecodeNodeTest, Constructor3) {
- uint32_t operands[] = {0x11};
- BytecodeNode node(Bytecode::kLdaGlobal, operands[0]);
+ uint32_t operands[] = {0x11, 0x22};
+ BytecodeNode node(Bytecode::kLdaGlobal, operands[0], operands[1]);
CHECK_EQ(node.bytecode(), Bytecode::kLdaGlobal);
- CHECK_EQ(node.operand_count(), 1);
+ CHECK_EQ(node.operand_count(), 2);
CHECK_EQ(node.operand(0), operands[0]);
+ CHECK_EQ(node.operand(1), operands[1]);
CHECK(!node.source_info().is_valid());
}
@@ -133,36 +134,6 @@ TEST_F(BytecodeNodeTest, NoEqualityWithDifferentSourceInfo) {
CHECK_NE(node, other);
}
-TEST_F(BytecodeNodeTest, SetBytecode0) {
- uint32_t operands[] = {0x71, 0xa5, 0x5a, 0xfc};
- BytecodeSourceInfo source_info(77, false);
- BytecodeNode node(Bytecode::kForInNext, operands[0], operands[1], operands[2],
- operands[3], source_info);
- CHECK_EQ(node.source_info(), source_info);
-
- BytecodeNode clone(Bytecode::kIllegal);
- clone = node;
- clone.set_bytecode(Bytecode::kNop);
- CHECK_EQ(clone.bytecode(), Bytecode::kNop);
- CHECK_EQ(clone.operand_count(), 0);
- CHECK_EQ(clone.source_info(), source_info);
-}
-
-TEST_F(BytecodeNodeTest, SetBytecode1) {
- uint32_t operands[] = {0x71, 0xa5, 0x5a, 0xfc};
- BytecodeSourceInfo source_info(77, false);
- BytecodeNode node(Bytecode::kForInNext, operands[0], operands[1], operands[2],
- operands[3], source_info);
-
- BytecodeNode clone(Bytecode::kIllegal);
- clone = node;
- clone.set_bytecode(Bytecode::kJump, 0x01aabbcc);
- CHECK_EQ(clone.bytecode(), Bytecode::kJump);
- CHECK_EQ(clone.operand_count(), 1);
- CHECK_EQ(clone.operand(0), 0x01aabbccu);
- CHECK_EQ(clone.source_info(), source_info);
-}
-
} // namespace interpreter
} // namespace internal
} // namespace v8
diff --git a/deps/v8/test/unittests/interpreter/bytecode-register-allocator-unittest.cc b/deps/v8/test/unittests/interpreter/bytecode-register-allocator-unittest.cc
index 81c6da5f8f..b2c8b47c79 100644
--- a/deps/v8/test/unittests/interpreter/bytecode-register-allocator-unittest.cc
+++ b/deps/v8/test/unittests/interpreter/bytecode-register-allocator-unittest.cc
@@ -6,6 +6,7 @@
#include "src/interpreter/bytecode-array-builder.h"
#include "src/interpreter/bytecode-register-allocator.h"
+#include "src/objects-inl.h"
#include "test/unittests/test-utils.h"
namespace v8 {
diff --git a/deps/v8/test/unittests/interpreter/bytecode-register-optimizer-unittest.cc b/deps/v8/test/unittests/interpreter/bytecode-register-optimizer-unittest.cc
index 55003d7511..ba9e880787 100644
--- a/deps/v8/test/unittests/interpreter/bytecode-register-optimizer-unittest.cc
+++ b/deps/v8/test/unittests/interpreter/bytecode-register-optimizer-unittest.cc
@@ -78,7 +78,7 @@ TEST_F(BytecodeRegisterOptimizerTest, TemporaryMaterializedForJump) {
Register temp = NewTemporary();
optimizer()->DoStar(temp, BytecodeSourceInfo());
CHECK_EQ(write_count(), 0u);
- optimizer()->PrepareForBytecode(Bytecode::kJump);
+ optimizer()->PrepareForBytecode<Bytecode::kJump, AccumulatorUse::kNone>();
CHECK_EQ(write_count(), 1u);
CHECK_EQ(output()->at(0).bytecode(), Bytecode::kStar);
CHECK_EQ(output()->at(0).operand(0), static_cast<uint32_t>(temp.ToOperand()));
@@ -96,7 +96,7 @@ TEST_F(BytecodeRegisterOptimizerTest, TemporaryNotEmitted) {
BytecodeNode node1(Bytecode::kStar, NewTemporary().ToOperand());
ReleaseTemporaries(temp);
CHECK_EQ(write_count(), 0u);
- optimizer()->PrepareForBytecode(Bytecode::kReturn);
+ optimizer()->PrepareForBytecode<Bytecode::kReturn, AccumulatorUse::kRead>();
CHECK_EQ(output()->at(0).bytecode(), Bytecode::kLdar);
CHECK_EQ(output()->at(0).operand(0),
static_cast<uint32_t>(parameter.ToOperand()));
@@ -104,12 +104,12 @@ TEST_F(BytecodeRegisterOptimizerTest, TemporaryNotEmitted) {
TEST_F(BytecodeRegisterOptimizerTest, ReleasedRegisterUsed) {
Initialize(3, 1);
- optimizer()->PrepareForBytecode(Bytecode::kLdaSmi);
+ optimizer()->PrepareForBytecode<Bytecode::kLdaSmi, AccumulatorUse::kWrite>();
Register temp0 = NewTemporary();
Register temp1 = NewTemporary();
optimizer()->DoStar(temp1, BytecodeSourceInfo());
CHECK_EQ(write_count(), 0u);
- optimizer()->PrepareForBytecode(Bytecode::kLdaSmi);
+ optimizer()->PrepareForBytecode<Bytecode::kLdaSmi, AccumulatorUse::kWrite>();
CHECK_EQ(write_count(), 1u);
CHECK_EQ(output()->at(0).bytecode(), Bytecode::kStar);
CHECK_EQ(output()->at(0).operand(0),
@@ -120,7 +120,7 @@ TEST_F(BytecodeRegisterOptimizerTest, ReleasedRegisterUsed) {
CHECK_EQ(write_count(), 1u);
optimizer()->DoLdar(temp0, BytecodeSourceInfo());
CHECK_EQ(write_count(), 1u);
- optimizer()->PrepareForBytecode(Bytecode::kReturn);
+ optimizer()->PrepareForBytecode<Bytecode::kReturn, AccumulatorUse::kRead>();
CHECK_EQ(write_count(), 2u);
CHECK_EQ(output()->at(1).bytecode(), Bytecode::kLdar);
CHECK_EQ(output()->at(1).operand(0),
@@ -129,7 +129,7 @@ TEST_F(BytecodeRegisterOptimizerTest, ReleasedRegisterUsed) {
TEST_F(BytecodeRegisterOptimizerTest, ReleasedRegisterNotFlushed) {
Initialize(3, 1);
- optimizer()->PrepareForBytecode(Bytecode::kLdaSmi);
+ optimizer()->PrepareForBytecode<Bytecode::kLdaSmi, AccumulatorUse::kWrite>();
Register temp0 = NewTemporary();
Register temp1 = NewTemporary();
optimizer()->DoStar(temp0, BytecodeSourceInfo());
@@ -158,7 +158,7 @@ TEST_F(BytecodeRegisterOptimizerTest, StoresToLocalsImmediate) {
CHECK_EQ(output()->at(0).operand(1),
static_cast<uint32_t>(local.ToOperand()));
- optimizer()->PrepareForBytecode(Bytecode::kReturn);
+ optimizer()->PrepareForBytecode<Bytecode::kReturn, AccumulatorUse::kRead>();
CHECK_EQ(write_count(), 2u);
CHECK_EQ(output()->at(1).bytecode(), Bytecode::kLdar);
CHECK_EQ(output()->at(1).operand(0),
@@ -188,12 +188,13 @@ TEST_F(BytecodeRegisterOptimizerTest, RangeOfTemporariesMaterializedForInput) {
Register parameter = Register::FromParameterIndex(1, 3);
Register temp0 = NewTemporary();
Register temp1 = NewTemporary();
- optimizer()->PrepareForBytecode(Bytecode::kLdaSmi);
+ optimizer()->PrepareForBytecode<Bytecode::kLdaSmi, AccumulatorUse::kWrite>();
optimizer()->DoStar(temp0, BytecodeSourceInfo());
optimizer()->DoMov(parameter, temp1, BytecodeSourceInfo());
CHECK_EQ(write_count(), 0u);
- optimizer()->PrepareForBytecode(Bytecode::kCallJSRuntime);
+ optimizer()
+ ->PrepareForBytecode<Bytecode::kCallJSRuntime, AccumulatorUse::kWrite>();
RegisterList reg_list =
optimizer()->GetInputRegisterList(RegisterList(temp0.index(), 2));
CHECK_EQ(temp0.index(), reg_list.first_register().index());
diff --git a/deps/v8/test/unittests/interpreter/bytecodes-unittest.cc b/deps/v8/test/unittests/interpreter/bytecodes-unittest.cc
index 81868590b9..cfcdf6c3bc 100644
--- a/deps/v8/test/unittests/interpreter/bytecodes-unittest.cc
+++ b/deps/v8/test/unittests/interpreter/bytecodes-unittest.cc
@@ -200,6 +200,126 @@ TEST(Bytecodes, SizesForUnsignedOperands) {
OperandSize::kQuad);
}
+// Helper macros to generate a check for whether a bytecode is in a macro list
+// of bytecodes. We can use these to exhaustively test a check over all
+// bytecodes, both those that should pass and those that should fail the check.
+#define OR_IS_BYTECODE(Name, ...) || bytecode == Bytecode::k##Name
+#define IN_BYTECODE_LIST(BYTECODE, LIST) \
+ ([](Bytecode bytecode) { return false LIST(OR_IS_BYTECODE); }(BYTECODE))
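+// IN_BYTECODE_LIST expands LIST with OR_IS_BYTECODE into an immediately
+// invoked lambda that returns true iff BYTECODE appears in LIST.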
+
+TEST(Bytecodes, IsJump) {
+#define TEST_BYTECODE(Name, ...) \
+ if (IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_BYTECODE_LIST)) { \
+ EXPECT_TRUE(Bytecodes::IsJump(Bytecode::k##Name)); \
+ } else { \
+ EXPECT_FALSE(Bytecodes::IsJump(Bytecode::k##Name)); \
+ }
+
+ BYTECODE_LIST(TEST_BYTECODE)
+#undef TEST_BYTECODE
+}
+
+TEST(Bytecodes, IsForwardJump) {
+#define TEST_BYTECODE(Name, ...) \
+ if (IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_FORWARD_BYTECODE_LIST)) { \
+ EXPECT_TRUE(Bytecodes::IsForwardJump(Bytecode::k##Name)); \
+ } else { \
+ EXPECT_FALSE(Bytecodes::IsForwardJump(Bytecode::k##Name)); \
+ }
+
+ BYTECODE_LIST(TEST_BYTECODE)
+#undef TEST_BYTECODE
+}
+
+TEST(Bytecodes, IsConditionalJump) {
+#define TEST_BYTECODE(Name, ...) \
+ if (IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_CONDITIONAL_BYTECODE_LIST)) { \
+ EXPECT_TRUE(Bytecodes::IsConditionalJump(Bytecode::k##Name)); \
+ } else { \
+ EXPECT_FALSE(Bytecodes::IsConditionalJump(Bytecode::k##Name)); \
+ }
+
+ BYTECODE_LIST(TEST_BYTECODE)
+#undef TEST_BYTECODE
+}
+
+TEST(Bytecodes, IsUnconditionalJump) {
+#define TEST_BYTECODE(Name, ...) \
+ if (IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_UNCONDITIONAL_BYTECODE_LIST)) { \
+ EXPECT_TRUE(Bytecodes::IsUnconditionalJump(Bytecode::k##Name)); \
+ } else { \
+ EXPECT_FALSE(Bytecodes::IsUnconditionalJump(Bytecode::k##Name)); \
+ }
+
+ BYTECODE_LIST(TEST_BYTECODE)
+#undef TEST_BYTECODE
+}
+
+TEST(Bytecodes, IsJumpImmediate) {
+#define TEST_BYTECODE(Name, ...) \
+ if (IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_IMMEDIATE_BYTECODE_LIST)) { \
+ EXPECT_TRUE(Bytecodes::IsJumpImmediate(Bytecode::k##Name)); \
+ } else { \
+ EXPECT_FALSE(Bytecodes::IsJumpImmediate(Bytecode::k##Name)); \
+ }
+
+ BYTECODE_LIST(TEST_BYTECODE)
+#undef TEST_BYTECODE
+}
+
+TEST(Bytecodes, IsJumpConstant) {
+#define TEST_BYTECODE(Name, ...) \
+ if (IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_CONSTANT_BYTECODE_LIST)) { \
+ EXPECT_TRUE(Bytecodes::IsJumpConstant(Bytecode::k##Name)); \
+ } else { \
+ EXPECT_FALSE(Bytecodes::IsJumpConstant(Bytecode::k##Name)); \
+ }
+
+ BYTECODE_LIST(TEST_BYTECODE)
+#undef TEST_BYTECODE
+}
+
+TEST(Bytecodes, IsConditionalJumpImmediate) {
+#define TEST_BYTECODE(Name, ...) \
+ if (IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_CONDITIONAL_BYTECODE_LIST) && \
+ IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_IMMEDIATE_BYTECODE_LIST)) { \
+ EXPECT_TRUE(Bytecodes::IsConditionalJumpImmediate(Bytecode::k##Name)); \
+ } else { \
+ EXPECT_FALSE(Bytecodes::IsConditionalJumpImmediate(Bytecode::k##Name)); \
+ }
+
+ BYTECODE_LIST(TEST_BYTECODE)
+#undef TEST_BYTECODE
+}
+
+TEST(Bytecodes, IsConditionalJumpConstant) {
+#define TEST_BYTECODE(Name, ...) \
+ if (IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_CONDITIONAL_BYTECODE_LIST) && \
+ IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_CONSTANT_BYTECODE_LIST)) { \
+ EXPECT_TRUE(Bytecodes::IsConditionalJumpConstant(Bytecode::k##Name)); \
+ } else { \
+ EXPECT_FALSE(Bytecodes::IsConditionalJumpConstant(Bytecode::k##Name)); \
+ }
+
+ BYTECODE_LIST(TEST_BYTECODE)
+#undef TEST_BYTECODE
+}
+
+TEST(Bytecodes, IsJumpIfToBoolean) {
+#define TEST_BYTECODE(Name, ...) \
+ if (IN_BYTECODE_LIST(Bytecode::k##Name, JUMP_TO_BOOLEAN_BYTECODE_LIST)) { \
+ EXPECT_TRUE(Bytecodes::IsJumpIfToBoolean(Bytecode::k##Name)); \
+ } else { \
+ EXPECT_FALSE(Bytecodes::IsJumpIfToBoolean(Bytecode::k##Name)); \
+ }
+
+ BYTECODE_LIST(TEST_BYTECODE)
+#undef TEST_BYTECODE
+}
+
+#undef OR_IS_BYTECODE
+#undef IN_BYTECODE_LIST
+
TEST(OperandScale, PrefixesRequired) {
CHECK(!Bytecodes::OperandScaleRequiresPrefixBytecode(OperandScale::kSingle));
CHECK(Bytecodes::OperandScaleRequiresPrefixBytecode(OperandScale::kDouble));
diff --git a/deps/v8/test/unittests/interpreter/constant-array-builder-unittest.cc b/deps/v8/test/unittests/interpreter/constant-array-builder-unittest.cc
index fc80f7145d..3a0ec0835c 100644
--- a/deps/v8/test/unittests/interpreter/constant-array-builder-unittest.cc
+++ b/deps/v8/test/unittests/interpreter/constant-array-builder-unittest.cc
@@ -8,6 +8,7 @@
#include "src/handles-inl.h"
#include "src/interpreter/constant-array-builder.h"
#include "src/isolate.h"
+#include "src/objects-inl.h"
#include "test/unittests/test-utils.h"
namespace v8 {
diff --git a/deps/v8/test/unittests/interpreter/interpreter-assembler-unittest.cc b/deps/v8/test/unittests/interpreter/interpreter-assembler-unittest.cc
index b8eb64c884..f80e8a3f80 100644
--- a/deps/v8/test/unittests/interpreter/interpreter-assembler-unittest.cc
+++ b/deps/v8/test/unittests/interpreter/interpreter-assembler-unittest.cc
@@ -5,7 +5,6 @@
#include "test/unittests/interpreter/interpreter-assembler-unittest.h"
#include "src/code-factory.h"
-#include "src/compiler/graph.h"
#include "src/compiler/node.h"
#include "src/interface-descriptors.h"
#include "src/isolate.h"
@@ -21,6 +20,14 @@ using namespace compiler;
namespace interpreter {
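+// Sets up the CodeAssemblerState that InterpreterAssembler expects, configured
+// as a bytecode handler for |bytecode| (dispatch descriptor, bytecode handler
+// code flags and return count).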
+InterpreterAssemblerTestState::InterpreterAssemblerTestState(
+ InterpreterAssemblerTest* test, Bytecode bytecode)
+ : compiler::CodeAssemblerState(
+ test->isolate(), test->zone(),
+ InterpreterDispatchDescriptor(test->isolate()),
+ Code::ComputeFlags(Code::BYTECODE_HANDLER),
+ Bytecodes::ToString(bytecode), Bytecodes::ReturnCount(bytecode)) {}
+
const interpreter::Bytecode kBytecodes[] = {
#define DEFINE_BYTECODE(Name, ...) interpreter::Bytecode::k##Name,
BYTECODE_LIST(DEFINE_BYTECODE)
@@ -44,6 +51,12 @@ Matcher<Node*> IsIntPtrSub(const Matcher<Node*>& lhs_matcher,
: IsInt32Sub(lhs_matcher, rhs_matcher);
}
+Matcher<Node*> IsIntPtrMul(const Matcher<Node*>& lhs_matcher,
+ const Matcher<Node*>& rhs_matcher) {
+ return kPointerSize == 8 ? IsInt64Mul(lhs_matcher, rhs_matcher)
+ : IsInt32Mul(lhs_matcher, rhs_matcher);
+}
+
Matcher<Node*> IsWordShl(const Matcher<Node*>& lhs_matcher,
const Matcher<Node*>& rhs_matcher) {
return kPointerSize == 8 ? IsWord64Shl(lhs_matcher, rhs_matcher)
@@ -62,6 +75,18 @@ Matcher<Node*> IsWordOr(const Matcher<Node*>& lhs_matcher,
: IsWord32Or(lhs_matcher, rhs_matcher);
}
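+// The helpers below pick the pointer-width form of a conversion: an explicit
+// change/truncate node on 64-bit targets, and the identity matcher on 32-bit
+// targets where the word size already matches.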
+Matcher<Node*> IsChangeInt32ToIntPtr(const Matcher<Node*>& matcher) {
+ return kPointerSize == 8 ? IsChangeInt32ToInt64(matcher) : matcher;
+}
+
+Matcher<Node*> IsChangeUint32ToWord(const Matcher<Node*>& matcher) {
+ return kPointerSize == 8 ? IsChangeUint32ToUint64(matcher) : matcher;
+}
+
+Matcher<Node*> IsTruncateWordToWord32(const Matcher<Node*>& matcher) {
+ return kPointerSize == 8 ? IsTruncateInt64ToInt32(matcher) : matcher;
+}
+
InterpreterAssemblerTest::InterpreterAssemblerForTest::
~InterpreterAssemblerForTest() {
// Tests don't necessarily read and write accumulator but
@@ -101,15 +126,11 @@ InterpreterAssemblerTest::InterpreterAssemblerForTest::IsUnsignedByteOperand(
Matcher<Node*>
InterpreterAssemblerTest::InterpreterAssemblerForTest::IsSignedByteOperand(
int offset) {
- Matcher<Node*> load_matcher = IsLoad(
+ return IsLoad(
MachineType::Int8(),
IsParameter(InterpreterDispatchDescriptor::kBytecodeArray),
IsIntPtrAdd(IsParameter(InterpreterDispatchDescriptor::kBytecodeOffset),
IsIntPtrConstant(offset)));
- if (kPointerSize == 8) {
- load_matcher = IsChangeInt32ToInt64(load_matcher);
- }
- return load_matcher;
}
Matcher<Node*>
@@ -148,9 +169,8 @@ InterpreterAssemblerTest::InterpreterAssemblerForTest::IsUnsignedShortOperand(
Matcher<Node*>
InterpreterAssemblerTest::InterpreterAssemblerForTest::IsSignedShortOperand(
int offset) {
- Matcher<Node*> load_matcher;
if (TargetSupportsUnalignedAccess()) {
- load_matcher = IsLoad(
+ return IsLoad(
MachineType::Int16(),
IsParameter(InterpreterDispatchDescriptor::kBytecodeArray),
IsIntPtrAdd(IsParameter(InterpreterDispatchDescriptor::kBytecodeOffset),
@@ -174,14 +194,9 @@ InterpreterAssemblerTest::InterpreterAssemblerForTest::IsSignedShortOperand(
IsParameter(InterpreterDispatchDescriptor::kBytecodeOffset),
IsIntPtrConstant(offset + kMsbOffset + kStep * i)));
}
- load_matcher = IsWord32Or(
- IsWord32Shl(bytes[0], IsInt32Constant(kBitsPerByte)), bytes[1]);
- }
-
- if (kPointerSize == 8) {
- load_matcher = IsChangeInt32ToInt64(load_matcher);
+ return IsWord32Or(IsWord32Shl(bytes[0], IsInt32Constant(kBitsPerByte)),
+ bytes[1]);
}
- return load_matcher;
}
Matcher<Node*>
@@ -224,9 +239,8 @@ InterpreterAssemblerTest::InterpreterAssemblerForTest::IsUnsignedQuadOperand(
Matcher<Node*>
InterpreterAssemblerTest::InterpreterAssemblerForTest::IsSignedQuadOperand(
int offset) {
- Matcher<Node*> load_matcher;
if (TargetSupportsUnalignedAccess()) {
- load_matcher = IsLoad(
+ return IsLoad(
MachineType::Int32(),
IsParameter(InterpreterDispatchDescriptor::kBytecodeArray),
IsIntPtrAdd(IsParameter(InterpreterDispatchDescriptor::kBytecodeOffset),
@@ -250,18 +264,13 @@ InterpreterAssemblerTest::InterpreterAssemblerForTest::IsSignedQuadOperand(
IsParameter(InterpreterDispatchDescriptor::kBytecodeOffset),
IsIntPtrConstant(offset + kMsbOffset + kStep * i)));
}
- load_matcher = IsWord32Or(
+ return IsWord32Or(
IsWord32Shl(bytes[0], IsInt32Constant(3 * kBitsPerByte)),
IsWord32Or(
IsWord32Shl(bytes[1], IsInt32Constant(2 * kBitsPerByte)),
IsWord32Or(IsWord32Shl(bytes[2], IsInt32Constant(1 * kBitsPerByte)),
bytes[3])));
}
-
- if (kPointerSize == 8) {
- load_matcher = IsChangeInt32ToInt64(load_matcher);
- }
- return load_matcher;
}
Matcher<Node*>
@@ -298,7 +307,8 @@ InterpreterAssemblerTest::InterpreterAssemblerForTest::IsUnsignedOperand(
TARGET_TEST_F(InterpreterAssemblerTest, Dispatch) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
Node* tail_call_node = m.Dispatch();
OperandScale operand_scale = OperandScale::kSingle;
@@ -310,9 +320,7 @@ TARGET_TEST_F(InterpreterAssemblerTest, Dispatch) {
m.IsLoad(MachineType::Uint8(),
IsParameter(InterpreterDispatchDescriptor::kBytecodeArray),
next_bytecode_offset_matcher);
- if (kPointerSize == 8) {
- target_bytecode_matcher = IsChangeUint32ToUint64(target_bytecode_matcher);
- }
+ target_bytecode_matcher = IsChangeUint32ToWord(target_bytecode_matcher);
Matcher<Node*> code_target_matcher = m.IsLoad(
MachineType::Pointer(),
IsParameter(InterpreterDispatchDescriptor::kDispatchTable),
@@ -330,10 +338,7 @@ TARGET_TEST_F(InterpreterAssemblerTest, Dispatch) {
m.IsLoad(MachineType::Uint8(),
IsParameter(InterpreterDispatchDescriptor::kBytecodeArray),
after_lookahead_offset);
- if (kPointerSize == 8) {
- after_lookahead_bytecode =
- IsChangeUint32ToUint64(after_lookahead_bytecode);
- }
+ after_lookahead_bytecode = IsChangeUint32ToWord(after_lookahead_bytecode);
target_bytecode_matcher =
IsPhi(MachineType::PointerRepresentation(), target_bytecode_matcher,
after_lookahead_bytecode, _);
@@ -364,7 +369,8 @@ TARGET_TEST_F(InterpreterAssemblerTest, Jump) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
if (!interpreter::Bytecodes::IsJump(bytecode)) return;
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
Node* tail_call_node = m.Jump(m.IntPtrConstant(jump_offset));
Matcher<Node*> next_bytecode_offset_matcher = IsIntPtrAdd(
@@ -372,10 +378,7 @@ TARGET_TEST_F(InterpreterAssemblerTest, Jump) {
IsIntPtrConstant(jump_offset));
Matcher<Node*> target_bytecode_matcher =
m.IsLoad(MachineType::Uint8(), _, next_bytecode_offset_matcher);
- if (kPointerSize == 8) {
- target_bytecode_matcher =
- IsChangeUint32ToUint64(target_bytecode_matcher);
- }
+ target_bytecode_matcher = IsChangeUint32ToWord(target_bytecode_matcher);
Matcher<Node*> code_target_matcher =
m.IsLoad(MachineType::Pointer(),
IsParameter(InterpreterDispatchDescriptor::kDispatchTable),
@@ -398,7 +401,8 @@ TARGET_TEST_F(InterpreterAssemblerTest, BytecodeOperand) {
OperandScale::kSingle, OperandScale::kDouble, OperandScale::kQuadruple};
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
TRACED_FOREACH(interpreter::OperandScale, operand_scale, kOperandScales) {
- InterpreterAssemblerForTest m(this, bytecode, operand_scale);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode, operand_scale);
int number_of_operands =
interpreter::Bytecodes::NumberOfOperands(bytecode);
for (int i = 0; i < number_of_operands; i++) {
@@ -419,7 +423,8 @@ TARGET_TEST_F(InterpreterAssemblerTest, BytecodeOperand) {
break;
case interpreter::OperandType::kIdx:
EXPECT_THAT(m.BytecodeOperandIdx(i),
- m.IsUnsignedOperand(offset, operand_size));
+ IsChangeUint32ToWord(
+ m.IsUnsignedOperand(offset, operand_size)));
break;
case interpreter::OperandType::kUImm:
EXPECT_THAT(m.BytecodeOperandUImm(i),
@@ -436,8 +441,9 @@ TARGET_TEST_F(InterpreterAssemblerTest, BytecodeOperand) {
case interpreter::OperandType::kRegOutPair:
case interpreter::OperandType::kRegOutTriple:
case interpreter::OperandType::kRegPair:
- EXPECT_THAT(m.BytecodeOperandReg(i),
- m.IsSignedOperand(offset, operand_size));
+ EXPECT_THAT(
+ m.BytecodeOperandReg(i),
+ IsChangeInt32ToIntPtr(m.IsSignedOperand(offset, operand_size)));
break;
case interpreter::OperandType::kRuntimeId:
EXPECT_THAT(m.BytecodeOperandRuntimeId(i),
@@ -463,7 +469,8 @@ TARGET_TEST_F(InterpreterAssemblerTest, GetSetAccumulator) {
continue;
}
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
// Should be incoming accumulator if not set.
EXPECT_THAT(m.GetAccumulator(),
IsParameter(InterpreterDispatchDescriptor::kAccumulator));
@@ -485,7 +492,8 @@ TARGET_TEST_F(InterpreterAssemblerTest, GetSetAccumulator) {
TARGET_TEST_F(InterpreterAssemblerTest, GetContext) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
EXPECT_THAT(
m.GetContext(),
m.IsLoad(MachineType::AnyTagged(), IsLoadParentFramePointer(),
@@ -496,7 +504,8 @@ TARGET_TEST_F(InterpreterAssemblerTest, GetContext) {
TARGET_TEST_F(InterpreterAssemblerTest, RegisterLocation) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
Node* reg_index_node = m.IntPtrConstant(44);
Node* reg_location_node = m.RegisterLocation(reg_index_node);
EXPECT_THAT(reg_location_node,
@@ -508,7 +517,8 @@ TARGET_TEST_F(InterpreterAssemblerTest, RegisterLocation) {
TARGET_TEST_F(InterpreterAssemblerTest, LoadRegister) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
Node* reg_index_node = m.IntPtrConstant(44);
Node* load_reg_node = m.LoadRegister(reg_index_node);
EXPECT_THAT(load_reg_node,
@@ -520,7 +530,8 @@ TARGET_TEST_F(InterpreterAssemblerTest, LoadRegister) {
TARGET_TEST_F(InterpreterAssemblerTest, StoreRegister) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
Node* store_value = m.Int32Constant(0xdeadbeef);
Node* reg_index_node = m.IntPtrConstant(44);
Node* store_reg_node = m.StoreRegister(store_value, reg_index_node);
@@ -536,7 +547,8 @@ TARGET_TEST_F(InterpreterAssemblerTest, StoreRegister) {
TARGET_TEST_F(InterpreterAssemblerTest, SmiTag) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
Node* value = m.Int32Constant(44);
EXPECT_THAT(m.SmiTag(value), IsBitcastWordToTaggedSigned(IsIntPtrConstant(
static_cast<intptr_t>(44)
@@ -549,8 +561,9 @@ TARGET_TEST_F(InterpreterAssemblerTest, SmiTag) {
TARGET_TEST_F(InterpreterAssemblerTest, IntPtrAdd) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
- Node* a = m.Int32Constant(0);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
+ Node* a = m.Parameter(0);
Node* b = m.Int32Constant(1);
Node* add = m.IntPtrAdd(a, b);
EXPECT_THAT(add, IsIntPtrAdd(a, b));
@@ -559,8 +572,9 @@ TARGET_TEST_F(InterpreterAssemblerTest, IntPtrAdd) {
TARGET_TEST_F(InterpreterAssemblerTest, IntPtrSub) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
- Node* a = m.Int32Constant(0);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
+ Node* a = m.Parameter(0);
Node* b = m.Int32Constant(1);
Node* add = m.IntPtrSub(a, b);
EXPECT_THAT(add, IsIntPtrSub(a, b));
@@ -569,7 +583,8 @@ TARGET_TEST_F(InterpreterAssemblerTest, IntPtrSub) {
TARGET_TEST_F(InterpreterAssemblerTest, WordShl) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
Node* a = m.IntPtrConstant(0);
Node* add = m.WordShl(a, 10);
EXPECT_THAT(add, IsWordShl(a, IsIntPtrConstant(10)));
@@ -578,25 +593,44 @@ TARGET_TEST_F(InterpreterAssemblerTest, WordShl) {
TARGET_TEST_F(InterpreterAssemblerTest, LoadConstantPoolEntry) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
- Node* index = m.IntPtrConstant(2);
- Node* load_constant = m.LoadConstantPoolEntry(index);
- Matcher<Node*> constant_pool_matcher = m.IsLoad(
- MachineType::AnyTagged(),
- IsParameter(InterpreterDispatchDescriptor::kBytecodeArray),
- IsIntPtrConstant(BytecodeArray::kConstantPoolOffset - kHeapObjectTag));
- EXPECT_THAT(
- load_constant,
- m.IsLoad(MachineType::AnyTagged(), constant_pool_matcher,
- IsIntPtrAdd(
- IsIntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag),
- IsWordShl(index, IsIntPtrConstant(kPointerSizeLog2)))));
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
+ {
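+ // A constant index lets the element offset be folded into a single
+ // constant; the variable-index case in the next block goes through the
+ // generic header-plus-shifted-index computation.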
+ Node* index = m.IntPtrConstant(2);
+ Node* load_constant = m.LoadConstantPoolEntry(index);
+ Matcher<Node*> constant_pool_matcher =
+ m.IsLoad(MachineType::AnyTagged(),
+ IsParameter(InterpreterDispatchDescriptor::kBytecodeArray),
+ IsIntPtrConstant(BytecodeArray::kConstantPoolOffset -
+ kHeapObjectTag));
+ EXPECT_THAT(load_constant,
+ m.IsLoad(MachineType::AnyTagged(), constant_pool_matcher,
+ IsIntPtrConstant(FixedArray::OffsetOfElementAt(2) -
+ kHeapObjectTag)));
+ }
+ {
+ Node* index = m.Parameter(2);
+ Node* load_constant = m.LoadConstantPoolEntry(index);
+ Matcher<Node*> constant_pool_matcher =
+ m.IsLoad(MachineType::AnyTagged(),
+ IsParameter(InterpreterDispatchDescriptor::kBytecodeArray),
+ IsIntPtrConstant(BytecodeArray::kConstantPoolOffset -
+ kHeapObjectTag));
+ EXPECT_THAT(
+ load_constant,
+ m.IsLoad(
+ MachineType::AnyTagged(), constant_pool_matcher,
+ IsIntPtrAdd(
+ IsIntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag),
+ IsWordShl(index, IsIntPtrConstant(kPointerSizeLog2)))));
+ }
}
}
TARGET_TEST_F(InterpreterAssemblerTest, LoadObjectField) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
Node* object = m.IntPtrConstant(0xdeadbeef);
int offset = 16;
Node* load_field = m.LoadObjectField(object, offset);
@@ -608,7 +642,8 @@ TARGET_TEST_F(InterpreterAssemblerTest, LoadObjectField) {
TARGET_TEST_F(InterpreterAssemblerTest, CallRuntime2) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
Node* arg1 = m.Int32Constant(2);
Node* arg2 = m.Int32Constant(3);
Node* context = m.Int32Constant(4);
@@ -622,19 +657,21 @@ TARGET_TEST_F(InterpreterAssemblerTest, CallRuntime) {
const int kResultSizes[] = {1, 2};
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
TRACED_FOREACH(int, result_size, kResultSizes) {
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
Callable builtin = CodeFactory::InterpreterCEntry(isolate(), result_size);
Node* function_id = m.Int32Constant(0);
- Node* first_arg = m.Int32Constant(1);
+ Node* first_arg = m.IntPtrConstant(1);
Node* arg_count = m.Int32Constant(2);
- Node* context = m.Int32Constant(4);
+ Node* context = m.IntPtrConstant(4);
Matcher<Node*> function_table = IsExternalConstant(
ExternalReference::runtime_function_table_address(isolate()));
Matcher<Node*> function = IsIntPtrAdd(
function_table,
- IsInt32Mul(function_id, IsInt32Constant(sizeof(Runtime::Function))));
+ IsChangeUint32ToWord(IsInt32Mul(
+ function_id, IsInt32Constant(sizeof(Runtime::Function)))));
Matcher<Node*> function_entry =
m.IsLoad(MachineType::Pointer(), function,
IsIntPtrConstant(offsetof(Runtime::Function, entry)));
@@ -653,13 +690,14 @@ TARGET_TEST_F(InterpreterAssemblerTest, CallJS) {
TailCallMode::kAllow};
TRACED_FOREACH(TailCallMode, tail_call_mode, tail_call_modes) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
Callable builtin =
CodeFactory::InterpreterPushArgsAndCall(isolate(), tail_call_mode);
- Node* function = m.Int32Constant(0);
- Node* first_arg = m.Int32Constant(1);
+ Node* function = m.IntPtrConstant(0);
+ Node* first_arg = m.IntPtrConstant(1);
Node* arg_count = m.Int32Constant(2);
- Node* context = m.Int32Constant(3);
+ Node* context = m.IntPtrConstant(3);
Node* call_js =
m.CallJS(function, context, first_arg, arg_count, tail_call_mode);
EXPECT_THAT(call_js, IsCall(_, IsHeapConstant(builtin.code()), arg_count,
@@ -670,7 +708,8 @@ TARGET_TEST_F(InterpreterAssemblerTest, CallJS) {
TARGET_TEST_F(InterpreterAssemblerTest, LoadTypeFeedbackVector) {
TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
- InterpreterAssemblerForTest m(this, bytecode);
+ InterpreterAssemblerTestState state(this, bytecode);
+ InterpreterAssemblerForTest m(&state, bytecode);
Node* feedback_vector = m.LoadTypeFeedbackVector();
Matcher<Node*> load_function_matcher =
diff --git a/deps/v8/test/unittests/interpreter/interpreter-assembler-unittest.h b/deps/v8/test/unittests/interpreter/interpreter-assembler-unittest.h
index e3e525273a..210a201d07 100644
--- a/deps/v8/test/unittests/interpreter/interpreter-assembler-unittest.h
+++ b/deps/v8/test/unittests/interpreter/interpreter-assembler-unittest.h
@@ -5,6 +5,7 @@
#ifndef V8_UNITTESTS_INTERPRETER_INTERPRETER_ASSEMBLER_UNITTEST_H_
#define V8_UNITTESTS_INTERPRETER_INTERPRETER_ASSEMBLER_UNITTEST_H_
+#include "src/compiler/code-assembler.h"
#include "src/compiler/machine-operator.h"
#include "src/interpreter/interpreter-assembler.h"
#include "test/unittests/test-utils.h"
@@ -16,6 +17,14 @@ namespace interpreter {
using ::testing::Matcher;
+class InterpreterAssemblerTest;
+
+class InterpreterAssemblerTestState : public compiler::CodeAssemblerState {
+ public:
+ InterpreterAssemblerTestState(InterpreterAssemblerTest* test,
+ Bytecode bytecode);
+};
+
class InterpreterAssemblerTest : public TestWithIsolateAndZone {
public:
InterpreterAssemblerTest() {}
@@ -24,11 +33,10 @@ class InterpreterAssemblerTest : public TestWithIsolateAndZone {
class InterpreterAssemblerForTest final : public InterpreterAssembler {
public:
InterpreterAssemblerForTest(
- InterpreterAssemblerTest* test, Bytecode bytecode,
+ InterpreterAssemblerTestState* state, Bytecode bytecode,
OperandScale operand_scale = OperandScale::kSingle)
- : InterpreterAssembler(test->isolate(), test->zone(), bytecode,
- operand_scale) {}
- ~InterpreterAssemblerForTest() override;
+ : InterpreterAssembler(state, bytecode, operand_scale) {}
+ ~InterpreterAssemblerForTest();
Matcher<compiler::Node*> IsLoad(
const Matcher<compiler::LoadRepresentation>& rep_matcher,
diff --git a/deps/v8/test/unittests/libplatform/default-platform-unittest.cc b/deps/v8/test/unittests/libplatform/default-platform-unittest.cc
index 814b27bc51..473f8d39b8 100644
--- a/deps/v8/test/unittests/libplatform/default-platform-unittest.cc
+++ b/deps/v8/test/unittests/libplatform/default-platform-unittest.cc
@@ -19,6 +19,11 @@ struct MockTask : public Task {
MOCK_METHOD0(Die, void());
};
+struct MockIdleTask : public IdleTask {
+ virtual ~MockIdleTask() { Die(); }
+ MOCK_METHOD1(Run, void(double deadline_in_seconds));
+ MOCK_METHOD0(Die, void());
+};
class DefaultPlatformWithMockTime : public DefaultPlatform {
public:
@@ -126,6 +131,35 @@ TEST(DefaultPlatformTest, PendingDelayedTasksAreDestroyedOnShutdown) {
}
}
+TEST(DefaultPlatformTest, RunIdleTasks) {
+ InSequence s;
+
+ int dummy;
+ Isolate* isolate = reinterpret_cast<Isolate*>(&dummy);
+
+ DefaultPlatformWithMockTime platform;
+
+ StrictMock<MockIdleTask>* task = new StrictMock<MockIdleTask>;
+ platform.CallIdleOnForegroundThread(isolate, task);
+ EXPECT_CALL(*task, Run(42.0 + 23.0));
+ EXPECT_CALL(*task, Die());
+ platform.IncreaseTime(23.0);
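+ // The deadline passed to the idle task is the current mock time (23.0) plus
+ // the idle time budget (42.0), matching the Run expectation above.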
+ platform.RunIdleTasks(isolate, 42.0);
+}
+
+TEST(DefaultPlatformTest, PendingIdleTasksAreDestroyedOnShutdown) {
+ InSequence s;
+
+ int dummy;
+ Isolate* isolate = reinterpret_cast<Isolate*>(&dummy);
+
+ {
+ DefaultPlatformWithMockTime platform;
+ StrictMock<MockIdleTask>* task = new StrictMock<MockIdleTask>;
+ platform.CallIdleOnForegroundThread(isolate, task);
+ EXPECT_CALL(*task, Die());
+ }
+}
} // namespace platform
} // namespace v8
diff --git a/deps/v8/test/unittests/object-unittest.cc b/deps/v8/test/unittests/object-unittest.cc
new file mode 100644
index 0000000000..b09b97dea6
--- /dev/null
+++ b/deps/v8/test/unittests/object-unittest.cc
@@ -0,0 +1,57 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <cmath>
+#include <iostream>
+#include <limits>
+
+#include "src/objects-inl.h"
+#include "src/objects.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace v8 {
+namespace internal {
+
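+// Checks that INSTANCE_TYPE_LIST is sorted in ascending order and that, beyond
+// LAST_NAME_TYPE, instance type values are consecutive.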
+TEST(Object, InstanceTypeListOrder) {
+ int current = 0;
+ int last = -1;
+ InstanceType current_type = static_cast<InstanceType>(current);
+ EXPECT_EQ(current_type, InstanceType::FIRST_TYPE);
+ EXPECT_EQ(current_type, InstanceType::INTERNALIZED_STRING_TYPE);
+#define TEST_INSTANCE_TYPE(type) \
+ current_type = InstanceType::type; \
+ current = static_cast<int>(current_type); \
+ if (current > static_cast<int>(LAST_NAME_TYPE)) { \
+ EXPECT_EQ(last + 1, current); \
+ } \
+ EXPECT_LT(last, current) << " INSTANCE_TYPE_LIST is not ordered: " \
+ << "last = " << static_cast<InstanceType>(last) \
+ << " vs. current = " << current_type; \
+ last = current;
+
+ INSTANCE_TYPE_LIST(TEST_INSTANCE_TYPE)
+#undef TEST_INSTANCE_TYPE
+}
+
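+// Checks that the instance types generated from STRUCT_LIST are consecutive,
+// starting at ACCESSOR_INFO_TYPE.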
+TEST(Object, StructListOrder) {
+ int current = static_cast<int>(InstanceType::ACCESSOR_INFO_TYPE);
+ int last = current - 1;
+ ASSERT_LT(0, last);
+ InstanceType current_type = static_cast<InstanceType>(current);
+#define TEST_STRUCT(type, class, name) \
+ current_type = InstanceType::type##_TYPE; \
+ current = static_cast<int>(current_type); \
+ EXPECT_EQ(last + 1, current) \
+ << " STRUCT_LIST is not ordered: " \
+ << " last = " << static_cast<InstanceType>(last) \
+ << " vs. current = " << current_type; \
+ last = current;
+
+ STRUCT_LIST(TEST_STRUCT)
+#undef TEST_STRUCT
+}
+
+} // namespace internal
+} // namespace v8
diff --git a/deps/v8/test/unittests/run-all-unittests.cc b/deps/v8/test/unittests/run-all-unittests.cc
index 0b62dbff2b..58cac6afa9 100644
--- a/deps/v8/test/unittests/run-all-unittests.cc
+++ b/deps/v8/test/unittests/run-all-unittests.cc
@@ -37,6 +37,9 @@ class DefaultPlatformEnvironment final : public ::testing::Environment {
int main(int argc, char** argv) {
+ // Don't catch SEH exceptions and continue, as the following tests might hang
+ // in a broken environment on Windows.
+ testing::GTEST_FLAG(catch_exceptions) = false;
testing::InitGoogleMock(&argc, argv);
testing::AddGlobalTestEnvironment(new DefaultPlatformEnvironment);
v8::V8::SetFlagsFromCommandLine(&argc, argv, true);
diff --git a/deps/v8/test/unittests/unittests.gyp b/deps/v8/test/unittests/unittests.gyp
index de871b7678..e65f58dc83 100644
--- a/deps/v8/test/unittests/unittests.gyp
+++ b/deps/v8/test/unittests/unittests.gyp
@@ -8,6 +8,7 @@
'variables': {
'v8_code': 1,
'unittests_sources': [ ### gcmole(all) ###
+ 'api/access-check-unittest.cc',
'base/atomic-utils-unittest.cc',
'base/bits-unittest.cc',
'base/cpu-unittest.cc',
@@ -27,6 +28,7 @@
'cancelable-tasks-unittest.cc',
'char-predicates-unittest.cc',
'compiler/branch-elimination-unittest.cc',
+ 'compiler/bytecode-analysis-unittest.cc',
'compiler/checkpoint-elimination-unittest.cc',
'compiler/common-operator-reducer-unittest.cc',
'compiler/common-operator-unittest.cc',
@@ -56,12 +58,12 @@
'compiler/linkage-tail-call-unittest.cc',
'compiler/liveness-analyzer-unittest.cc',
'compiler/live-range-builder.h',
- 'compiler/live-range-unittest.cc',
+ 'compiler/regalloc/live-range-unittest.cc',
'compiler/load-elimination-unittest.cc',
'compiler/loop-peeling-unittest.cc',
'compiler/machine-operator-reducer-unittest.cc',
'compiler/machine-operator-unittest.cc',
- 'compiler/move-optimizer-unittest.cc',
+ 'compiler/regalloc/move-optimizer-unittest.cc',
'compiler/node-cache-unittest.cc',
'compiler/node-matchers-unittest.cc',
'compiler/node-properties-unittest.cc',
@@ -69,7 +71,7 @@
'compiler/node-test-utils.h',
'compiler/node-unittest.cc',
'compiler/opcodes-unittest.cc',
- 'compiler/register-allocator-unittest.cc',
+ 'compiler/regalloc/register-allocator-unittest.cc',
'compiler/schedule-unittest.cc',
'compiler/scheduler-unittest.cc',
'compiler/scheduler-rpo-unittest.cc',
@@ -81,17 +83,22 @@
'compiler/typer-unittest.cc',
'compiler/value-numbering-reducer-unittest.cc',
'compiler/zone-stats-unittest.cc',
+ 'compiler-dispatcher/compiler-dispatcher-helper.cc',
+ 'compiler-dispatcher/compiler-dispatcher-helper.h',
'compiler-dispatcher/compiler-dispatcher-job-unittest.cc',
'compiler-dispatcher/compiler-dispatcher-tracer-unittest.cc',
+ 'compiler-dispatcher/compiler-dispatcher-unittest.cc',
'counters-unittest.cc',
'eh-frame-iterator-unittest.cc',
'eh-frame-writer-unittest.cc',
'interpreter/bytecodes-unittest.cc',
'interpreter/bytecode-array-builder-unittest.cc',
'interpreter/bytecode-array-iterator-unittest.cc',
+ 'interpreter/bytecode-array-random-iterator-unittest.cc',
'interpreter/bytecode-array-writer-unittest.cc',
'interpreter/bytecode-dead-code-optimizer-unittest.cc',
'interpreter/bytecode-decoder-unittest.cc',
+ 'interpreter/bytecode-operands-unittest.cc',
'interpreter/bytecode-peephole-optimizer-unittest.cc',
'interpreter/bytecode-pipeline-unittest.cc',
'interpreter/bytecode-register-allocator-unittest.cc',
@@ -104,6 +111,7 @@
'libplatform/task-queue-unittest.cc',
'libplatform/worker-thread-unittest.cc',
'heap/bitmap-unittest.cc',
+ 'heap/embedder-tracing-unittest.cc',
'heap/gc-idle-time-handler-unittest.cc',
'heap/gc-tracer-unittest.cc',
'heap/marking-unittest.cc',
@@ -111,7 +119,9 @@
'heap/heap-unittest.cc',
'heap/scavenge-job-unittest.cc',
'heap/slot-set-unittest.cc',
+ 'heap/unmapper-unittest.cc',
'locked-queue-unittest.cc',
+ 'object-unittest.cc',
'register-configuration-unittest.cc',
'run-all-unittests.cc',
'source-position-table-unittest.cc',
@@ -123,9 +133,9 @@
'zone/zone-chunk-list-unittest.cc',
'zone/zone-unittest.cc',
'wasm/asm-types-unittest.cc',
- 'wasm/ast-decoder-unittest.cc',
'wasm/control-transfer-unittest.cc',
'wasm/decoder-unittest.cc',
+ 'wasm/function-body-decoder-unittest.cc',
'wasm/leb-helper-unittest.cc',
'wasm/loop-assignment-analysis-unittest.cc',
'wasm/module-decoder-unittest.cc',
diff --git a/deps/v8/test/unittests/unittests.status b/deps/v8/test/unittests/unittests.status
index 51b7de8ac1..5cb42e0ae3 100644
--- a/deps/v8/test/unittests/unittests.status
+++ b/deps/v8/test/unittests/unittests.status
@@ -3,6 +3,11 @@
# found in the LICENSE file.
[
+[ALWAYS, {
+ # BUG(5677): Real timers are flaky
+ 'RuntimeCallStatsTest.*': [SKIP],
+}], # ALWAYS
+
['arch == x87', {
'Ieee754.Expm1': [SKIP],
'Ieee754.Cos': [SKIP],
@@ -17,4 +22,8 @@
'*': [SKIP],
}], # variant == asm_wasm
+['variant == wasm_traps', {
+ '*': [SKIP],
+}], # variant == wasm_traps
+
]
diff --git a/deps/v8/test/unittests/value-serializer-unittest.cc b/deps/v8/test/unittests/value-serializer-unittest.cc
index 1dabd2a17a..7183a0c579 100644
--- a/deps/v8/test/unittests/value-serializer-unittest.cc
+++ b/deps/v8/test/unittests/value-serializer-unittest.cc
@@ -10,6 +10,7 @@
#include "include/v8.h"
#include "src/api.h"
#include "src/base/build_config.h"
+#include "src/objects-inl.h"
#include "test/unittests/test-utils.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -19,6 +20,7 @@ namespace {
using ::testing::_;
using ::testing::Invoke;
+using ::testing::Return;
class ValueSerializerTest : public TestWithIsolate {
protected:
@@ -129,15 +131,22 @@ class ValueSerializerTest : public TestWithIsolate {
encoded_data_functor(buffer);
}
- template <typename MessageFunctor>
- void InvalidEncodeTest(const char* source, const MessageFunctor& functor) {
+ template <typename InputFunctor, typename MessageFunctor>
+ void InvalidEncodeTest(const InputFunctor& input_functor,
+ const MessageFunctor& functor) {
Context::Scope scope(serialization_context());
TryCatch try_catch(isolate());
- Local<Value> input_value = EvaluateScriptForInput(source);
+ Local<Value> input_value = input_functor();
ASSERT_TRUE(DoEncode(input_value).IsNothing());
functor(try_catch.Message());
}
+ template <typename MessageFunctor>
+ void InvalidEncodeTest(const char* source, const MessageFunctor& functor) {
+ InvalidEncodeTest(
+ [this, source]() { return EvaluateScriptForInput(source); }, functor);
+ }
+
void InvalidEncodeTest(const char* source) {
InvalidEncodeTest(source, [](Local<Message>) {});
}
@@ -1735,6 +1744,38 @@ TEST_F(ValueSerializerTest, DecodeInvalidArrayBuffer) {
InvalidDecodeTest({0xff, 0x09, 0x42, 0xff, 0xff, 0x00});
}
+// An array buffer allocator that never has available memory.
+class OOMArrayBufferAllocator : public ArrayBuffer::Allocator {
+ public:
+ void* Allocate(size_t) override { return nullptr; }
+ void* AllocateUninitialized(size_t) override { return nullptr; }
+ void Free(void*, size_t) override {}
+};
+
+TEST_F(ValueSerializerTest, DecodeArrayBufferOOM) {
+ // This test uses less of the harness, because it has to customize the
+ // isolate.
+ OOMArrayBufferAllocator allocator;
+ Isolate::CreateParams params;
+ params.array_buffer_allocator = &allocator;
+ Isolate* isolate = Isolate::New(params);
+ Isolate::Scope isolate_scope(isolate);
+ HandleScope handle_scope(isolate);
+ Local<Context> context = Context::New(isolate);
+ Context::Scope context_scope(context);
+ TryCatch try_catch(isolate);
+
+ const std::vector<uint8_t> data = {0xff, 0x09, 0x3f, 0x00, 0x42,
+ 0x03, 0x00, 0x80, 0xff, 0x00};
+ ValueDeserializer deserializer(isolate, &data[0],
+ static_cast<int>(data.size()), nullptr);
+ deserializer.SetSupportsLegacyWireFormat(true);
+ ASSERT_TRUE(deserializer.ReadHeader(context).FromMaybe(false));
+ ASSERT_FALSE(try_catch.HasCaught());
+ EXPECT_TRUE(deserializer.ReadValue(context).IsEmpty());
+ EXPECT_TRUE(try_catch.HasCaught());
+}
+
// Includes an ArrayBuffer wrapper marked for transfer from the serialization
// context to the deserialization context.
class ValueSerializerTestWithArrayBufferTransfer : public ValueSerializerTest {
@@ -2042,7 +2083,8 @@ class ValueSerializerTestWithSharedArrayBufferTransfer
protected:
static const size_t kTestByteLength = 4;
- ValueSerializerTestWithSharedArrayBufferTransfer() {
+ ValueSerializerTestWithSharedArrayBufferTransfer()
+ : serializer_delegate_(this) {
const uint8_t data[kTestByteLength] = {0x00, 0x01, 0x80, 0xff};
memcpy(data_, data, kTestByteLength);
{
@@ -2060,10 +2102,6 @@ class ValueSerializerTestWithSharedArrayBufferTransfer
const Local<SharedArrayBuffer>& input_buffer() { return input_buffer_; }
const Local<SharedArrayBuffer>& output_buffer() { return output_buffer_; }
- void BeforeEncode(ValueSerializer* serializer) override {
- serializer->TransferSharedArrayBuffer(0, input_buffer_);
- }
-
void BeforeDecode(ValueDeserializer* deserializer) override {
deserializer->TransferSharedArrayBuffer(0, output_buffer_);
}
@@ -2080,6 +2118,39 @@ class ValueSerializerTestWithSharedArrayBufferTransfer
flag_was_enabled_ = false;
}
+ protected:
+// GMock doesn't use the "override" keyword.
+#if __clang__
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Winconsistent-missing-override"
+#endif
+
+ class SerializerDelegate : public ValueSerializer::Delegate {
+ public:
+ explicit SerializerDelegate(
+ ValueSerializerTestWithSharedArrayBufferTransfer* test)
+ : test_(test) {}
+ MOCK_METHOD2(GetSharedArrayBufferId,
+ Maybe<uint32_t>(Isolate* isolate,
+ Local<SharedArrayBuffer> shared_array_buffer));
+ void ThrowDataCloneError(Local<String> message) override {
+ test_->isolate()->ThrowException(Exception::Error(message));
+ }
+
+ private:
+ ValueSerializerTestWithSharedArrayBufferTransfer* test_;
+ };
+
+#if __clang__
+#pragma clang diagnostic pop
+#endif
+
+ ValueSerializer::Delegate* GetSerializerDelegate() override {
+ return &serializer_delegate_;
+ }
+
+ SerializerDelegate serializer_delegate_;
+
private:
static bool flag_was_enabled_;
uint8_t data_[kTestByteLength];
@@ -2092,6 +2163,10 @@ bool ValueSerializerTestWithSharedArrayBufferTransfer::flag_was_enabled_ =
TEST_F(ValueSerializerTestWithSharedArrayBufferTransfer,
RoundTripSharedArrayBufferTransfer) {
+ EXPECT_CALL(serializer_delegate_,
+ GetSharedArrayBufferId(isolate(), input_buffer()))
+ .WillRepeatedly(Return(Just(0U)));
+
RoundTripTest([this]() { return input_buffer(); },
[this](Local<Value> value) {
ASSERT_TRUE(value->IsSharedArrayBuffer());
@@ -2123,12 +2198,6 @@ TEST_F(ValueSerializerTestWithSharedArrayBufferTransfer,
});
}
-TEST_F(ValueSerializerTestWithSharedArrayBufferTransfer,
- SharedArrayBufferMustBeTransferred) {
- // A SharedArrayBuffer which was not marked for transfer should fail encoding.
- InvalidEncodeTest("new SharedArrayBuffer(32)");
-}
-
TEST_F(ValueSerializerTest, UnsupportedHostObject) {
InvalidEncodeTest("new ExampleHostObject()");
InvalidEncodeTest("({ a: new ExampleHostObject() })");
@@ -2402,10 +2471,10 @@ bool ValueSerializerTestWithWasm::g_saved_flag = false;
// A simple module which exports an "increment" function.
// Copied from test/mjsunit/wasm/incrementer.wasm.
const unsigned char kIncrementerWasm[] = {
- 0x00, 0x61, 0x73, 0x6d, 0x0d, 0x00, 0x00, 0x00, 0x01, 0x06, 0x01, 0x60,
- 0x01, 0x7f, 0x01, 0x7f, 0x03, 0x02, 0x01, 0x00, 0x07, 0x0d, 0x01, 0x09,
- 0x69, 0x6e, 0x63, 0x72, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x00, 0x00, 0x0a,
- 0x08, 0x01, 0x06, 0x00, 0x20, 0x00, 0x41, 0x01, 0x6a};
+ 0, 97, 115, 109, 1, 0, 0, 0, 1, 6, 1, 96, 1, 127, 1, 127,
+ 3, 2, 1, 0, 7, 13, 1, 9, 105, 110, 99, 114, 101, 109, 101, 110,
+ 116, 0, 0, 10, 9, 1, 7, 0, 32, 0, 65, 1, 106, 11,
+};
TEST_F(ValueSerializerTestWithWasm, RoundTripWasmModule) {
RoundTripTest(
@@ -2484,6 +2553,7 @@ const unsigned char kSerializedIncrementerWasm[] = {
0x2f, 0x2f};
TEST_F(ValueSerializerTestWithWasm, DecodeWasmModule) {
+ if (true) return; // TODO(mtrofin): fix this test
std::vector<uint8_t> raw(
kSerializedIncrementerWasm,
kSerializedIncrementerWasm + sizeof(kSerializedIncrementerWasm));
@@ -2504,6 +2574,7 @@ const unsigned char kSerializedIncrementerWasmWithInvalidCompiledData[] = {
0x01, 0x06, 0x00, 0x20, 0x00, 0x41, 0x01, 0x6a, 0x00};
TEST_F(ValueSerializerTestWithWasm, DecodeWasmModuleWithInvalidCompiledData) {
+ if (true) return; // TODO(titzer): regenerate this test
std::vector<uint8_t> raw(
kSerializedIncrementerWasmWithInvalidCompiledData,
kSerializedIncrementerWasmWithInvalidCompiledData +
diff --git a/deps/v8/test/unittests/wasm/OWNERS b/deps/v8/test/unittests/wasm/OWNERS
index eda8deabfd..59b1e7dc7a 100644
--- a/deps/v8/test/unittests/wasm/OWNERS
+++ b/deps/v8/test/unittests/wasm/OWNERS
@@ -1,5 +1,6 @@
ahaas@chromium.org
bradnelson@chromium.org
+clemensh@chromium.org
mtrofin@chromium.org
rossberg@chromium.org
titzer@chromium.org
diff --git a/deps/v8/test/unittests/wasm/control-transfer-unittest.cc b/deps/v8/test/unittests/wasm/control-transfer-unittest.cc
index ab2d937758..12712683c1 100644
--- a/deps/v8/test/unittests/wasm/control-transfer-unittest.cc
+++ b/deps/v8/test/unittests/wasm/control-transfer-unittest.cc
@@ -163,10 +163,10 @@ TEST_F(ControlTransferTest, SimpleIfElse_v1) {
0, // @1
kExprIf, // @2
kLocalVoid, // @3
- kExprI8Const, // @4
+ kExprI32Const, // @4
0, // @5
kExprElse, // @6
- kExprI8Const, // @7
+ kExprI32Const, // @7
0, // @8
kExprEnd // @9
};
@@ -260,39 +260,39 @@ TEST_F(ControlTransferTest, Br1) {
TEST_F(ControlTransferTest, Br_v1a) {
byte code[] = {
- kExprBlock, // @0
- kLocalVoid, // @1
- kExprI8Const, // @2
- 0, // @3
- kExprBr, // @4
- 0, // @5
- kExprEnd // @6
+ kExprBlock, // @0
+ kLocalVoid, // @1
+ kExprI32Const, // @2
+ 0, // @3
+ kExprBr, // @4
+ 0, // @5
+ kExprEnd // @6
};
EXPECT_PC_DELTAS({4, 3});
}
TEST_F(ControlTransferTest, Br_v1b) {
byte code[] = {
- kExprBlock, // @0
- kLocalVoid, // @1
- kExprI8Const, // @2
- 0, // @3
- kExprBr, // @4
- 0, // @5
- kExprEnd // @6
+ kExprBlock, // @0
+ kLocalVoid, // @1
+ kExprI32Const, // @2
+ 0, // @3
+ kExprBr, // @4
+ 0, // @5
+ kExprEnd // @6
};
EXPECT_PC_DELTAS({4, 3});
}
TEST_F(ControlTransferTest, Br_v1c) {
byte code[] = {
- kExprI8Const, // @0
- 0, // @1
- kExprBlock, // @2
- kLocalVoid, // @3
- kExprBr, // @4
- 0, // @5
- kExprEnd // @6
+ kExprI32Const, // @0
+ 0, // @1
+ kExprBlock, // @2
+ kLocalVoid, // @3
+ kExprBr, // @4
+ 0, // @5
+ kExprEnd // @6
};
EXPECT_PC_DELTAS({4, 3});
}
@@ -383,80 +383,80 @@ TEST_F(ControlTransferTest, SimpleLoopExit2) {
TEST_F(ControlTransferTest, BrTable0) {
byte code[] = {
- kExprBlock, // @0
- kLocalVoid, // @1
- kExprI8Const, // @2
- 0, // @3
- kExprBrTable, // @4
- 0, // @5
- U32V_1(0), // @6
- kExprEnd // @7
+ kExprBlock, // @0
+ kLocalVoid, // @1
+ kExprI32Const, // @2
+ 0, // @3
+ kExprBrTable, // @4
+ 0, // @5
+ U32V_1(0), // @6
+ kExprEnd // @7
};
EXPECT_PC_DELTAS({4, 4});
}
TEST_F(ControlTransferTest, BrTable0_v1a) {
byte code[] = {
- kExprBlock, // @0
- kLocalVoid, // @1
- kExprI8Const, // @2
- 0, // @3
- kExprI8Const, // @4
- 0, // @5
- kExprBrTable, // @6
- 0, // @7
- U32V_1(0), // @8
- kExprEnd // @9
+ kExprBlock, // @0
+ kLocalVoid, // @1
+ kExprI32Const, // @2
+ 0, // @3
+ kExprI32Const, // @4
+ 0, // @5
+ kExprBrTable, // @6
+ 0, // @7
+ U32V_1(0), // @8
+ kExprEnd // @9
};
EXPECT_PC_DELTAS({6, 4});
}
TEST_F(ControlTransferTest, BrTable0_v1b) {
byte code[] = {
- kExprBlock, // @0
- kLocalVoid, // @1
- kExprI8Const, // @2
- 0, // @3
- kExprI8Const, // @4
- 0, // @5
- kExprBrTable, // @6
- 0, // @7
- U32V_1(0), // @8
- kExprEnd // @9
+ kExprBlock, // @0
+ kLocalVoid, // @1
+ kExprI32Const, // @2
+ 0, // @3
+ kExprI32Const, // @4
+ 0, // @5
+ kExprBrTable, // @6
+ 0, // @7
+ U32V_1(0), // @8
+ kExprEnd // @9
};
EXPECT_PC_DELTAS({6, 4});
}
TEST_F(ControlTransferTest, BrTable1) {
byte code[] = {
- kExprBlock, // @0
- kLocalVoid, // @1
- kExprI8Const, // @2
- 0, // @3
- kExprBrTable, // @4
- 1, // @5
- U32V_1(0), // @6
- U32V_1(0), // @7
- kExprEnd // @8
+ kExprBlock, // @0
+ kLocalVoid, // @1
+ kExprI32Const, // @2
+ 0, // @3
+ kExprBrTable, // @4
+ 1, // @5
+ U32V_1(0), // @6
+ U32V_1(0), // @7
+ kExprEnd // @8
};
EXPECT_PC_DELTAS({4, 5}, {5, 4});
}
TEST_F(ControlTransferTest, BrTable2) {
byte code[] = {
- kExprBlock, // @0
- kLocalVoid, // @1
- kExprBlock, // @2
- kLocalVoid, // @3
- kExprI8Const, // @4
- 0, // @5
- kExprBrTable, // @6
- 2, // @7
- U32V_1(0), // @8
- U32V_1(0), // @9
- U32V_1(1), // @10
- kExprEnd, // @11
- kExprEnd // @12
+ kExprBlock, // @0
+ kLocalVoid, // @1
+ kExprBlock, // @2
+ kLocalVoid, // @3
+ kExprI32Const, // @4
+ 0, // @5
+ kExprBrTable, // @6
+ 2, // @7
+ U32V_1(0), // @8
+ U32V_1(0), // @9
+ U32V_1(1), // @10
+ kExprEnd, // @11
+ kExprEnd // @12
};
EXPECT_PC_DELTAS({6, 6}, {7, 5}, {8, 5});
}
diff --git a/deps/v8/test/unittests/wasm/ast-decoder-unittest.cc b/deps/v8/test/unittests/wasm/function-body-decoder-unittest.cc
index e630ac4721..82fdaf9866 100644
--- a/deps/v8/test/unittests/wasm/ast-decoder-unittest.cc
+++ b/deps/v8/test/unittests/wasm/function-body-decoder-unittest.cc
@@ -10,7 +10,7 @@
#include "src/objects.h"
-#include "src/wasm/ast-decoder.h"
+#include "src/wasm/function-body-decoder.h"
#include "src/wasm/signature-map.h"
#include "src/wasm/wasm-macro-gen.h"
#include "src/wasm/wasm-module.h"
@@ -32,7 +32,7 @@ static const byte kCodeGetLocal1[] = {kExprGetLocal, 1};
static const byte kCodeSetLocal0[] = {WASM_SET_LOCAL(0, WASM_ZERO)};
static const byte kCodeTeeLocal0[] = {WASM_TEE_LOCAL(0, WASM_ZERO)};
-static const LocalType kLocalTypes[] = {kAstI32, kAstI64, kAstF32, kAstF64};
+static const ValueType kValueTypes[] = {kWasmI32, kWasmI64, kWasmF32, kWasmF64};
static const MachineType machineTypes[] = {
MachineType::Int8(), MachineType::Uint8(), MachineType::Int16(),
MachineType::Uint16(), MachineType::Int32(), MachineType::Uint32(),
@@ -83,11 +83,11 @@ static const WasmOpcode kInt32BinopOpcodes[] = {
static bool old_eh_flag;
-class AstDecoderTest : public TestWithZone {
+class FunctionBodyDecoderTest : public TestWithZone {
public:
- typedef std::pair<uint32_t, LocalType> LocalsDecl;
+ typedef std::pair<uint32_t, ValueType> LocalsDecl;
- AstDecoderTest() : module(nullptr), local_decls(zone()) {}
+ FunctionBodyDecoderTest() : module(nullptr), local_decls(zone()) {}
static void SetUpTestCase() { old_eh_flag = FLAG_wasm_eh_prototype; }
@@ -100,18 +100,37 @@ class AstDecoderTest : public TestWithZone {
ModuleEnv* module;
LocalDeclEncoder local_decls;
- void AddLocals(LocalType type, uint32_t count) {
+ void AddLocals(ValueType type, uint32_t count) {
local_decls.AddLocals(count, type);
}
+ void PrepareBytecode(const byte** startp, const byte** endp) {
+ const byte* start = *startp;
+ const byte* end = *endp;
+ size_t locals_size = local_decls.Size();
+ size_t total_size = end - start + locals_size + 1;
+ byte* buffer = static_cast<byte*>(zone()->New(total_size));
+ // Prepend the local decls to the code.
+ local_decls.Emit(buffer);
+ // Emit the code.
+ memcpy(buffer + locals_size, start, end - start);
+ // Append an extra end opcode.
+ buffer[total_size - 1] = kExprEnd;
+
+ *startp = buffer;
+ *endp = buffer + total_size;
+ }
+
// Prepends local variable declarations and renders nice error messages for
// verification failures.
void Verify(ErrorCode expected, FunctionSig* sig, const byte* start,
const byte* end) {
- local_decls.Prepend(zone(), &start, &end);
+ PrepareBytecode(&start, &end);
+
// Verify the code.
- DecodeResult result =
- VerifyWasmCode(zone()->allocator(), module, sig, start, end);
+ DecodeResult result = VerifyWasmCode(
+ zone()->allocator(), module == nullptr ? nullptr : module->module, sig,
+ start, end);
if (result.error_code != expected) {
ptrdiff_t pc = result.error_pc - result.start;
@@ -139,10 +158,10 @@ class AstDecoderTest : public TestWithZone {
EXPECT_VERIFIES_SC(success, code);
// Try all combinations of return and parameter types.
- for (size_t i = 0; i < arraysize(kLocalTypes); i++) {
- for (size_t j = 0; j < arraysize(kLocalTypes); j++) {
- for (size_t k = 0; k < arraysize(kLocalTypes); k++) {
- LocalType types[] = {kLocalTypes[i], kLocalTypes[j], kLocalTypes[k]};
+ for (size_t i = 0; i < arraysize(kValueTypes); i++) {
+ for (size_t j = 0; j < arraysize(kValueTypes); j++) {
+ for (size_t k = 0; k < arraysize(kValueTypes); k++) {
+ ValueType types[] = {kValueTypes[i], kValueTypes[j], kValueTypes[k]};
if (types[0] != success->GetReturn(0) ||
types[1] != success->GetParam(0) ||
types[2] != success->GetParam(1)) {
@@ -159,19 +178,19 @@ class AstDecoderTest : public TestWithZone {
TestUnop(opcode, success->GetReturn(), success->GetParam(0));
}
- void TestUnop(WasmOpcode opcode, LocalType ret_type, LocalType param_type) {
+ void TestUnop(WasmOpcode opcode, ValueType ret_type, ValueType param_type) {
// Return(op(local[0]))
byte code[] = {WASM_UNOP(opcode, WASM_GET_LOCAL(0))};
{
- LocalType types[] = {ret_type, param_type};
+ ValueType types[] = {ret_type, param_type};
FunctionSig sig(1, 1, types);
EXPECT_VERIFIES_SC(&sig, code);
}
// Try all combinations of return and parameter types.
- for (size_t i = 0; i < arraysize(kLocalTypes); i++) {
- for (size_t j = 0; j < arraysize(kLocalTypes); j++) {
- LocalType types[] = {kLocalTypes[i], kLocalTypes[j]};
+ for (size_t i = 0; i < arraysize(kValueTypes); i++) {
+ for (size_t j = 0; j < arraysize(kValueTypes); j++) {
+ ValueType types[] = {kValueTypes[i], kValueTypes[j]};
if (types[0] != ret_type || types[1] != param_type) {
// Test signature mismatch.
FunctionSig sig(1, 1, types);
@@ -182,35 +201,89 @@ class AstDecoderTest : public TestWithZone {
}
};
-TEST_F(AstDecoderTest, Int8Const) {
- byte code[] = {kExprI8Const, 0};
- for (int i = -128; i < 128; i++) {
- code[1] = static_cast<byte>(i);
+namespace {
+// A helper for tests that require a module environment for functions,
+// globals, or memories.
+class TestModuleEnv : public ModuleEnv {
+ public:
+ explicit TestModuleEnv(ModuleOrigin origin = kWasmOrigin)
+ : ModuleEnv(&mod, nullptr) {
+ mod.origin = origin;
+ }
+ byte AddGlobal(ValueType type, bool mutability = true) {
+ mod.globals.push_back({type, mutability, WasmInitExpr(), 0, false, false});
+ CHECK(mod.globals.size() <= 127);
+ return static_cast<byte>(mod.globals.size() - 1);
+ }
+ byte AddSignature(FunctionSig* sig) {
+ mod.signatures.push_back(sig);
+ CHECK(mod.signatures.size() <= 127);
+ return static_cast<byte>(mod.signatures.size() - 1);
+ }
+ byte AddFunction(FunctionSig* sig) {
+ mod.functions.push_back({sig, // sig
+ 0, // func_index
+ 0, // sig_index
+ 0, // name_offset
+ 0, // name_length
+ 0, // code_start_offset
+ 0, // code_end_offset
+ false, // import
+ false}); // export
+ CHECK(mod.functions.size() <= 127);
+ return static_cast<byte>(mod.functions.size() - 1);
+ }
+ byte AddImport(FunctionSig* sig) {
+ byte result = AddFunction(sig);
+ mod.functions[result].imported = true;
+ return result;
+ }
+
+ void InitializeMemory() {
+ mod.has_memory = true;
+ mod.min_mem_pages = 1;
+ mod.max_mem_pages = 100;
+ }
+
+ void InitializeFunctionTable() {
+ mod.function_tables.push_back(
+ {0, 0, true, std::vector<int32_t>(), false, false, SignatureMap()});
+ }
+
+ private:
+ WasmModule mod;
+};
+} // namespace
+
+TEST_F(FunctionBodyDecoderTest, Int32Const1) {
+ byte code[] = {kExprI32Const, 0};
+ for (int i = -64; i <= 63; i++) {
+ code[1] = static_cast<byte>(i & 0x7F);
EXPECT_VERIFIES_C(i_i, code);
}
}
-TEST_F(AstDecoderTest, EmptyFunction) {
+TEST_F(FunctionBodyDecoderTest, EmptyFunction) {
byte code[] = {0};
Verify(kSuccess, sigs.v_v(), code, code);
Verify(kError, sigs.i_i(), code, code);
}
-TEST_F(AstDecoderTest, IncompleteIf1) {
+TEST_F(FunctionBodyDecoderTest, IncompleteIf1) {
byte code[] = {kExprIf};
EXPECT_FAILURE_C(v_v, code);
EXPECT_FAILURE_C(i_i, code);
}
-TEST_F(AstDecoderTest, Int8Const_fallthru) {
+TEST_F(FunctionBodyDecoderTest, Int32Const_fallthru) {
EXPECT_VERIFIES(i_i, WASM_I32V_1(0));
}
-TEST_F(AstDecoderTest, Int8Const_fallthru2) {
+TEST_F(FunctionBodyDecoderTest, Int32Const_fallthru2) {
EXPECT_FAILURE(i_i, WASM_I32V_1(0), WASM_I32V_1(1));
}
-TEST_F(AstDecoderTest, Int32Const) {
+TEST_F(FunctionBodyDecoderTest, Int32Const) {
const int kInc = 4498211;
for (int32_t i = kMinInt; i < kMaxInt - kInc; i = i + kInc) {
// TODO(binji): expand test for other sized int32s; 1 through 5 bytes.
@@ -219,7 +292,7 @@ TEST_F(AstDecoderTest, Int32Const) {
}
}
-TEST_F(AstDecoderTest, Int64Const) {
+TEST_F(FunctionBodyDecoderTest, Int64Const) {
const int kInc = 4498211;
for (int32_t i = kMinInt; i < kMaxInt - kInc; i = i + kInc) {
byte code[] = {WASM_I64V((static_cast<int64_t>(i) << 32) | i)};
@@ -227,7 +300,7 @@ TEST_F(AstDecoderTest, Int64Const) {
}
}
-TEST_F(AstDecoderTest, Float32Const) {
+TEST_F(FunctionBodyDecoderTest, Float32Const) {
byte code[] = {kExprF32Const, 0, 0, 0, 0};
float* ptr = reinterpret_cast<float*>(code + 1);
for (int i = 0; i < 30; i++) {
@@ -236,7 +309,7 @@ TEST_F(AstDecoderTest, Float32Const) {
}
}
-TEST_F(AstDecoderTest, Float64Const) {
+TEST_F(FunctionBodyDecoderTest, Float64Const) {
byte code[] = {kExprF64Const, 0, 0, 0, 0, 0, 0, 0, 0};
double* ptr = reinterpret_cast<double*>(code + 1);
for (int i = 0; i < 30; i++) {
@@ -245,7 +318,7 @@ TEST_F(AstDecoderTest, Float64Const) {
}
}
-TEST_F(AstDecoderTest, Int32Const_off_end) {
+TEST_F(FunctionBodyDecoderTest, Int32Const_off_end) {
byte code[] = {kExprI32Const, 0xaa, 0xbb, 0xcc, 0x44};
for (int size = 1; size <= 4; size++) {
@@ -253,21 +326,21 @@ TEST_F(AstDecoderTest, Int32Const_off_end) {
}
}
-TEST_F(AstDecoderTest, GetLocal0_param) {
+TEST_F(FunctionBodyDecoderTest, GetLocal0_param) {
EXPECT_VERIFIES_C(i_i, kCodeGetLocal0);
}
-TEST_F(AstDecoderTest, GetLocal0_local) {
- AddLocals(kAstI32, 1);
+TEST_F(FunctionBodyDecoderTest, GetLocal0_local) {
+ AddLocals(kWasmI32, 1);
EXPECT_VERIFIES_C(i_v, kCodeGetLocal0);
}
-TEST_F(AstDecoderTest, TooManyLocals) {
- AddLocals(kAstI32, 4034986500);
+TEST_F(FunctionBodyDecoderTest, TooManyLocals) {
+ AddLocals(kWasmI32, 4034986500);
EXPECT_FAILURE_C(i_v, kCodeGetLocal0);
}
-TEST_F(AstDecoderTest, GetLocal0_param_n) {
+TEST_F(FunctionBodyDecoderTest, GetLocal0_param_n) {
FunctionSig* array[] = {sigs.i_i(), sigs.i_ii(), sigs.i_iii()};
for (size_t i = 0; i < arraysize(array); i++) {
@@ -275,9 +348,9 @@ TEST_F(AstDecoderTest, GetLocal0_param_n) {
}
}
-TEST_F(AstDecoderTest, GetLocalN_local) {
+TEST_F(FunctionBodyDecoderTest, GetLocalN_local) {
for (byte i = 1; i < 8; i++) {
- AddLocals(kAstI32, 1);
+ AddLocals(kWasmI32, 1);
for (byte j = 0; j < i; j++) {
byte code[] = {kExprGetLocal, j};
EXPECT_VERIFIES_C(i_v, code);
@@ -285,42 +358,42 @@ TEST_F(AstDecoderTest, GetLocalN_local) {
}
}
-TEST_F(AstDecoderTest, GetLocal0_fail_no_params) {
+TEST_F(FunctionBodyDecoderTest, GetLocal0_fail_no_params) {
EXPECT_FAILURE_C(i_v, kCodeGetLocal0);
}
-TEST_F(AstDecoderTest, GetLocal1_fail_no_locals) {
+TEST_F(FunctionBodyDecoderTest, GetLocal1_fail_no_locals) {
EXPECT_FAILURE_C(i_i, kCodeGetLocal1);
}
-TEST_F(AstDecoderTest, GetLocal_off_end) {
+TEST_F(FunctionBodyDecoderTest, GetLocal_off_end) {
static const byte code[] = {kExprGetLocal};
EXPECT_FAILURE_C(i_i, code);
}
-TEST_F(AstDecoderTest, NumLocalBelowLimit) {
- AddLocals(kAstI32, kMaxNumWasmLocals - 1);
+TEST_F(FunctionBodyDecoderTest, NumLocalBelowLimit) {
+ AddLocals(kWasmI32, kMaxNumWasmLocals - 1);
EXPECT_VERIFIES(v_v, WASM_NOP);
}
-TEST_F(AstDecoderTest, NumLocalAtLimit) {
- AddLocals(kAstI32, kMaxNumWasmLocals);
+TEST_F(FunctionBodyDecoderTest, NumLocalAtLimit) {
+ AddLocals(kWasmI32, kMaxNumWasmLocals);
EXPECT_VERIFIES(v_v, WASM_NOP);
}
-TEST_F(AstDecoderTest, NumLocalAboveLimit) {
- AddLocals(kAstI32, kMaxNumWasmLocals + 1);
+TEST_F(FunctionBodyDecoderTest, NumLocalAboveLimit) {
+ AddLocals(kWasmI32, kMaxNumWasmLocals + 1);
EXPECT_FAILURE(v_v, WASM_NOP);
}
-TEST_F(AstDecoderTest, GetLocal_varint) {
- const int kMaxLocals = kMaxNumWasmLocals;
- AddLocals(kAstI32, kMaxLocals);
+TEST_F(FunctionBodyDecoderTest, GetLocal_varint) {
+ const int kMaxLocals = kMaxNumWasmLocals - 1;
+ AddLocals(kWasmI32, kMaxLocals);
EXPECT_VERIFIES(i_i, kExprGetLocal, U32V_1(66));
EXPECT_VERIFIES(i_i, kExprGetLocal, U32V_2(7777));
- EXPECT_VERIFIES(i_i, kExprGetLocal, U32V_3(888888));
- EXPECT_VERIFIES(i_i, kExprGetLocal, U32V_4(3999999));
+ EXPECT_VERIFIES(i_i, kExprGetLocal, U32V_3(8888));
+ EXPECT_VERIFIES(i_i, kExprGetLocal, U32V_4(9999));
EXPECT_VERIFIES(i_i, kExprGetLocal, U32V_5(kMaxLocals - 1));
@@ -334,7 +407,15 @@ TEST_F(AstDecoderTest, GetLocal_varint) {
EXPECT_FAILURE(i_v, kExprGetLocal, U32V_4(kMaxLocals + 1));
}
-TEST_F(AstDecoderTest, Binops_off_end) {
+TEST_F(FunctionBodyDecoderTest, GetLocal_toomany) {
+ AddLocals(kWasmI32, kMaxNumWasmLocals - 100);
+ AddLocals(kWasmI32, 100);
+
+ EXPECT_VERIFIES(i_v, kExprGetLocal, U32V_1(66));
+ EXPECT_FAILURE(i_i, kExprGetLocal, U32V_1(66));
+}
+
+TEST_F(FunctionBodyDecoderTest, Binops_off_end) {
byte code1[] = {0}; // [opcode]
for (size_t i = 0; i < arraysize(kInt32BinopOpcodes); i++) {
code1[0] = kInt32BinopOpcodes[i];
@@ -355,70 +436,70 @@ TEST_F(AstDecoderTest, Binops_off_end) {
}
}
-TEST_F(AstDecoderTest, BinopsAcrossBlock1) {
+TEST_F(FunctionBodyDecoderTest, BinopsAcrossBlock1) {
static const byte code[] = {WASM_ZERO, kExprBlock, WASM_ZERO, kExprI32Add,
kExprEnd};
EXPECT_FAILURE_C(i_i, code);
}
-TEST_F(AstDecoderTest, BinopsAcrossBlock2) {
+TEST_F(FunctionBodyDecoderTest, BinopsAcrossBlock2) {
static const byte code[] = {WASM_ZERO, WASM_ZERO, kExprBlock, kExprI32Add,
kExprEnd};
EXPECT_FAILURE_C(i_i, code);
}
-TEST_F(AstDecoderTest, BinopsAcrossBlock3) {
+TEST_F(FunctionBodyDecoderTest, BinopsAcrossBlock3) {
static const byte code[] = {WASM_ZERO, WASM_ZERO, kExprIf, kExprI32Add,
kExprElse, kExprI32Add, kExprEnd};
EXPECT_FAILURE_C(i_i, code);
}
-TEST_F(AstDecoderTest, Nop) {
+TEST_F(FunctionBodyDecoderTest, Nop) {
static const byte code[] = {kExprNop};
EXPECT_VERIFIES_C(v_v, code);
}
-TEST_F(AstDecoderTest, SetLocal0_void) {
+TEST_F(FunctionBodyDecoderTest, SetLocal0_void) {
EXPECT_FAILURE(i_i, WASM_SET_LOCAL(0, WASM_ZERO));
}
-TEST_F(AstDecoderTest, SetLocal0_param) {
+TEST_F(FunctionBodyDecoderTest, SetLocal0_param) {
EXPECT_FAILURE_C(i_i, kCodeSetLocal0);
EXPECT_FAILURE_C(f_ff, kCodeSetLocal0);
EXPECT_FAILURE_C(d_dd, kCodeSetLocal0);
}
-TEST_F(AstDecoderTest, TeeLocal0_param) {
+TEST_F(FunctionBodyDecoderTest, TeeLocal0_param) {
EXPECT_VERIFIES_C(i_i, kCodeTeeLocal0);
EXPECT_FAILURE_C(f_ff, kCodeTeeLocal0);
EXPECT_FAILURE_C(d_dd, kCodeTeeLocal0);
}
-TEST_F(AstDecoderTest, SetLocal0_local) {
+TEST_F(FunctionBodyDecoderTest, SetLocal0_local) {
EXPECT_FAILURE_C(i_v, kCodeSetLocal0);
EXPECT_FAILURE_C(v_v, kCodeSetLocal0);
- AddLocals(kAstI32, 1);
+ AddLocals(kWasmI32, 1);
EXPECT_FAILURE_C(i_v, kCodeSetLocal0);
EXPECT_VERIFIES_C(v_v, kCodeSetLocal0);
}
-TEST_F(AstDecoderTest, TeeLocal0_local) {
+TEST_F(FunctionBodyDecoderTest, TeeLocal0_local) {
EXPECT_FAILURE_C(i_v, kCodeTeeLocal0);
- AddLocals(kAstI32, 1);
+ AddLocals(kWasmI32, 1);
EXPECT_VERIFIES_C(i_v, kCodeTeeLocal0);
}
-TEST_F(AstDecoderTest, TeeLocalN_local) {
+TEST_F(FunctionBodyDecoderTest, TeeLocalN_local) {
for (byte i = 1; i < 8; i++) {
- AddLocals(kAstI32, 1);
+ AddLocals(kWasmI32, 1);
for (byte j = 0; j < i; j++) {
- EXPECT_FAILURE(v_v, WASM_TEE_LOCAL(j, WASM_I8(i)));
- EXPECT_VERIFIES(i_i, WASM_TEE_LOCAL(j, WASM_I8(i)));
+ EXPECT_FAILURE(v_v, WASM_TEE_LOCAL(j, WASM_I32V_1(i)));
+ EXPECT_VERIFIES(i_i, WASM_TEE_LOCAL(j, WASM_I32V_1(i)));
}
}
}
-TEST_F(AstDecoderTest, BlockN) {
+TEST_F(FunctionBodyDecoderTest, BlockN) {
const int kMaxSize = 200;
byte buffer[kMaxSize + 3];
@@ -433,33 +514,29 @@ TEST_F(AstDecoderTest, BlockN) {
#define WASM_EMPTY_BLOCK kExprBlock, kLocalVoid, kExprEnd
-TEST_F(AstDecoderTest, Block0) {
+TEST_F(FunctionBodyDecoderTest, Block0) {
static const byte code[] = {WASM_EMPTY_BLOCK};
EXPECT_VERIFIES_C(v_v, code);
EXPECT_FAILURE_C(i_i, code);
}
-TEST_F(AstDecoderTest, Block0_fallthru1) {
+TEST_F(FunctionBodyDecoderTest, Block0_fallthru1) {
static const byte code[] = {WASM_BLOCK(WASM_EMPTY_BLOCK)};
EXPECT_VERIFIES_C(v_v, code);
EXPECT_FAILURE_C(i_i, code);
}
-TEST_F(AstDecoderTest, Block0Block0) {
+TEST_F(FunctionBodyDecoderTest, Block0Block0) {
static const byte code[] = {WASM_EMPTY_BLOCK, WASM_EMPTY_BLOCK};
EXPECT_VERIFIES_C(v_v, code);
EXPECT_FAILURE_C(i_i, code);
}
-TEST_F(AstDecoderTest, Block0_end) {
- EXPECT_VERIFIES(v_v, WASM_EMPTY_BLOCK, kExprEnd);
-}
-
-TEST_F(AstDecoderTest, Block0_end_end) {
- EXPECT_FAILURE(v_v, WASM_EMPTY_BLOCK, kExprEnd, kExprEnd);
+TEST_F(FunctionBodyDecoderTest, Block0_end) {
+ EXPECT_FAILURE(v_v, WASM_EMPTY_BLOCK, kExprEnd);
}
-TEST_F(AstDecoderTest, Block1) {
+TEST_F(FunctionBodyDecoderTest, Block1) {
byte code[] = {WASM_BLOCK_I(WASM_GET_LOCAL(0))};
EXPECT_VERIFIES_C(i_i, code);
EXPECT_FAILURE_C(v_i, code);
@@ -468,7 +545,7 @@ TEST_F(AstDecoderTest, Block1) {
EXPECT_FAILURE_C(i_d, code);
}
-TEST_F(AstDecoderTest, Block1_i) {
+TEST_F(FunctionBodyDecoderTest, Block1_i) {
byte code[] = {WASM_BLOCK_I(WASM_ZERO)};
EXPECT_VERIFIES_C(i_i, code);
EXPECT_FAILURE_C(f_ff, code);
@@ -476,7 +553,7 @@ TEST_F(AstDecoderTest, Block1_i) {
EXPECT_FAILURE_C(l_ll, code);
}
-TEST_F(AstDecoderTest, Block1_f) {
+TEST_F(FunctionBodyDecoderTest, Block1_f) {
byte code[] = {WASM_BLOCK_F(WASM_F32(0))};
EXPECT_FAILURE_C(i_i, code);
EXPECT_VERIFIES_C(f_ff, code);
@@ -484,23 +561,23 @@ TEST_F(AstDecoderTest, Block1_f) {
EXPECT_FAILURE_C(l_ll, code);
}
-TEST_F(AstDecoderTest, Block1_continue) {
+TEST_F(FunctionBodyDecoderTest, Block1_continue) {
EXPECT_VERIFIES(v_v, WASM_LOOP(WASM_BR(0)));
}
-TEST_F(AstDecoderTest, Block1_br) {
+TEST_F(FunctionBodyDecoderTest, Block1_br) {
EXPECT_VERIFIES(v_v, B1(WASM_BR(0)));
EXPECT_VERIFIES(v_v, B1(WASM_BR(1)));
EXPECT_FAILURE(v_v, B1(WASM_BR(2)));
}
-TEST_F(AstDecoderTest, Block2_br) {
+TEST_F(FunctionBodyDecoderTest, Block2_br) {
EXPECT_VERIFIES(v_v, B2(WASM_NOP, WASM_BR(0)));
EXPECT_VERIFIES(v_v, B2(WASM_BR(0), WASM_NOP));
EXPECT_VERIFIES(v_v, B2(WASM_BR(0), WASM_BR(0)));
}
-TEST_F(AstDecoderTest, Block2) {
+TEST_F(FunctionBodyDecoderTest, Block2) {
EXPECT_FAILURE(i_i, WASM_BLOCK(WASM_NOP, WASM_NOP));
EXPECT_FAILURE(i_i, WASM_BLOCK_I(WASM_NOP, WASM_NOP));
EXPECT_VERIFIES(i_i, WASM_BLOCK_I(WASM_NOP, WASM_ZERO));
@@ -508,25 +585,26 @@ TEST_F(AstDecoderTest, Block2) {
EXPECT_FAILURE(i_i, WASM_BLOCK_I(WASM_ZERO, WASM_ZERO));
}
-TEST_F(AstDecoderTest, Block2b) {
+TEST_F(FunctionBodyDecoderTest, Block2b) {
byte code[] = {WASM_BLOCK_I(WASM_SET_LOCAL(0, WASM_ZERO), WASM_ZERO)};
EXPECT_VERIFIES_C(i_i, code);
EXPECT_FAILURE_C(v_v, code);
EXPECT_FAILURE_C(f_ff, code);
}
-TEST_F(AstDecoderTest, Block2_fallthru) {
+TEST_F(FunctionBodyDecoderTest, Block2_fallthru) {
EXPECT_VERIFIES(
i_i, B2(WASM_SET_LOCAL(0, WASM_ZERO), WASM_SET_LOCAL(0, WASM_ZERO)),
- WASM_I8(23));
+ WASM_I32V_1(23));
}
-TEST_F(AstDecoderTest, Block3) {
- EXPECT_VERIFIES(i_i, WASM_BLOCK_I(WASM_SET_LOCAL(0, WASM_ZERO),
- WASM_SET_LOCAL(0, WASM_ZERO), WASM_I8(11)));
+TEST_F(FunctionBodyDecoderTest, Block3) {
+ EXPECT_VERIFIES(i_i,
+ WASM_BLOCK_I(WASM_SET_LOCAL(0, WASM_ZERO),
+ WASM_SET_LOCAL(0, WASM_ZERO), WASM_I32V_1(11)));
}
-TEST_F(AstDecoderTest, Block5) {
+TEST_F(FunctionBodyDecoderTest, Block5) {
EXPECT_FAILURE(v_i, WASM_BLOCK(WASM_ZERO));
EXPECT_FAILURE(v_i, WASM_BLOCK(WASM_ZERO, WASM_ZERO));
@@ -539,14 +617,14 @@ TEST_F(AstDecoderTest, Block5) {
v_i, WASM_BLOCK(WASM_ZERO, WASM_ZERO, WASM_ZERO, WASM_ZERO, WASM_ZERO));
}
-TEST_F(AstDecoderTest, BlockType) {
+TEST_F(FunctionBodyDecoderTest, BlockType) {
EXPECT_VERIFIES(i_i, WASM_BLOCK_I(WASM_GET_LOCAL(0)));
EXPECT_VERIFIES(l_l, WASM_BLOCK_L(WASM_GET_LOCAL(0)));
EXPECT_VERIFIES(f_f, WASM_BLOCK_F(WASM_GET_LOCAL(0)));
EXPECT_VERIFIES(d_d, WASM_BLOCK_D(WASM_GET_LOCAL(0)));
}
-TEST_F(AstDecoderTest, BlockType_fail) {
+TEST_F(FunctionBodyDecoderTest, BlockType_fail) {
EXPECT_FAILURE(i_i, WASM_BLOCK_L(WASM_I64V_1(0)));
EXPECT_FAILURE(i_i, WASM_BLOCK_F(WASM_F32(0.0)));
EXPECT_FAILURE(i_i, WASM_BLOCK_D(WASM_F64(1.1)));
@@ -564,14 +642,14 @@ TEST_F(AstDecoderTest, BlockType_fail) {
EXPECT_FAILURE(d_dd, WASM_BLOCK_F(WASM_F32(0.0)));
}
-TEST_F(AstDecoderTest, BlockF32) {
+TEST_F(FunctionBodyDecoderTest, BlockF32) {
static const byte code[] = {WASM_BLOCK_F(kExprF32Const, 0, 0, 0, 0)};
EXPECT_VERIFIES_C(f_ff, code);
EXPECT_FAILURE_C(i_i, code);
EXPECT_FAILURE_C(d_dd, code);
}
-TEST_F(AstDecoderTest, BlockN_off_end) {
+TEST_F(FunctionBodyDecoderTest, BlockN_off_end) {
byte code[] = {WASM_BLOCK(kExprNop, kExprNop, kExprNop, kExprNop)};
EXPECT_VERIFIES_C(v_v, code);
for (size_t i = 1; i < arraysize(code); i++) {
@@ -579,102 +657,102 @@ TEST_F(AstDecoderTest, BlockN_off_end) {
}
}
-TEST_F(AstDecoderTest, Block2_continue) {
+TEST_F(FunctionBodyDecoderTest, Block2_continue) {
EXPECT_VERIFIES(v_v, WASM_LOOP(WASM_NOP, WASM_BR(0)));
EXPECT_VERIFIES(v_v, WASM_LOOP(WASM_NOP, WASM_BR(1)));
EXPECT_FAILURE(v_v, WASM_LOOP(WASM_NOP, WASM_BR(2)));
}
-TEST_F(AstDecoderTest, Block3_continue) {
+TEST_F(FunctionBodyDecoderTest, Block3_continue) {
EXPECT_VERIFIES(v_v, B1(WASM_LOOP(WASM_NOP, WASM_BR(0))));
EXPECT_VERIFIES(v_v, B1(WASM_LOOP(WASM_NOP, WASM_BR(1))));
EXPECT_VERIFIES(v_v, B1(WASM_LOOP(WASM_NOP, WASM_BR(2))));
EXPECT_FAILURE(v_v, B1(WASM_LOOP(WASM_NOP, WASM_BR(3))));
}
-TEST_F(AstDecoderTest, NestedBlock_return) {
- EXPECT_VERIFIES(i_i, B1(B1(WASM_RETURN1(WASM_ZERO))));
+TEST_F(FunctionBodyDecoderTest, NestedBlock_return) {
+ EXPECT_VERIFIES(i_i, B1(B1(WASM_RETURN1(WASM_ZERO))), WASM_ZERO);
}
-TEST_F(AstDecoderTest, BlockBrBinop) {
- EXPECT_VERIFIES(
- i_i, WASM_I32_AND(WASM_BLOCK_I(WASM_BRV(0, WASM_I8(1))), WASM_I8(2)));
+TEST_F(FunctionBodyDecoderTest, BlockBrBinop) {
+ EXPECT_VERIFIES(i_i, WASM_I32_AND(WASM_BLOCK_I(WASM_BRV(0, WASM_I32V_1(1))),
+ WASM_I32V_1(2)));
}
-TEST_F(AstDecoderTest, If_empty1) {
+TEST_F(FunctionBodyDecoderTest, If_empty1) {
EXPECT_VERIFIES(v_v, WASM_ZERO, WASM_IF_OP, kExprEnd);
}
-TEST_F(AstDecoderTest, If_empty2) {
+TEST_F(FunctionBodyDecoderTest, If_empty2) {
EXPECT_VERIFIES(v_v, WASM_ZERO, WASM_IF_OP, kExprElse, kExprEnd);
}
-TEST_F(AstDecoderTest, If_empty3) {
+TEST_F(FunctionBodyDecoderTest, If_empty3) {
EXPECT_VERIFIES(v_v, WASM_ZERO, WASM_IF_OP, WASM_NOP, kExprElse, kExprEnd);
EXPECT_FAILURE(v_v, WASM_ZERO, WASM_IF_OP, WASM_ZERO, kExprElse, kExprEnd);
}
-TEST_F(AstDecoderTest, If_empty4) {
+TEST_F(FunctionBodyDecoderTest, If_empty4) {
EXPECT_VERIFIES(v_v, WASM_ZERO, WASM_IF_OP, kExprElse, WASM_NOP, kExprEnd);
EXPECT_FAILURE(v_v, WASM_ZERO, WASM_IF_OP, kExprElse, WASM_ZERO, kExprEnd);
}
-TEST_F(AstDecoderTest, If_empty_stack) {
+TEST_F(FunctionBodyDecoderTest, If_empty_stack) {
byte code[] = {kExprIf};
EXPECT_FAILURE_C(v_v, code);
EXPECT_FAILURE_C(i_i, code);
}
-TEST_F(AstDecoderTest, If_incomplete1) {
- byte code[] = {kExprI8Const, 0, kExprIf};
+TEST_F(FunctionBodyDecoderTest, If_incomplete1) {
+ byte code[] = {kExprI32Const, 0, kExprIf};
EXPECT_FAILURE_C(v_v, code);
EXPECT_FAILURE_C(i_i, code);
}
-TEST_F(AstDecoderTest, If_incomplete2) {
- byte code[] = {kExprI8Const, 0, kExprIf, kExprNop};
+TEST_F(FunctionBodyDecoderTest, If_incomplete2) {
+ byte code[] = {kExprI32Const, 0, kExprIf, kExprNop};
EXPECT_FAILURE_C(v_v, code);
EXPECT_FAILURE_C(i_i, code);
}
-TEST_F(AstDecoderTest, If_else_else) {
- byte code[] = {kExprI8Const, 0, WASM_IF_OP, kExprElse, kExprElse, kExprEnd};
+TEST_F(FunctionBodyDecoderTest, If_else_else) {
+ byte code[] = {kExprI32Const, 0, WASM_IF_OP, kExprElse, kExprElse, kExprEnd};
EXPECT_FAILURE_C(v_v, code);
EXPECT_FAILURE_C(i_i, code);
}
-TEST_F(AstDecoderTest, IfEmpty) {
+TEST_F(FunctionBodyDecoderTest, IfEmpty) {
EXPECT_VERIFIES(v_i, kExprGetLocal, 0, WASM_IF_OP, kExprEnd);
}
-TEST_F(AstDecoderTest, IfSet) {
+TEST_F(FunctionBodyDecoderTest, IfSet) {
EXPECT_VERIFIES(v_i,
WASM_IF(WASM_GET_LOCAL(0), WASM_SET_LOCAL(0, WASM_ZERO)));
EXPECT_VERIFIES(v_i, WASM_IF_ELSE(WASM_GET_LOCAL(0),
WASM_SET_LOCAL(0, WASM_ZERO), WASM_NOP));
}
-TEST_F(AstDecoderTest, IfElseEmpty) {
+TEST_F(FunctionBodyDecoderTest, IfElseEmpty) {
EXPECT_VERIFIES(v_i, WASM_GET_LOCAL(0), WASM_IF_OP, kExprElse, kExprEnd);
EXPECT_VERIFIES(v_i, WASM_IF_ELSE(WASM_GET_LOCAL(0), WASM_NOP, WASM_NOP));
}
-TEST_F(AstDecoderTest, IfElseUnreachable1) {
+TEST_F(FunctionBodyDecoderTest, IfElseUnreachable1) {
EXPECT_VERIFIES(i_i, WASM_IF_ELSE_I(WASM_GET_LOCAL(0), WASM_UNREACHABLE,
WASM_GET_LOCAL(0)));
EXPECT_VERIFIES(i_i, WASM_IF_ELSE_I(WASM_GET_LOCAL(0), WASM_GET_LOCAL(0),
WASM_UNREACHABLE));
}
-TEST_F(AstDecoderTest, IfElseUnreachable2) {
+TEST_F(FunctionBodyDecoderTest, IfElseUnreachable2) {
static const byte code[] = {
WASM_IF_ELSE_I(WASM_GET_LOCAL(0), WASM_UNREACHABLE, WASM_GET_LOCAL(0))};
- for (size_t i = 0; i < arraysize(kLocalTypes); i++) {
- LocalType types[] = {kAstI32, kLocalTypes[i]};
+ for (size_t i = 0; i < arraysize(kValueTypes); i++) {
+ ValueType types[] = {kWasmI32, kValueTypes[i]};
FunctionSig sig(1, 1, types);
- if (kLocalTypes[i] == kAstI32) {
+ if (kValueTypes[i] == kWasmI32) {
EXPECT_VERIFIES_SC(&sig, code);
} else {
EXPECT_FAILURE_SC(&sig, code);
@@ -682,148 +760,136 @@ TEST_F(AstDecoderTest, IfElseUnreachable2) {
}
}
-TEST_F(AstDecoderTest, IfBreak) {
+TEST_F(FunctionBodyDecoderTest, IfBreak) {
EXPECT_VERIFIES(v_i, WASM_IF(WASM_GET_LOCAL(0), WASM_BR(0)));
EXPECT_VERIFIES(v_i, WASM_IF(WASM_GET_LOCAL(0), WASM_BR(1)));
EXPECT_FAILURE(v_i, WASM_IF(WASM_GET_LOCAL(0), WASM_BR(2)));
}
-TEST_F(AstDecoderTest, IfElseBreak) {
+TEST_F(FunctionBodyDecoderTest, IfElseBreak) {
EXPECT_VERIFIES(v_i, WASM_IF_ELSE(WASM_GET_LOCAL(0), WASM_NOP, WASM_BR(0)));
EXPECT_VERIFIES(v_i, WASM_IF_ELSE(WASM_GET_LOCAL(0), WASM_NOP, WASM_BR(1)));
EXPECT_FAILURE(v_i, WASM_IF_ELSE(WASM_GET_LOCAL(0), WASM_NOP, WASM_BR(2)));
}
-TEST_F(AstDecoderTest, Block_else) {
- byte code[] = {kExprI8Const, 0, kExprBlock, kExprElse, kExprEnd};
+TEST_F(FunctionBodyDecoderTest, Block_else) {
+ byte code[] = {kExprI32Const, 0, kExprBlock, kExprElse, kExprEnd};
EXPECT_FAILURE_C(v_v, code);
EXPECT_FAILURE_C(i_i, code);
}
-TEST_F(AstDecoderTest, IfNop) {
+TEST_F(FunctionBodyDecoderTest, IfNop) {
EXPECT_VERIFIES(v_i, WASM_IF(WASM_GET_LOCAL(0), WASM_NOP));
-}
-
-TEST_F(AstDecoderTest, IfNopElseNop) {
EXPECT_VERIFIES(v_i, WASM_IF_ELSE(WASM_GET_LOCAL(0), WASM_NOP, WASM_NOP));
}
-TEST_F(AstDecoderTest, If_end_end) {
- static const byte code[] = {kExprGetLocal, 0, WASM_IF_OP, kExprEnd, kExprEnd};
- EXPECT_VERIFIES_C(v_i, code);
-}
-
-TEST_F(AstDecoderTest, If_end_end_end) {
- static const byte code[] = {kExprGetLocal, 0, WASM_IF_OP,
- kExprEnd, kExprEnd, kExprEnd};
- EXPECT_FAILURE_C(v_i, code);
-}
-
-TEST_F(AstDecoderTest, If_falloff1) {
- static const byte code[] = {kExprGetLocal, 0, kExprIf};
- EXPECT_FAILURE_C(v_i, code);
-}
-
-TEST_F(AstDecoderTest, If_falloff2) {
- static const byte code[] = {kExprGetLocal, 0, WASM_IF_OP};
- EXPECT_FAILURE_C(v_i, code);
+TEST_F(FunctionBodyDecoderTest, If_end) {
+ EXPECT_VERIFIES(v_i, kExprGetLocal, 0, WASM_IF_OP, kExprEnd);
+ EXPECT_FAILURE(v_i, kExprGetLocal, 0, WASM_IF_OP, kExprEnd, kExprEnd);
}
-TEST_F(AstDecoderTest, IfElse_falloff) {
- static const byte code[] = {kExprGetLocal, 0, WASM_IF_OP, kExprNop,
- kExprElse};
- EXPECT_FAILURE_C(v_i, code);
+TEST_F(FunctionBodyDecoderTest, If_falloff1) {
+ EXPECT_FAILURE(v_i, kExprGetLocal, 0, kExprIf);
+ EXPECT_FAILURE(v_i, kExprGetLocal, 0, WASM_IF_OP);
+ EXPECT_FAILURE(v_i, kExprGetLocal, 0, WASM_IF_OP, kExprNop, kExprElse);
}
-TEST_F(AstDecoderTest, IfElseNop) {
+TEST_F(FunctionBodyDecoderTest, IfElseNop) {
EXPECT_VERIFIES(v_i, WASM_IF_ELSE(WASM_GET_LOCAL(0),
WASM_SET_LOCAL(0, WASM_ZERO), WASM_NOP));
}
-TEST_F(AstDecoderTest, IfBlock1) {
+TEST_F(FunctionBodyDecoderTest, IfBlock1) {
EXPECT_VERIFIES(
v_i, WASM_IF_ELSE(WASM_GET_LOCAL(0), B1(WASM_SET_LOCAL(0, WASM_ZERO)),
WASM_NOP));
}
-TEST_F(AstDecoderTest, IfBlock1b) {
+TEST_F(FunctionBodyDecoderTest, IfBlock1b) {
EXPECT_VERIFIES(v_i,
WASM_IF(WASM_GET_LOCAL(0), B1(WASM_SET_LOCAL(0, WASM_ZERO))));
}
-TEST_F(AstDecoderTest, IfBlock2a) {
+TEST_F(FunctionBodyDecoderTest, IfBlock2a) {
EXPECT_VERIFIES(v_i,
WASM_IF(WASM_GET_LOCAL(0), B2(WASM_SET_LOCAL(0, WASM_ZERO),
WASM_SET_LOCAL(0, WASM_ZERO))));
}
-TEST_F(AstDecoderTest, IfBlock2b) {
+TEST_F(FunctionBodyDecoderTest, IfBlock2b) {
EXPECT_VERIFIES(
v_i, WASM_IF_ELSE(WASM_GET_LOCAL(0), B2(WASM_SET_LOCAL(0, WASM_ZERO),
WASM_SET_LOCAL(0, WASM_ZERO)),
WASM_NOP));
}
-TEST_F(AstDecoderTest, IfElseSet) {
+TEST_F(FunctionBodyDecoderTest, IfElseSet) {
EXPECT_VERIFIES(v_i,
WASM_IF_ELSE(WASM_GET_LOCAL(0), WASM_SET_LOCAL(0, WASM_ZERO),
- WASM_SET_LOCAL(0, WASM_I8(1))));
+ WASM_SET_LOCAL(0, WASM_I32V_1(1))));
}
-TEST_F(AstDecoderTest, Loop0) { EXPECT_VERIFIES(v_v, WASM_LOOP_OP, kExprEnd); }
+TEST_F(FunctionBodyDecoderTest, Loop0) {
+ EXPECT_VERIFIES(v_v, WASM_LOOP_OP, kExprEnd);
+}
-TEST_F(AstDecoderTest, Loop1) {
+TEST_F(FunctionBodyDecoderTest, Loop1) {
static const byte code[] = {WASM_LOOP(WASM_SET_LOCAL(0, WASM_ZERO))};
EXPECT_VERIFIES_C(v_i, code);
EXPECT_FAILURE_C(v_v, code);
EXPECT_FAILURE_C(f_ff, code);
}
-TEST_F(AstDecoderTest, Loop2) {
+TEST_F(FunctionBodyDecoderTest, Loop2) {
EXPECT_VERIFIES(v_i, WASM_LOOP(WASM_SET_LOCAL(0, WASM_ZERO),
WASM_SET_LOCAL(0, WASM_ZERO)));
}
-TEST_F(AstDecoderTest, Loop1_continue) {
+TEST_F(FunctionBodyDecoderTest, Loop1_continue) {
EXPECT_VERIFIES(v_v, WASM_LOOP(WASM_BR(0)));
}
-TEST_F(AstDecoderTest, Loop1_break) {
+TEST_F(FunctionBodyDecoderTest, Loop1_break) {
EXPECT_VERIFIES(v_v, WASM_LOOP(WASM_BR(1)));
}
-TEST_F(AstDecoderTest, Loop2_continue) {
+TEST_F(FunctionBodyDecoderTest, Loop2_continue) {
EXPECT_VERIFIES(v_i, WASM_LOOP(WASM_SET_LOCAL(0, WASM_ZERO), WASM_BR(0)));
}
-TEST_F(AstDecoderTest, Loop2_break) {
+TEST_F(FunctionBodyDecoderTest, Loop2_break) {
EXPECT_VERIFIES(v_i, WASM_LOOP(WASM_SET_LOCAL(0, WASM_ZERO), WASM_BR(1)));
}
-TEST_F(AstDecoderTest, InfiniteLoop) {
- EXPECT_VERIFIES(i_i, WASM_LOOP(WASM_BR(0)));
- EXPECT_VERIFIES(i_i, WASM_LOOP(WASM_BRV(1, WASM_ZERO)));
+TEST_F(FunctionBodyDecoderTest, InfiniteLoop1) {
+ EXPECT_VERIFIES(i_i, WASM_LOOP(WASM_BR(0)), WASM_ZERO);
+ EXPECT_VERIFIES(i_i, WASM_LOOP(WASM_BR(0)), WASM_ZERO);
+ EXPECT_VERIFIES(i_i, WASM_LOOP_I(WASM_BRV(1, WASM_ZERO)));
+}
+
+TEST_F(FunctionBodyDecoderTest, InfiniteLoop2) {
+ EXPECT_FAILURE(i_i, WASM_LOOP(WASM_BR(0), WASM_ZERO), WASM_ZERO);
}
-TEST_F(AstDecoderTest, Loop2_unreachable) {
- EXPECT_VERIFIES(i_i, WASM_LOOP(WASM_BR(0), WASM_NOP));
+TEST_F(FunctionBodyDecoderTest, Loop2_unreachable) {
+ EXPECT_VERIFIES(i_i, WASM_LOOP_I(WASM_BR(0), WASM_NOP));
}
-TEST_F(AstDecoderTest, LoopType) {
+TEST_F(FunctionBodyDecoderTest, LoopType) {
EXPECT_VERIFIES(i_i, WASM_LOOP_I(WASM_GET_LOCAL(0)));
EXPECT_VERIFIES(l_l, WASM_LOOP_L(WASM_GET_LOCAL(0)));
EXPECT_VERIFIES(f_f, WASM_LOOP_F(WASM_GET_LOCAL(0)));
EXPECT_VERIFIES(d_d, WASM_LOOP_D(WASM_GET_LOCAL(0)));
}
-TEST_F(AstDecoderTest, LoopType_void) {
+TEST_F(FunctionBodyDecoderTest, LoopType_void) {
EXPECT_FAILURE(v_v, WASM_LOOP_I(WASM_ZERO));
EXPECT_FAILURE(v_v, WASM_LOOP_L(WASM_I64V_1(0)));
EXPECT_FAILURE(v_v, WASM_LOOP_F(WASM_F32(0.0)));
EXPECT_FAILURE(v_v, WASM_LOOP_D(WASM_F64(1.1)));
}
-TEST_F(AstDecoderTest, LoopType_fail) {
+TEST_F(FunctionBodyDecoderTest, LoopType_fail) {
EXPECT_FAILURE(i_i, WASM_LOOP_L(WASM_I64V_1(0)));
EXPECT_FAILURE(i_i, WASM_LOOP_F(WASM_F32(0.0)));
EXPECT_FAILURE(i_i, WASM_LOOP_D(WASM_F64(1.1)));
@@ -841,22 +907,21 @@ TEST_F(AstDecoderTest, LoopType_fail) {
EXPECT_FAILURE(d_dd, WASM_LOOP_F(WASM_F32(0.0)));
}
-TEST_F(AstDecoderTest, ReturnVoid1) {
+TEST_F(FunctionBodyDecoderTest, ReturnVoid1) {
static const byte code[] = {kExprNop};
EXPECT_VERIFIES_C(v_v, code);
EXPECT_FAILURE_C(i_i, code);
EXPECT_FAILURE_C(i_f, code);
}
-TEST_F(AstDecoderTest, ReturnVoid2) {
+TEST_F(FunctionBodyDecoderTest, ReturnVoid2) {
static const byte code[] = {WASM_BLOCK(WASM_BR(0))};
EXPECT_VERIFIES_C(v_v, code);
EXPECT_FAILURE_C(i_i, code);
EXPECT_FAILURE_C(i_f, code);
}
-TEST_F(AstDecoderTest, ReturnVoid3) {
- EXPECT_FAILURE(v_v, kExprI8Const, 0);
+TEST_F(FunctionBodyDecoderTest, ReturnVoid3) {
EXPECT_FAILURE(v_v, kExprI32Const, 0);
EXPECT_FAILURE(v_v, kExprI64Const, 0);
EXPECT_FAILURE(v_v, kExprF32Const, 0, 0, 0, 0);
@@ -865,36 +930,56 @@ TEST_F(AstDecoderTest, ReturnVoid3) {
EXPECT_FAILURE(v_i, kExprGetLocal, 0);
}
-TEST_F(AstDecoderTest, Unreachable1) {
- EXPECT_VERIFIES(v_v, kExprUnreachable);
- EXPECT_VERIFIES(v_v, kExprUnreachable, kExprUnreachable);
- EXPECT_VERIFIES(v_v, B2(WASM_UNREACHABLE, WASM_ZERO));
- EXPECT_VERIFIES(v_v, B2(WASM_BR(0), WASM_ZERO));
- EXPECT_VERIFIES(v_v, WASM_LOOP(WASM_UNREACHABLE, WASM_ZERO));
- EXPECT_VERIFIES(v_v, WASM_LOOP(WASM_BR(0), WASM_ZERO));
+TEST_F(FunctionBodyDecoderTest, Unreachable1) {
+ EXPECT_VERIFIES(v_v, WASM_UNREACHABLE);
+ EXPECT_VERIFIES(v_v, WASM_UNREACHABLE, WASM_UNREACHABLE);
+ EXPECT_VERIFIES(i_i, WASM_UNREACHABLE, WASM_ZERO);
+}
+
+TEST_F(FunctionBodyDecoderTest, Unreachable2) {
+ EXPECT_FAILURE(v_v, B2(WASM_UNREACHABLE, WASM_ZERO));
+ EXPECT_FAILURE(v_v, B2(WASM_BR(0), WASM_ZERO));
+}
+
+TEST_F(FunctionBodyDecoderTest, UnreachableLoop1) {
+ EXPECT_FAILURE(v_v, WASM_LOOP(WASM_UNREACHABLE, WASM_ZERO));
+ EXPECT_FAILURE(v_v, WASM_LOOP(WASM_BR(0), WASM_ZERO));
+ EXPECT_VERIFIES(v_v, WASM_LOOP(WASM_UNREACHABLE, WASM_NOP));
+ EXPECT_VERIFIES(v_v, WASM_LOOP(WASM_BR(0), WASM_NOP));
}
-TEST_F(AstDecoderTest, Unreachable_binop) {
+TEST_F(FunctionBodyDecoderTest, Unreachable_binop1) {
EXPECT_VERIFIES(i_i, WASM_I32_AND(WASM_ZERO, WASM_UNREACHABLE));
EXPECT_VERIFIES(i_i, WASM_I32_AND(WASM_UNREACHABLE, WASM_ZERO));
}
-TEST_F(AstDecoderTest, Unreachable_select) {
+TEST_F(FunctionBodyDecoderTest, Unreachable_binop2) {
+ EXPECT_VERIFIES(i_i, WASM_I32_AND(WASM_F32(0.0), WASM_UNREACHABLE));
+ EXPECT_FAILURE(i_i, WASM_I32_AND(WASM_UNREACHABLE, WASM_F32(0.0)));
+}
+
+TEST_F(FunctionBodyDecoderTest, Unreachable_select1) {
EXPECT_VERIFIES(i_i, WASM_SELECT(WASM_UNREACHABLE, WASM_ZERO, WASM_ZERO));
EXPECT_VERIFIES(i_i, WASM_SELECT(WASM_ZERO, WASM_UNREACHABLE, WASM_ZERO));
EXPECT_VERIFIES(i_i, WASM_SELECT(WASM_ZERO, WASM_ZERO, WASM_UNREACHABLE));
}
-TEST_F(AstDecoderTest, If1) {
- EXPECT_VERIFIES(i_i,
- WASM_IF_ELSE_I(WASM_GET_LOCAL(0), WASM_I8(9), WASM_I8(8)));
- EXPECT_VERIFIES(
- i_i, WASM_IF_ELSE_I(WASM_GET_LOCAL(0), WASM_I8(9), WASM_GET_LOCAL(0)));
+TEST_F(FunctionBodyDecoderTest, Unreachable_select2) {
+ EXPECT_VERIFIES(i_i, WASM_SELECT(WASM_F32(0.0), WASM_UNREACHABLE, WASM_ZERO));
+ EXPECT_FAILURE(i_i, WASM_SELECT(WASM_UNREACHABLE, WASM_F32(0.0), WASM_ZERO));
+ EXPECT_FAILURE(i_i, WASM_SELECT(WASM_UNREACHABLE, WASM_ZERO, WASM_F32(0.0)));
+}
+
+TEST_F(FunctionBodyDecoderTest, If1) {
EXPECT_VERIFIES(
- i_i, WASM_IF_ELSE_I(WASM_GET_LOCAL(0), WASM_GET_LOCAL(0), WASM_I8(8)));
+ i_i, WASM_IF_ELSE_I(WASM_GET_LOCAL(0), WASM_I32V_1(9), WASM_I32V_1(8)));
+ EXPECT_VERIFIES(i_i, WASM_IF_ELSE_I(WASM_GET_LOCAL(0), WASM_I32V_1(9),
+ WASM_GET_LOCAL(0)));
+ EXPECT_VERIFIES(i_i, WASM_IF_ELSE_I(WASM_GET_LOCAL(0), WASM_GET_LOCAL(0),
+ WASM_I32V_1(8)));
}
-TEST_F(AstDecoderTest, If_off_end) {
+TEST_F(FunctionBodyDecoderTest, If_off_end) {
static const byte kCode[] = {
WASM_IF_ELSE(WASM_GET_LOCAL(0), WASM_GET_LOCAL(0), WASM_GET_LOCAL(0))};
for (size_t len = 3; len < arraysize(kCode); len++) {
@@ -902,64 +987,65 @@ TEST_F(AstDecoderTest, If_off_end) {
}
}
-TEST_F(AstDecoderTest, If_type1) {
+TEST_F(FunctionBodyDecoderTest, If_type1) {
// float|double ? 1 : 2
static const byte kCode[] = {
- WASM_IF_ELSE_I(WASM_GET_LOCAL(0), WASM_I8(0), WASM_I8(2))};
+ WASM_IF_ELSE_I(WASM_GET_LOCAL(0), WASM_I32V_1(0), WASM_I32V_1(2))};
EXPECT_VERIFIES_C(i_i, kCode);
EXPECT_FAILURE_C(i_f, kCode);
EXPECT_FAILURE_C(i_d, kCode);
}
-TEST_F(AstDecoderTest, If_type2) {
+TEST_F(FunctionBodyDecoderTest, If_type2) {
// 1 ? float|double : 2
static const byte kCode[] = {
- WASM_IF_ELSE_I(WASM_I8(1), WASM_GET_LOCAL(0), WASM_I8(1))};
+ WASM_IF_ELSE_I(WASM_I32V_1(1), WASM_GET_LOCAL(0), WASM_I32V_1(1))};
EXPECT_VERIFIES_C(i_i, kCode);
EXPECT_FAILURE_C(i_f, kCode);
EXPECT_FAILURE_C(i_d, kCode);
}
-TEST_F(AstDecoderTest, If_type3) {
+TEST_F(FunctionBodyDecoderTest, If_type3) {
// stmt ? 0 : 1
static const byte kCode[] = {
- WASM_IF_ELSE_I(WASM_NOP, WASM_I8(0), WASM_I8(1))};
+ WASM_IF_ELSE_I(WASM_NOP, WASM_I32V_1(0), WASM_I32V_1(1))};
EXPECT_FAILURE_C(i_i, kCode);
EXPECT_FAILURE_C(i_f, kCode);
EXPECT_FAILURE_C(i_d, kCode);
}
-TEST_F(AstDecoderTest, If_type4) {
+TEST_F(FunctionBodyDecoderTest, If_type4) {
// 0 ? stmt : 1
static const byte kCode[] = {
- WASM_IF_ELSE_I(WASM_GET_LOCAL(0), WASM_NOP, WASM_I8(1))};
+ WASM_IF_ELSE_I(WASM_GET_LOCAL(0), WASM_NOP, WASM_I32V_1(1))};
EXPECT_FAILURE_C(i_i, kCode);
EXPECT_FAILURE_C(i_f, kCode);
EXPECT_FAILURE_C(i_d, kCode);
}
-TEST_F(AstDecoderTest, If_type5) {
+TEST_F(FunctionBodyDecoderTest, If_type5) {
// 0 ? 1 : stmt
- static const byte kCode[] = {WASM_IF_ELSE_I(WASM_ZERO, WASM_I8(1), WASM_NOP)};
+ static const byte kCode[] = {
+ WASM_IF_ELSE_I(WASM_ZERO, WASM_I32V_1(1), WASM_NOP)};
EXPECT_FAILURE_C(i_i, kCode);
EXPECT_FAILURE_C(i_f, kCode);
EXPECT_FAILURE_C(i_d, kCode);
}
-TEST_F(AstDecoderTest, Int64Local_param) {
+TEST_F(FunctionBodyDecoderTest, Int64Local_param) {
EXPECT_VERIFIES_C(l_l, kCodeGetLocal0);
}
-TEST_F(AstDecoderTest, Int64Locals) {
+TEST_F(FunctionBodyDecoderTest, Int64Locals) {
for (byte i = 1; i < 8; i++) {
- AddLocals(kAstI64, 1);
+ AddLocals(kWasmI64, 1);
for (byte j = 0; j < i; j++) {
EXPECT_VERIFIES(l_v, WASM_GET_LOCAL(j));
}
}
}
-TEST_F(AstDecoderTest, Int32Binops) {
+TEST_F(FunctionBodyDecoderTest, Int32Binops) {
TestBinop(kExprI32Add, sigs.i_ii());
TestBinop(kExprI32Sub, sigs.i_ii());
TestBinop(kExprI32Mul, sigs.i_ii());
@@ -980,7 +1066,7 @@ TEST_F(AstDecoderTest, Int32Binops) {
TestBinop(kExprI32LeU, sigs.i_ii());
}
-TEST_F(AstDecoderTest, DoubleBinops) {
+TEST_F(FunctionBodyDecoderTest, DoubleBinops) {
TestBinop(kExprF64Add, sigs.d_dd());
TestBinop(kExprF64Sub, sigs.d_dd());
TestBinop(kExprF64Mul, sigs.d_dd());
@@ -991,7 +1077,7 @@ TEST_F(AstDecoderTest, DoubleBinops) {
TestBinop(kExprF64Le, sigs.i_dd());
}
-TEST_F(AstDecoderTest, FloatBinops) {
+TEST_F(FunctionBodyDecoderTest, FloatBinops) {
TestBinop(kExprF32Add, sigs.f_ff());
TestBinop(kExprF32Sub, sigs.f_ff());
TestBinop(kExprF32Mul, sigs.f_ff());
@@ -1002,23 +1088,26 @@ TEST_F(AstDecoderTest, FloatBinops) {
TestBinop(kExprF32Le, sigs.i_ff());
}
-TEST_F(AstDecoderTest, TypeConversions) {
- TestUnop(kExprI32SConvertF32, kAstI32, kAstF32);
- TestUnop(kExprI32SConvertF64, kAstI32, kAstF64);
- TestUnop(kExprI32UConvertF32, kAstI32, kAstF32);
- TestUnop(kExprI32UConvertF64, kAstI32, kAstF64);
- TestUnop(kExprF64SConvertI32, kAstF64, kAstI32);
- TestUnop(kExprF64UConvertI32, kAstF64, kAstI32);
- TestUnop(kExprF64ConvertF32, kAstF64, kAstF32);
- TestUnop(kExprF32SConvertI32, kAstF32, kAstI32);
- TestUnop(kExprF32UConvertI32, kAstF32, kAstI32);
- TestUnop(kExprF32ConvertF64, kAstF32, kAstF64);
+TEST_F(FunctionBodyDecoderTest, TypeConversions) {
+ TestUnop(kExprI32SConvertF32, kWasmI32, kWasmF32);
+ TestUnop(kExprI32SConvertF64, kWasmI32, kWasmF64);
+ TestUnop(kExprI32UConvertF32, kWasmI32, kWasmF32);
+ TestUnop(kExprI32UConvertF64, kWasmI32, kWasmF64);
+ TestUnop(kExprF64SConvertI32, kWasmF64, kWasmI32);
+ TestUnop(kExprF64UConvertI32, kWasmF64, kWasmI32);
+ TestUnop(kExprF64ConvertF32, kWasmF64, kWasmF32);
+ TestUnop(kExprF32SConvertI32, kWasmF32, kWasmI32);
+ TestUnop(kExprF32UConvertI32, kWasmF32, kWasmI32);
+ TestUnop(kExprF32ConvertF64, kWasmF32, kWasmF64);
}
-TEST_F(AstDecoderTest, MacrosStmt) {
+TEST_F(FunctionBodyDecoderTest, MacrosStmt) {
+ TestModuleEnv module_env;
+ module = &module_env;
+ module_env.InitializeMemory();
EXPECT_VERIFIES(v_i, WASM_SET_LOCAL(0, WASM_I32V_3(87348)));
- EXPECT_VERIFIES(
- v_i, WASM_STORE_MEM(MachineType::Int32(), WASM_I8(24), WASM_I8(40)));
+ EXPECT_VERIFIES(v_i, WASM_STORE_MEM(MachineType::Int32(), WASM_I32V_1(24),
+ WASM_I32V_1(40)));
EXPECT_VERIFIES(v_i, WASM_IF(WASM_GET_LOCAL(0), WASM_NOP));
EXPECT_VERIFIES(v_i, WASM_IF_ELSE(WASM_GET_LOCAL(0), WASM_NOP, WASM_NOP));
EXPECT_VERIFIES(v_v, WASM_NOP);
@@ -1027,18 +1116,18 @@ TEST_F(AstDecoderTest, MacrosStmt) {
EXPECT_VERIFIES(v_v, WASM_LOOP(WASM_BR(0)));
}
-TEST_F(AstDecoderTest, MacrosContinue) {
+TEST_F(FunctionBodyDecoderTest, MacrosContinue) {
EXPECT_VERIFIES(v_v, WASM_LOOP(WASM_CONTINUE(0)));
}
-TEST_F(AstDecoderTest, MacrosVariadic) {
+TEST_F(FunctionBodyDecoderTest, MacrosVariadic) {
EXPECT_VERIFIES(v_v, B2(WASM_NOP, WASM_NOP));
EXPECT_VERIFIES(v_v, B3(WASM_NOP, WASM_NOP, WASM_NOP));
EXPECT_VERIFIES(v_v, WASM_LOOP(WASM_NOP, WASM_NOP));
EXPECT_VERIFIES(v_v, WASM_LOOP(WASM_NOP, WASM_NOP, WASM_NOP));
}
-TEST_F(AstDecoderTest, MacrosNestedBlocks) {
+TEST_F(FunctionBodyDecoderTest, MacrosNestedBlocks) {
EXPECT_VERIFIES(v_v, B2(WASM_NOP, B2(WASM_NOP, WASM_NOP)));
EXPECT_VERIFIES(v_v, B3(WASM_NOP, // --
B2(WASM_NOP, WASM_NOP), // --
@@ -1046,61 +1135,63 @@ TEST_F(AstDecoderTest, MacrosNestedBlocks) {
EXPECT_VERIFIES(v_v, B1(B1(B2(WASM_NOP, WASM_NOP))));
}
-TEST_F(AstDecoderTest, MultipleReturn) {
- static LocalType kIntTypes5[] = {kAstI32, kAstI32, kAstI32, kAstI32, kAstI32};
+TEST_F(FunctionBodyDecoderTest, MultipleReturn) {
+ static ValueType kIntTypes5[] = {kWasmI32, kWasmI32, kWasmI32, kWasmI32,
+ kWasmI32};
FunctionSig sig_ii_v(2, 0, kIntTypes5);
EXPECT_VERIFIES_S(&sig_ii_v, WASM_RETURNN(2, WASM_ZERO, WASM_ONE));
EXPECT_FAILURE_S(&sig_ii_v, WASM_RETURNN(1, WASM_ZERO));
FunctionSig sig_iii_v(3, 0, kIntTypes5);
EXPECT_VERIFIES_S(&sig_iii_v,
- WASM_RETURNN(3, WASM_ZERO, WASM_ONE, WASM_I8(44)));
+ WASM_RETURNN(3, WASM_ZERO, WASM_ONE, WASM_I32V_1(44)));
EXPECT_FAILURE_S(&sig_iii_v, WASM_RETURNN(2, WASM_ZERO, WASM_ONE));
}
-TEST_F(AstDecoderTest, MultipleReturn_fallthru) {
- static LocalType kIntTypes5[] = {kAstI32, kAstI32, kAstI32, kAstI32, kAstI32};
+TEST_F(FunctionBodyDecoderTest, MultipleReturn_fallthru) {
+ static ValueType kIntTypes5[] = {kWasmI32, kWasmI32, kWasmI32, kWasmI32,
+ kWasmI32};
FunctionSig sig_ii_v(2, 0, kIntTypes5);
EXPECT_VERIFIES_S(&sig_ii_v, WASM_ZERO, WASM_ONE);
EXPECT_FAILURE_S(&sig_ii_v, WASM_ZERO);
FunctionSig sig_iii_v(3, 0, kIntTypes5);
- EXPECT_VERIFIES_S(&sig_iii_v, WASM_ZERO, WASM_ONE, WASM_I8(44));
+ EXPECT_VERIFIES_S(&sig_iii_v, WASM_ZERO, WASM_ONE, WASM_I32V_1(44));
EXPECT_FAILURE_S(&sig_iii_v, WASM_ZERO, WASM_ONE);
}
-TEST_F(AstDecoderTest, MacrosInt32) {
- EXPECT_VERIFIES(i_i, WASM_I32_ADD(WASM_GET_LOCAL(0), WASM_I8(12)));
- EXPECT_VERIFIES(i_i, WASM_I32_SUB(WASM_GET_LOCAL(0), WASM_I8(13)));
- EXPECT_VERIFIES(i_i, WASM_I32_MUL(WASM_GET_LOCAL(0), WASM_I8(14)));
- EXPECT_VERIFIES(i_i, WASM_I32_DIVS(WASM_GET_LOCAL(0), WASM_I8(15)));
- EXPECT_VERIFIES(i_i, WASM_I32_DIVU(WASM_GET_LOCAL(0), WASM_I8(16)));
- EXPECT_VERIFIES(i_i, WASM_I32_REMS(WASM_GET_LOCAL(0), WASM_I8(17)));
- EXPECT_VERIFIES(i_i, WASM_I32_REMU(WASM_GET_LOCAL(0), WASM_I8(18)));
- EXPECT_VERIFIES(i_i, WASM_I32_AND(WASM_GET_LOCAL(0), WASM_I8(19)));
- EXPECT_VERIFIES(i_i, WASM_I32_IOR(WASM_GET_LOCAL(0), WASM_I8(20)));
- EXPECT_VERIFIES(i_i, WASM_I32_XOR(WASM_GET_LOCAL(0), WASM_I8(21)));
- EXPECT_VERIFIES(i_i, WASM_I32_SHL(WASM_GET_LOCAL(0), WASM_I8(22)));
- EXPECT_VERIFIES(i_i, WASM_I32_SHR(WASM_GET_LOCAL(0), WASM_I8(23)));
- EXPECT_VERIFIES(i_i, WASM_I32_SAR(WASM_GET_LOCAL(0), WASM_I8(24)));
- EXPECT_VERIFIES(i_i, WASM_I32_ROR(WASM_GET_LOCAL(0), WASM_I8(24)));
- EXPECT_VERIFIES(i_i, WASM_I32_ROL(WASM_GET_LOCAL(0), WASM_I8(24)));
- EXPECT_VERIFIES(i_i, WASM_I32_EQ(WASM_GET_LOCAL(0), WASM_I8(25)));
- EXPECT_VERIFIES(i_i, WASM_I32_NE(WASM_GET_LOCAL(0), WASM_I8(25)));
-
- EXPECT_VERIFIES(i_i, WASM_I32_LTS(WASM_GET_LOCAL(0), WASM_I8(26)));
- EXPECT_VERIFIES(i_i, WASM_I32_LES(WASM_GET_LOCAL(0), WASM_I8(27)));
- EXPECT_VERIFIES(i_i, WASM_I32_LTU(WASM_GET_LOCAL(0), WASM_I8(28)));
- EXPECT_VERIFIES(i_i, WASM_I32_LEU(WASM_GET_LOCAL(0), WASM_I8(29)));
-
- EXPECT_VERIFIES(i_i, WASM_I32_GTS(WASM_GET_LOCAL(0), WASM_I8(26)));
- EXPECT_VERIFIES(i_i, WASM_I32_GES(WASM_GET_LOCAL(0), WASM_I8(27)));
- EXPECT_VERIFIES(i_i, WASM_I32_GTU(WASM_GET_LOCAL(0), WASM_I8(28)));
- EXPECT_VERIFIES(i_i, WASM_I32_GEU(WASM_GET_LOCAL(0), WASM_I8(29)));
-}
-
-TEST_F(AstDecoderTest, MacrosInt64) {
+TEST_F(FunctionBodyDecoderTest, MacrosInt32) {
+ EXPECT_VERIFIES(i_i, WASM_I32_ADD(WASM_GET_LOCAL(0), WASM_I32V_1(12)));
+ EXPECT_VERIFIES(i_i, WASM_I32_SUB(WASM_GET_LOCAL(0), WASM_I32V_1(13)));
+ EXPECT_VERIFIES(i_i, WASM_I32_MUL(WASM_GET_LOCAL(0), WASM_I32V_1(14)));
+ EXPECT_VERIFIES(i_i, WASM_I32_DIVS(WASM_GET_LOCAL(0), WASM_I32V_1(15)));
+ EXPECT_VERIFIES(i_i, WASM_I32_DIVU(WASM_GET_LOCAL(0), WASM_I32V_1(16)));
+ EXPECT_VERIFIES(i_i, WASM_I32_REMS(WASM_GET_LOCAL(0), WASM_I32V_1(17)));
+ EXPECT_VERIFIES(i_i, WASM_I32_REMU(WASM_GET_LOCAL(0), WASM_I32V_1(18)));
+ EXPECT_VERIFIES(i_i, WASM_I32_AND(WASM_GET_LOCAL(0), WASM_I32V_1(19)));
+ EXPECT_VERIFIES(i_i, WASM_I32_IOR(WASM_GET_LOCAL(0), WASM_I32V_1(20)));
+ EXPECT_VERIFIES(i_i, WASM_I32_XOR(WASM_GET_LOCAL(0), WASM_I32V_1(21)));
+ EXPECT_VERIFIES(i_i, WASM_I32_SHL(WASM_GET_LOCAL(0), WASM_I32V_1(22)));
+ EXPECT_VERIFIES(i_i, WASM_I32_SHR(WASM_GET_LOCAL(0), WASM_I32V_1(23)));
+ EXPECT_VERIFIES(i_i, WASM_I32_SAR(WASM_GET_LOCAL(0), WASM_I32V_1(24)));
+ EXPECT_VERIFIES(i_i, WASM_I32_ROR(WASM_GET_LOCAL(0), WASM_I32V_1(24)));
+ EXPECT_VERIFIES(i_i, WASM_I32_ROL(WASM_GET_LOCAL(0), WASM_I32V_1(24)));
+ EXPECT_VERIFIES(i_i, WASM_I32_EQ(WASM_GET_LOCAL(0), WASM_I32V_1(25)));
+ EXPECT_VERIFIES(i_i, WASM_I32_NE(WASM_GET_LOCAL(0), WASM_I32V_1(25)));
+
+ EXPECT_VERIFIES(i_i, WASM_I32_LTS(WASM_GET_LOCAL(0), WASM_I32V_1(26)));
+ EXPECT_VERIFIES(i_i, WASM_I32_LES(WASM_GET_LOCAL(0), WASM_I32V_1(27)));
+ EXPECT_VERIFIES(i_i, WASM_I32_LTU(WASM_GET_LOCAL(0), WASM_I32V_1(28)));
+ EXPECT_VERIFIES(i_i, WASM_I32_LEU(WASM_GET_LOCAL(0), WASM_I32V_1(29)));
+
+ EXPECT_VERIFIES(i_i, WASM_I32_GTS(WASM_GET_LOCAL(0), WASM_I32V_1(26)));
+ EXPECT_VERIFIES(i_i, WASM_I32_GES(WASM_GET_LOCAL(0), WASM_I32V_1(27)));
+ EXPECT_VERIFIES(i_i, WASM_I32_GTU(WASM_GET_LOCAL(0), WASM_I32V_1(28)));
+ EXPECT_VERIFIES(i_i, WASM_I32_GEU(WASM_GET_LOCAL(0), WASM_I32V_1(29)));
+}
+
+TEST_F(FunctionBodyDecoderTest, MacrosInt64) {
EXPECT_VERIFIES(l_ll, WASM_I64_ADD(WASM_GET_LOCAL(0), WASM_I64V_1(12)));
EXPECT_VERIFIES(l_ll, WASM_I64_SUB(WASM_GET_LOCAL(0), WASM_I64V_1(13)));
EXPECT_VERIFIES(l_ll, WASM_I64_MUL(WASM_GET_LOCAL(0), WASM_I64V_1(14)));
@@ -1132,7 +1223,7 @@ TEST_F(AstDecoderTest, MacrosInt64) {
EXPECT_VERIFIES(i_ll, WASM_I64_NE(WASM_GET_LOCAL(0), WASM_I64V_1(25)));
}
-TEST_F(AstDecoderTest, AllSimpleExpressions) {
+TEST_F(FunctionBodyDecoderTest, AllSimpleExpressions) {
// Test all simple expressions which are described by a signature.
#define DECODE_TEST(name, opcode, sig) \
{ \
@@ -1149,21 +1240,30 @@ TEST_F(AstDecoderTest, AllSimpleExpressions) {
#undef DECODE_TEST
}
-TEST_F(AstDecoderTest, MemorySize) {
+TEST_F(FunctionBodyDecoderTest, MemorySize) {
+ TestModuleEnv module_env;
+ module = &module_env;
+ module_env.InitializeMemory();
byte code[] = {kExprMemorySize, 0};
EXPECT_VERIFIES_C(i_i, code);
EXPECT_FAILURE_C(f_ff, code);
}
-TEST_F(AstDecoderTest, LoadMemOffset) {
+TEST_F(FunctionBodyDecoderTest, LoadMemOffset) {
+ TestModuleEnv module_env;
+ module = &module_env;
+ module_env.InitializeMemory();
for (int offset = 0; offset < 128; offset += 7) {
- byte code[] = {kExprI8Const, 0, kExprI32LoadMem, ZERO_ALIGNMENT,
+ byte code[] = {kExprI32Const, 0, kExprI32LoadMem, ZERO_ALIGNMENT,
static_cast<byte>(offset)};
EXPECT_VERIFIES_C(i_i, code);
}
}
-TEST_F(AstDecoderTest, LoadMemAlignment) {
+TEST_F(FunctionBodyDecoderTest, LoadMemAlignment) {
+ TestModuleEnv module_env;
+ module = &module_env;
+ module_env.InitializeMemory();
struct {
WasmOpcode instruction;
uint32_t maximum_aligment;
@@ -1197,7 +1297,10 @@ TEST_F(AstDecoderTest, LoadMemAlignment) {
}
}
-TEST_F(AstDecoderTest, StoreMemOffset) {
+TEST_F(FunctionBodyDecoderTest, StoreMemOffset) {
+ TestModuleEnv module_env;
+ module = &module_env;
+ module_env.InitializeMemory();
for (int offset = 0; offset < 128; offset += 7) {
byte code[] = {WASM_STORE_MEM_OFFSET(MachineType::Int32(), offset,
WASM_ZERO, WASM_ZERO)};
@@ -1205,7 +1308,10 @@ TEST_F(AstDecoderTest, StoreMemOffset) {
}
}
-TEST_F(AstDecoderTest, StoreMemOffset_void) {
+TEST_F(FunctionBodyDecoderTest, StoreMemOffset_void) {
+ TestModuleEnv module_env;
+ module = &module_env;
+ module_env.InitializeMemory();
EXPECT_FAILURE(i_i, WASM_STORE_MEM_OFFSET(MachineType::Int32(), 0, WASM_ZERO,
WASM_ZERO));
}
@@ -1220,7 +1326,10 @@ TEST_F(AstDecoderTest, StoreMemOffset_void) {
#define VARINT3(x) BYTE0(x) | 0x80, BYTE1(x) | 0x80, BYTE2(x)
#define VARINT4(x) BYTE0(x) | 0x80, BYTE1(x) | 0x80, BYTE2(x) | 0x80, BYTE3(x)
-TEST_F(AstDecoderTest, LoadMemOffset_varint) {
+TEST_F(FunctionBodyDecoderTest, LoadMemOffset_varint) {
+ TestModuleEnv module_env;
+ module = &module_env;
+ module_env.InitializeMemory();
EXPECT_VERIFIES(i_i, WASM_ZERO, kExprI32LoadMem, ZERO_ALIGNMENT,
VARINT1(0x45));
EXPECT_VERIFIES(i_i, WASM_ZERO, kExprI32LoadMem, ZERO_ALIGNMENT,
@@ -1231,7 +1340,10 @@ TEST_F(AstDecoderTest, LoadMemOffset_varint) {
VARINT4(0x36666667));
}
-TEST_F(AstDecoderTest, StoreMemOffset_varint) {
+TEST_F(FunctionBodyDecoderTest, StoreMemOffset_varint) {
+ TestModuleEnv module_env;
+ module = &module_env;
+ module_env.InitializeMemory();
EXPECT_VERIFIES(v_i, WASM_ZERO, WASM_ZERO, kExprI32StoreMem, ZERO_ALIGNMENT,
VARINT1(0x33));
EXPECT_VERIFIES(v_i, WASM_ZERO, WASM_ZERO, kExprI32StoreMem, ZERO_ALIGNMENT,
@@ -1242,14 +1354,17 @@ TEST_F(AstDecoderTest, StoreMemOffset_varint) {
VARINT4(0x44444444));
}
-TEST_F(AstDecoderTest, AllLoadMemCombinations) {
- for (size_t i = 0; i < arraysize(kLocalTypes); i++) {
- LocalType local_type = kLocalTypes[i];
+TEST_F(FunctionBodyDecoderTest, AllLoadMemCombinations) {
+ TestModuleEnv module_env;
+ module = &module_env;
+ module_env.InitializeMemory();
+ for (size_t i = 0; i < arraysize(kValueTypes); i++) {
+ ValueType local_type = kValueTypes[i];
for (size_t j = 0; j < arraysize(machineTypes); j++) {
MachineType mem_type = machineTypes[j];
byte code[] = {WASM_LOAD_MEM(mem_type, WASM_ZERO)};
FunctionSig sig(1, 0, &local_type);
- if (local_type == WasmOpcodes::LocalTypeFor(mem_type)) {
+ if (local_type == WasmOpcodes::ValueTypeFor(mem_type)) {
EXPECT_VERIFIES_SC(&sig, code);
} else {
EXPECT_FAILURE_SC(&sig, code);
@@ -1258,14 +1373,17 @@ TEST_F(AstDecoderTest, AllLoadMemCombinations) {
}
}
-TEST_F(AstDecoderTest, AllStoreMemCombinations) {
- for (size_t i = 0; i < arraysize(kLocalTypes); i++) {
- LocalType local_type = kLocalTypes[i];
+TEST_F(FunctionBodyDecoderTest, AllStoreMemCombinations) {
+ TestModuleEnv module_env;
+ module = &module_env;
+ module_env.InitializeMemory();
+ for (size_t i = 0; i < arraysize(kValueTypes); i++) {
+ ValueType local_type = kValueTypes[i];
for (size_t j = 0; j < arraysize(machineTypes); j++) {
MachineType mem_type = machineTypes[j];
byte code[] = {WASM_STORE_MEM(mem_type, WASM_ZERO, WASM_GET_LOCAL(0))};
FunctionSig sig(0, 1, &local_type);
- if (local_type == WasmOpcodes::LocalTypeFor(mem_type)) {
+ if (local_type == WasmOpcodes::ValueTypeFor(mem_type)) {
EXPECT_VERIFIES_SC(&sig, code);
} else {
EXPECT_FAILURE_SC(&sig, code);
@@ -1274,55 +1392,7 @@ TEST_F(AstDecoderTest, AllStoreMemCombinations) {
}
}
-namespace {
-// A helper for tests that require a module environment for functions and
-// globals.
-class TestModuleEnv : public ModuleEnv {
- public:
- TestModuleEnv() {
- instance = nullptr;
- module = &mod;
- }
- byte AddGlobal(LocalType type, bool mutability = true) {
- mod.globals.push_back({type, mutability, WasmInitExpr(), 0, false, false});
- CHECK(mod.globals.size() <= 127);
- return static_cast<byte>(mod.globals.size() - 1);
- }
- byte AddSignature(FunctionSig* sig) {
- mod.signatures.push_back(sig);
- CHECK(mod.signatures.size() <= 127);
- return static_cast<byte>(mod.signatures.size() - 1);
- }
- byte AddFunction(FunctionSig* sig) {
- mod.functions.push_back({sig, // sig
- 0, // func_index
- 0, // sig_index
- 0, // name_offset
- 0, // name_length
- 0, // code_start_offset
- 0, // code_end_offset
- false, // import
- false}); // export
- CHECK(mod.functions.size() <= 127);
- return static_cast<byte>(mod.functions.size() - 1);
- }
- byte AddImport(FunctionSig* sig) {
- byte result = AddFunction(sig);
- mod.functions[result].imported = true;
- return result;
- }
-
- void InitializeFunctionTable() {
- mod.function_tables.push_back(
- {0, 0, true, std::vector<int32_t>(), false, false, SignatureMap()});
- }
-
- private:
- WasmModule mod;
-};
-} // namespace
-
-TEST_F(AstDecoderTest, SimpleCalls) {
+TEST_F(FunctionBodyDecoderTest, SimpleCalls) {
FunctionSig* sig = sigs.i_i();
TestModuleEnv module_env;
module = &module_env;
@@ -1332,11 +1402,12 @@ TEST_F(AstDecoderTest, SimpleCalls) {
module_env.AddFunction(sigs.i_ii());
EXPECT_VERIFIES_S(sig, WASM_CALL_FUNCTION0(0));
- EXPECT_VERIFIES_S(sig, WASM_CALL_FUNCTION(1, WASM_I8(27)));
- EXPECT_VERIFIES_S(sig, WASM_CALL_FUNCTION(2, WASM_I8(37), WASM_I8(77)));
+ EXPECT_VERIFIES_S(sig, WASM_CALL_FUNCTION(1, WASM_I32V_1(27)));
+ EXPECT_VERIFIES_S(sig,
+ WASM_CALL_FUNCTION(2, WASM_I32V_1(37), WASM_I32V_2(77)));
}
-TEST_F(AstDecoderTest, CallsWithTooFewArguments) {
+TEST_F(FunctionBodyDecoderTest, CallsWithTooFewArguments) {
FunctionSig* sig = sigs.i_i();
TestModuleEnv module_env;
module = &module_env;
@@ -1350,7 +1421,7 @@ TEST_F(AstDecoderTest, CallsWithTooFewArguments) {
EXPECT_FAILURE_S(sig, WASM_CALL_FUNCTION(2, WASM_GET_LOCAL(0)));
}
-TEST_F(AstDecoderTest, CallsWithMismatchedSigs2) {
+TEST_F(FunctionBodyDecoderTest, CallsWithMismatchedSigs2) {
FunctionSig* sig = sigs.i_i();
TestModuleEnv module_env;
module = &module_env;
@@ -1362,27 +1433,27 @@ TEST_F(AstDecoderTest, CallsWithMismatchedSigs2) {
EXPECT_FAILURE_S(sig, WASM_CALL_FUNCTION(0, WASM_F64(17.1)));
}
-TEST_F(AstDecoderTest, CallsWithMismatchedSigs3) {
+TEST_F(FunctionBodyDecoderTest, CallsWithMismatchedSigs3) {
FunctionSig* sig = sigs.i_i();
TestModuleEnv module_env;
module = &module_env;
module_env.AddFunction(sigs.i_f());
- EXPECT_FAILURE_S(sig, WASM_CALL_FUNCTION(0, WASM_I8(17)));
+ EXPECT_FAILURE_S(sig, WASM_CALL_FUNCTION(0, WASM_I32V_1(17)));
EXPECT_FAILURE_S(sig, WASM_CALL_FUNCTION(0, WASM_I64V_1(27)));
EXPECT_FAILURE_S(sig, WASM_CALL_FUNCTION(0, WASM_F64(37.2)));
module_env.AddFunction(sigs.i_d());
- EXPECT_FAILURE_S(sig, WASM_CALL_FUNCTION(1, WASM_I8(16)));
+ EXPECT_FAILURE_S(sig, WASM_CALL_FUNCTION(1, WASM_I32V_1(16)));
EXPECT_FAILURE_S(sig, WASM_CALL_FUNCTION(1, WASM_I64V_1(16)));
EXPECT_FAILURE_S(sig, WASM_CALL_FUNCTION(1, WASM_F32(17.6)));
}
-TEST_F(AstDecoderTest, MultiReturn) {
+TEST_F(FunctionBodyDecoderTest, MultiReturn) {
FLAG_wasm_mv_prototype = true;
- LocalType storage[] = {kAstI32, kAstI32};
+ ValueType storage[] = {kWasmI32, kWasmI32};
FunctionSig sig_ii_v(2, 0, storage);
FunctionSig sig_v_ii(0, 2, storage);
TestModuleEnv module_env;
@@ -1396,15 +1467,15 @@ TEST_F(AstDecoderTest, MultiReturn) {
EXPECT_VERIFIES(v_v, WASM_CALL_FUNCTION0(1), kExprCallFunction, 0);
}
-TEST_F(AstDecoderTest, MultiReturnType) {
+TEST_F(FunctionBodyDecoderTest, MultiReturnType) {
FLAG_wasm_mv_prototype = true;
- for (size_t a = 0; a < arraysize(kLocalTypes); a++) {
- for (size_t b = 0; b < arraysize(kLocalTypes); b++) {
- for (size_t c = 0; c < arraysize(kLocalTypes); c++) {
- for (size_t d = 0; d < arraysize(kLocalTypes); d++) {
- LocalType storage_ab[] = {kLocalTypes[a], kLocalTypes[b]};
+ for (size_t a = 0; a < arraysize(kValueTypes); a++) {
+ for (size_t b = 0; b < arraysize(kValueTypes); b++) {
+ for (size_t c = 0; c < arraysize(kValueTypes); c++) {
+ for (size_t d = 0; d < arraysize(kValueTypes); d++) {
+ ValueType storage_ab[] = {kValueTypes[a], kValueTypes[b]};
FunctionSig sig_ab_v(2, 0, storage_ab);
- LocalType storage_cd[] = {kLocalTypes[c], kLocalTypes[d]};
+ ValueType storage_cd[] = {kValueTypes[c], kValueTypes[d]};
FunctionSig sig_cd_v(2, 0, storage_cd);
TestModuleEnv module_env;
@@ -1424,7 +1495,7 @@ TEST_F(AstDecoderTest, MultiReturnType) {
}
}
-TEST_F(AstDecoderTest, SimpleIndirectCalls) {
+TEST_F(FunctionBodyDecoderTest, SimpleIndirectCalls) {
FunctionSig* sig = sigs.i_i();
TestModuleEnv module_env;
module_env.InitializeFunctionTable();
@@ -1435,12 +1506,12 @@ TEST_F(AstDecoderTest, SimpleIndirectCalls) {
byte f2 = module_env.AddSignature(sigs.i_ii());
EXPECT_VERIFIES_S(sig, WASM_CALL_INDIRECT0(f0, WASM_ZERO));
- EXPECT_VERIFIES_S(sig, WASM_CALL_INDIRECT1(f1, WASM_ZERO, WASM_I8(22)));
- EXPECT_VERIFIES_S(
- sig, WASM_CALL_INDIRECT2(f2, WASM_ZERO, WASM_I8(32), WASM_I8(72)));
+ EXPECT_VERIFIES_S(sig, WASM_CALL_INDIRECT1(f1, WASM_ZERO, WASM_I32V_1(22)));
+ EXPECT_VERIFIES_S(sig, WASM_CALL_INDIRECT2(f2, WASM_ZERO, WASM_I32V_1(32),
+ WASM_I32V_2(72)));
}
-TEST_F(AstDecoderTest, IndirectCallsOutOfBounds) {
+TEST_F(FunctionBodyDecoderTest, IndirectCallsOutOfBounds) {
FunctionSig* sig = sigs.i_i();
TestModuleEnv module_env;
module_env.InitializeFunctionTable();
@@ -1450,14 +1521,14 @@ TEST_F(AstDecoderTest, IndirectCallsOutOfBounds) {
module_env.AddSignature(sigs.i_v());
EXPECT_VERIFIES_S(sig, WASM_CALL_INDIRECT0(0, WASM_ZERO));
- EXPECT_FAILURE_S(sig, WASM_CALL_INDIRECT1(1, WASM_ZERO, WASM_I8(22)));
+ EXPECT_FAILURE_S(sig, WASM_CALL_INDIRECT1(1, WASM_ZERO, WASM_I32V_1(22)));
module_env.AddSignature(sigs.i_i());
- EXPECT_VERIFIES_S(sig, WASM_CALL_INDIRECT1(1, WASM_ZERO, WASM_I8(27)));
+ EXPECT_VERIFIES_S(sig, WASM_CALL_INDIRECT1(1, WASM_ZERO, WASM_I32V_1(27)));
- EXPECT_FAILURE_S(sig, WASM_CALL_INDIRECT1(2, WASM_ZERO, WASM_I8(27)));
+ EXPECT_FAILURE_S(sig, WASM_CALL_INDIRECT1(2, WASM_ZERO, WASM_I32V_1(27)));
}
-TEST_F(AstDecoderTest, IndirectCallsWithMismatchedSigs3) {
+TEST_F(FunctionBodyDecoderTest, IndirectCallsWithMismatchedSigs3) {
FunctionSig* sig = sigs.i_i();
TestModuleEnv module_env;
module_env.InitializeFunctionTable();
@@ -1465,22 +1536,22 @@ TEST_F(AstDecoderTest, IndirectCallsWithMismatchedSigs3) {
byte f0 = module_env.AddFunction(sigs.i_f());
- EXPECT_FAILURE_S(sig, WASM_CALL_INDIRECT1(f0, WASM_ZERO, WASM_I8(17)));
+ EXPECT_FAILURE_S(sig, WASM_CALL_INDIRECT1(f0, WASM_ZERO, WASM_I32V_1(17)));
EXPECT_FAILURE_S(sig, WASM_CALL_INDIRECT1(f0, WASM_ZERO, WASM_I64V_1(27)));
EXPECT_FAILURE_S(sig, WASM_CALL_INDIRECT1(f0, WASM_ZERO, WASM_F64(37.2)));
- EXPECT_FAILURE_S(sig, WASM_CALL_INDIRECT0(f0, WASM_I8(17)));
+ EXPECT_FAILURE_S(sig, WASM_CALL_INDIRECT0(f0, WASM_I32V_1(17)));
EXPECT_FAILURE_S(sig, WASM_CALL_INDIRECT0(f0, WASM_I64V_1(27)));
EXPECT_FAILURE_S(sig, WASM_CALL_INDIRECT0(f0, WASM_F64(37.2)));
byte f1 = module_env.AddFunction(sigs.i_d());
- EXPECT_FAILURE_S(sig, WASM_CALL_INDIRECT1(f1, WASM_ZERO, WASM_I8(16)));
+ EXPECT_FAILURE_S(sig, WASM_CALL_INDIRECT1(f1, WASM_ZERO, WASM_I32V_1(16)));
EXPECT_FAILURE_S(sig, WASM_CALL_INDIRECT1(f1, WASM_ZERO, WASM_I64V_1(16)));
EXPECT_FAILURE_S(sig, WASM_CALL_INDIRECT1(f1, WASM_ZERO, WASM_F32(17.6)));
}
-TEST_F(AstDecoderTest, IndirectCallsWithoutTableCrash) {
+TEST_F(FunctionBodyDecoderTest, IndirectCallsWithoutTableCrash) {
FunctionSig* sig = sigs.i_i();
TestModuleEnv module_env;
module = &module_env;
@@ -1490,12 +1561,12 @@ TEST_F(AstDecoderTest, IndirectCallsWithoutTableCrash) {
byte f2 = module_env.AddSignature(sigs.i_ii());
EXPECT_FAILURE_S(sig, WASM_CALL_INDIRECT0(f0, WASM_ZERO));
- EXPECT_FAILURE_S(sig, WASM_CALL_INDIRECT1(f1, WASM_ZERO, WASM_I8(22)));
- EXPECT_FAILURE_S(
- sig, WASM_CALL_INDIRECT2(f2, WASM_ZERO, WASM_I8(32), WASM_I8(72)));
+ EXPECT_FAILURE_S(sig, WASM_CALL_INDIRECT1(f1, WASM_ZERO, WASM_I32V_1(22)));
+ EXPECT_FAILURE_S(sig, WASM_CALL_INDIRECT2(f2, WASM_ZERO, WASM_I32V_1(32),
+ WASM_I32V_2(72)));
}
-TEST_F(AstDecoderTest, SimpleImportCalls) {
+TEST_F(FunctionBodyDecoderTest, SimpleImportCalls) {
FunctionSig* sig = sigs.i_i();
TestModuleEnv module_env;
module = &module_env;
@@ -1505,11 +1576,12 @@ TEST_F(AstDecoderTest, SimpleImportCalls) {
byte f2 = module_env.AddImport(sigs.i_ii());
EXPECT_VERIFIES_S(sig, WASM_CALL_FUNCTION0(f0));
- EXPECT_VERIFIES_S(sig, WASM_CALL_FUNCTION(f1, WASM_I8(22)));
- EXPECT_VERIFIES_S(sig, WASM_CALL_FUNCTION(f2, WASM_I8(32), WASM_I8(72)));
+ EXPECT_VERIFIES_S(sig, WASM_CALL_FUNCTION(f1, WASM_I32V_1(22)));
+ EXPECT_VERIFIES_S(sig,
+ WASM_CALL_FUNCTION(f2, WASM_I32V_1(32), WASM_I32V_2(72)));
}
-TEST_F(AstDecoderTest, ImportCallsWithMismatchedSigs3) {
+TEST_F(FunctionBodyDecoderTest, ImportCallsWithMismatchedSigs3) {
FunctionSig* sig = sigs.i_i();
TestModuleEnv module_env;
module = &module_env;
@@ -1517,51 +1589,51 @@ TEST_F(AstDecoderTest, ImportCallsWithMismatchedSigs3) {
byte f0 = module_env.AddImport(sigs.i_f());
EXPECT_FAILURE_S(sig, WASM_CALL_FUNCTION0(f0));
- EXPECT_FAILURE_S(sig, WASM_CALL_FUNCTION(f0, WASM_I8(17)));
+ EXPECT_FAILURE_S(sig, WASM_CALL_FUNCTION(f0, WASM_I32V_1(17)));
EXPECT_FAILURE_S(sig, WASM_CALL_FUNCTION(f0, WASM_I64V_1(27)));
EXPECT_FAILURE_S(sig, WASM_CALL_FUNCTION(f0, WASM_F64(37.2)));
byte f1 = module_env.AddImport(sigs.i_d());
EXPECT_FAILURE_S(sig, WASM_CALL_FUNCTION0(f1));
- EXPECT_FAILURE_S(sig, WASM_CALL_FUNCTION(f1, WASM_I8(16)));
+ EXPECT_FAILURE_S(sig, WASM_CALL_FUNCTION(f1, WASM_I32V_1(16)));
EXPECT_FAILURE_S(sig, WASM_CALL_FUNCTION(f1, WASM_I64V_1(16)));
EXPECT_FAILURE_S(sig, WASM_CALL_FUNCTION(f1, WASM_F32(17.6)));
}
-TEST_F(AstDecoderTest, Int32Globals) {
+TEST_F(FunctionBodyDecoderTest, Int32Globals) {
FunctionSig* sig = sigs.i_i();
TestModuleEnv module_env;
module = &module_env;
- module_env.AddGlobal(kAstI32);
+ module_env.AddGlobal(kWasmI32);
EXPECT_VERIFIES_S(sig, WASM_GET_GLOBAL(0));
EXPECT_FAILURE_S(sig, WASM_SET_GLOBAL(0, WASM_GET_LOCAL(0)));
EXPECT_VERIFIES_S(sig, WASM_SET_GLOBAL(0, WASM_GET_LOCAL(0)), WASM_ZERO);
}
-TEST_F(AstDecoderTest, ImmutableGlobal) {
+TEST_F(FunctionBodyDecoderTest, ImmutableGlobal) {
FunctionSig* sig = sigs.v_v();
TestModuleEnv module_env;
module = &module_env;
- uint32_t g0 = module_env.AddGlobal(kAstI32, true);
- uint32_t g1 = module_env.AddGlobal(kAstI32, false);
+ uint32_t g0 = module_env.AddGlobal(kWasmI32, true);
+ uint32_t g1 = module_env.AddGlobal(kWasmI32, false);
EXPECT_VERIFIES_S(sig, WASM_SET_GLOBAL(g0, WASM_ZERO));
EXPECT_FAILURE_S(sig, WASM_SET_GLOBAL(g1, WASM_ZERO));
}
-TEST_F(AstDecoderTest, Int32Globals_fail) {
+TEST_F(FunctionBodyDecoderTest, Int32Globals_fail) {
FunctionSig* sig = sigs.i_i();
TestModuleEnv module_env;
module = &module_env;
- module_env.AddGlobal(kAstI64);
- module_env.AddGlobal(kAstI64);
- module_env.AddGlobal(kAstF32);
- module_env.AddGlobal(kAstF64);
+ module_env.AddGlobal(kWasmI64);
+ module_env.AddGlobal(kWasmI64);
+ module_env.AddGlobal(kWasmF32);
+ module_env.AddGlobal(kWasmF64);
EXPECT_FAILURE_S(sig, WASM_GET_GLOBAL(0));
EXPECT_FAILURE_S(sig, WASM_GET_GLOBAL(1));
@@ -1574,13 +1646,13 @@ TEST_F(AstDecoderTest, Int32Globals_fail) {
EXPECT_FAILURE_S(sig, WASM_SET_GLOBAL(3, WASM_GET_LOCAL(0)), WASM_ZERO);
}
-TEST_F(AstDecoderTest, Int64Globals) {
+TEST_F(FunctionBodyDecoderTest, Int64Globals) {
FunctionSig* sig = sigs.l_l();
TestModuleEnv module_env;
module = &module_env;
- module_env.AddGlobal(kAstI64);
- module_env.AddGlobal(kAstI64);
+ module_env.AddGlobal(kWasmI64);
+ module_env.AddGlobal(kWasmI64);
EXPECT_VERIFIES_S(sig, WASM_GET_GLOBAL(0));
EXPECT_VERIFIES_S(sig, WASM_GET_GLOBAL(1));
@@ -1591,35 +1663,35 @@ TEST_F(AstDecoderTest, Int64Globals) {
WASM_GET_LOCAL(0));
}
-TEST_F(AstDecoderTest, Float32Globals) {
+TEST_F(FunctionBodyDecoderTest, Float32Globals) {
FunctionSig* sig = sigs.f_ff();
TestModuleEnv module_env;
module = &module_env;
- module_env.AddGlobal(kAstF32);
+ module_env.AddGlobal(kWasmF32);
EXPECT_VERIFIES_S(sig, WASM_GET_GLOBAL(0));
EXPECT_VERIFIES_S(sig, WASM_SET_GLOBAL(0, WASM_GET_LOCAL(0)),
WASM_GET_LOCAL(0));
}
-TEST_F(AstDecoderTest, Float64Globals) {
+TEST_F(FunctionBodyDecoderTest, Float64Globals) {
FunctionSig* sig = sigs.d_dd();
TestModuleEnv module_env;
module = &module_env;
- module_env.AddGlobal(kAstF64);
+ module_env.AddGlobal(kWasmF64);
EXPECT_VERIFIES_S(sig, WASM_GET_GLOBAL(0));
EXPECT_VERIFIES_S(sig, WASM_SET_GLOBAL(0, WASM_GET_LOCAL(0)),
WASM_GET_LOCAL(0));
}
-TEST_F(AstDecoderTest, AllGetGlobalCombinations) {
- for (size_t i = 0; i < arraysize(kLocalTypes); i++) {
- LocalType local_type = kLocalTypes[i];
- for (size_t j = 0; j < arraysize(kLocalTypes); j++) {
- LocalType global_type = kLocalTypes[j];
+TEST_F(FunctionBodyDecoderTest, AllGetGlobalCombinations) {
+ for (size_t i = 0; i < arraysize(kValueTypes); i++) {
+ ValueType local_type = kValueTypes[i];
+ for (size_t j = 0; j < arraysize(kValueTypes); j++) {
+ ValueType global_type = kValueTypes[j];
FunctionSig sig(1, 0, &local_type);
TestModuleEnv module_env;
module = &module_env;
@@ -1633,11 +1705,11 @@ TEST_F(AstDecoderTest, AllGetGlobalCombinations) {
}
}
-TEST_F(AstDecoderTest, AllSetGlobalCombinations) {
- for (size_t i = 0; i < arraysize(kLocalTypes); i++) {
- LocalType local_type = kLocalTypes[i];
- for (size_t j = 0; j < arraysize(kLocalTypes); j++) {
- LocalType global_type = kLocalTypes[j];
+TEST_F(FunctionBodyDecoderTest, AllSetGlobalCombinations) {
+ for (size_t i = 0; i < arraysize(kValueTypes); i++) {
+ ValueType local_type = kValueTypes[i];
+ for (size_t j = 0; j < arraysize(kValueTypes); j++) {
+ ValueType global_type = kValueTypes[j];
FunctionSig sig(0, 1, &local_type);
TestModuleEnv module_env;
module = &module_env;
@@ -1651,29 +1723,29 @@ TEST_F(AstDecoderTest, AllSetGlobalCombinations) {
}
}
-TEST_F(AstDecoderTest, WasmGrowMemory) {
+TEST_F(FunctionBodyDecoderTest, WasmGrowMemory) {
TestModuleEnv module_env;
module = &module_env;
- module->origin = kWasmOrigin;
+ module_env.InitializeMemory();
byte code[] = {WASM_GET_LOCAL(0), kExprGrowMemory, 0};
EXPECT_VERIFIES_C(i_i, code);
EXPECT_FAILURE_C(i_d, code);
}
-TEST_F(AstDecoderTest, AsmJsGrowMemory) {
- TestModuleEnv module_env;
+TEST_F(FunctionBodyDecoderTest, AsmJsGrowMemory) {
+ TestModuleEnv module_env(kAsmJsOrigin);
module = &module_env;
- module->origin = kAsmJsOrigin;
+ module_env.InitializeMemory();
byte code[] = {WASM_GET_LOCAL(0), kExprGrowMemory, 0};
EXPECT_FAILURE_C(i_i, code);
}
-TEST_F(AstDecoderTest, AsmJsBinOpsCheckOrigin) {
- LocalType float32int32float32[] = {kAstF32, kAstI32, kAstF32};
+TEST_F(FunctionBodyDecoderTest, AsmJsBinOpsCheckOrigin) {
+ ValueType float32int32float32[] = {kWasmF32, kWasmI32, kWasmF32};
FunctionSig sig_f_if(1, 2, float32int32float32);
- LocalType float64int32float64[] = {kAstF64, kAstI32, kAstF64};
+ ValueType float64int32float64[] = {kWasmF64, kWasmI32, kWasmF64};
FunctionSig sig_d_id(1, 2, float64int32float64);
struct {
WasmOpcode op;
@@ -1694,9 +1766,9 @@ TEST_F(AstDecoderTest, AsmJsBinOpsCheckOrigin) {
};
{
- TestModuleEnv module_env;
+ TestModuleEnv module_env(kAsmJsOrigin);
module = &module_env;
- module->origin = kAsmJsOrigin;
+ module_env.InitializeMemory();
for (size_t i = 0; i < arraysize(AsmJsBinOps); i++) {
TestBinop(AsmJsBinOps[i].op, AsmJsBinOps[i].sig);
}
@@ -1705,7 +1777,7 @@ TEST_F(AstDecoderTest, AsmJsBinOpsCheckOrigin) {
{
TestModuleEnv module_env;
module = &module_env;
- module->origin = kWasmOrigin;
+ module_env.InitializeMemory();
for (size_t i = 0; i < arraysize(AsmJsBinOps); i++) {
byte code[] = {
WASM_BINOP(AsmJsBinOps[i].op, WASM_GET_LOCAL(0), WASM_GET_LOCAL(1))};
@@ -1714,10 +1786,10 @@ TEST_F(AstDecoderTest, AsmJsBinOpsCheckOrigin) {
}
}
-TEST_F(AstDecoderTest, AsmJsUnOpsCheckOrigin) {
- LocalType float32int32[] = {kAstF32, kAstI32};
+TEST_F(FunctionBodyDecoderTest, AsmJsUnOpsCheckOrigin) {
+ ValueType float32int32[] = {kWasmF32, kWasmI32};
FunctionSig sig_f_i(1, 1, float32int32);
- LocalType float64int32[] = {kAstF64, kAstI32};
+ ValueType float64int32[] = {kWasmF64, kWasmI32};
FunctionSig sig_d_i(1, 1, float64int32);
struct {
WasmOpcode op;
@@ -1742,9 +1814,9 @@ TEST_F(AstDecoderTest, AsmJsUnOpsCheckOrigin) {
{kExprI32AsmjsSConvertF64, sigs.i_d()},
{kExprI32AsmjsUConvertF64, sigs.i_d()}};
{
- TestModuleEnv module_env;
+ TestModuleEnv module_env(kAsmJsOrigin);
module = &module_env;
- module->origin = kAsmJsOrigin;
+ module_env.InitializeMemory();
for (size_t i = 0; i < arraysize(AsmJsUnOps); i++) {
TestUnop(AsmJsUnOps[i].op, AsmJsUnOps[i].sig);
}
@@ -1753,7 +1825,7 @@ TEST_F(AstDecoderTest, AsmJsUnOpsCheckOrigin) {
{
TestModuleEnv module_env;
module = &module_env;
- module->origin = kWasmOrigin;
+ module_env.InitializeMemory();
for (size_t i = 0; i < arraysize(AsmJsUnOps); i++) {
byte code[] = {WASM_UNOP(AsmJsUnOps[i].op, WASM_GET_LOCAL(0))};
EXPECT_FAILURE_SC(AsmJsUnOps[i].sig, code);
@@ -1761,14 +1833,14 @@ TEST_F(AstDecoderTest, AsmJsUnOpsCheckOrigin) {
}
}
-TEST_F(AstDecoderTest, BreakEnd) {
+TEST_F(FunctionBodyDecoderTest, BreakEnd) {
EXPECT_VERIFIES(
i_i, WASM_BLOCK_I(WASM_I32_ADD(WASM_BRV(0, WASM_ZERO), WASM_ZERO)));
EXPECT_VERIFIES(
i_i, WASM_BLOCK_I(WASM_I32_ADD(WASM_ZERO, WASM_BRV(0, WASM_ZERO))));
}
-TEST_F(AstDecoderTest, BreakIfBinop) {
+TEST_F(FunctionBodyDecoderTest, BreakIfBinop) {
EXPECT_VERIFIES(i_i, WASM_BLOCK_I(WASM_I32_ADD(
WASM_BRV_IF(0, WASM_ZERO, WASM_ZERO), WASM_ZERO)));
EXPECT_VERIFIES(i_i, WASM_BLOCK_I(WASM_I32_ADD(
@@ -1778,7 +1850,7 @@ TEST_F(AstDecoderTest, BreakIfBinop) {
WASM_BLOCK_F(WASM_F32_ABS(WASM_BRV_IF(0, WASM_F32(0.0f), WASM_ZERO))));
}
-TEST_F(AstDecoderTest, BreakIfBinop_fail) {
+TEST_F(FunctionBodyDecoderTest, BreakIfBinop_fail) {
EXPECT_FAILURE_S(
sigs.f_ff(),
WASM_BLOCK_F(WASM_F32_ABS(WASM_BRV_IF(0, WASM_ZERO, WASM_ZERO))));
@@ -1787,12 +1859,12 @@ TEST_F(AstDecoderTest, BreakIfBinop_fail) {
WASM_BLOCK_I(WASM_F32_ABS(WASM_BRV_IF(0, WASM_F32(0.0f), WASM_ZERO))));
}
-TEST_F(AstDecoderTest, BreakNesting1) {
+TEST_F(FunctionBodyDecoderTest, BreakNesting1) {
for (int i = 0; i < 5; i++) {
// (block[2] (loop[2] (if (get p) break[N]) (set p 1)) p)
byte code[] = {WASM_BLOCK_I(
WASM_LOOP(WASM_IF(WASM_GET_LOCAL(0), WASM_BRV(i + 1, WASM_ZERO)),
- WASM_SET_LOCAL(0, WASM_I8(1))),
+ WASM_SET_LOCAL(0, WASM_I32V_1(1))),
WASM_ZERO)};
if (i < 3) {
EXPECT_VERIFIES_C(i_i, code);
@@ -1802,7 +1874,7 @@ TEST_F(AstDecoderTest, BreakNesting1) {
}
}
-TEST_F(AstDecoderTest, BreakNesting2) {
+TEST_F(FunctionBodyDecoderTest, BreakNesting2) {
for (int i = 0; i < 7; i++) {
byte code[] = {B1(WASM_LOOP(WASM_IF(WASM_ZERO, WASM_BR(i)), WASM_NOP))};
if (i <= 3) {
@@ -1813,7 +1885,7 @@ TEST_F(AstDecoderTest, BreakNesting2) {
}
}
-TEST_F(AstDecoderTest, BreakNesting3) {
+TEST_F(FunctionBodyDecoderTest, BreakNesting3) {
for (int i = 0; i < 7; i++) {
// (block[1] (loop[1] (block[1] (if 0 break[N])
byte code[] = {
@@ -1826,20 +1898,20 @@ TEST_F(AstDecoderTest, BreakNesting3) {
}
}
-TEST_F(AstDecoderTest, BreaksWithMultipleTypes) {
- EXPECT_FAILURE(i_i, B2(WASM_BRV_IF_ZERO(0, WASM_I8(7)), WASM_F32(7.7)));
+TEST_F(FunctionBodyDecoderTest, BreaksWithMultipleTypes) {
+ EXPECT_FAILURE(i_i, B2(WASM_BRV_IF_ZERO(0, WASM_I32V_1(7)), WASM_F32(7.7)));
- EXPECT_FAILURE(i_i, B2(WASM_BRV_IF_ZERO(0, WASM_I8(7)),
+ EXPECT_FAILURE(i_i, B2(WASM_BRV_IF_ZERO(0, WASM_I32V_1(7)),
WASM_BRV_IF_ZERO(0, WASM_F32(7.7))));
- EXPECT_FAILURE(
- i_i, B3(WASM_BRV_IF_ZERO(0, WASM_I8(8)), WASM_BRV_IF_ZERO(0, WASM_I8(0)),
- WASM_BRV_IF_ZERO(0, WASM_F32(7.7))));
- EXPECT_FAILURE(i_i, B3(WASM_BRV_IF_ZERO(0, WASM_I8(9)),
+ EXPECT_FAILURE(i_i, B3(WASM_BRV_IF_ZERO(0, WASM_I32V_1(8)),
+ WASM_BRV_IF_ZERO(0, WASM_I32V_1(0)),
+ WASM_BRV_IF_ZERO(0, WASM_F32(7.7))));
+ EXPECT_FAILURE(i_i, B3(WASM_BRV_IF_ZERO(0, WASM_I32V_1(9)),
WASM_BRV_IF_ZERO(0, WASM_F32(7.7)),
- WASM_BRV_IF_ZERO(0, WASM_I8(11))));
+ WASM_BRV_IF_ZERO(0, WASM_I32V_1(11))));
}
-TEST_F(AstDecoderTest, BreakNesting_6_levels) {
+TEST_F(FunctionBodyDecoderTest, BreakNesting_6_levels) {
for (int mask = 0; mask < 64; mask++) {
for (int i = 0; i < 14; i++) {
byte code[] = {WASM_BLOCK(WASM_BLOCK(
@@ -1865,7 +1937,7 @@ TEST_F(AstDecoderTest, BreakNesting_6_levels) {
}
}
-TEST_F(AstDecoderTest, Break_TypeCheck) {
+TEST_F(FunctionBodyDecoderTest, Break_TypeCheck) {
FunctionSig* sigarray[] = {sigs.i_i(), sigs.l_l(), sigs.f_ff(), sigs.d_dd()};
for (size_t i = 0; i < arraysize(sigarray); i++) {
FunctionSig* sig = sigarray[i];
@@ -1886,10 +1958,10 @@ TEST_F(AstDecoderTest, Break_TypeCheck) {
WASM_F64(1.2)));
}
-TEST_F(AstDecoderTest, Break_TypeCheckAll1) {
- for (size_t i = 0; i < arraysize(kLocalTypes); i++) {
- for (size_t j = 0; j < arraysize(kLocalTypes); j++) {
- LocalType storage[] = {kLocalTypes[i], kLocalTypes[i], kLocalTypes[j]};
+TEST_F(FunctionBodyDecoderTest, Break_TypeCheckAll1) {
+ for (size_t i = 0; i < arraysize(kValueTypes); i++) {
+ for (size_t j = 0; j < arraysize(kValueTypes); j++) {
+ ValueType storage[] = {kValueTypes[i], kValueTypes[i], kValueTypes[j]};
FunctionSig sig(1, 2, storage);
byte code[] = {WASM_BLOCK_T(
sig.GetReturn(), WASM_IF(WASM_ZERO, WASM_BRV(0, WASM_GET_LOCAL(0))),
@@ -1904,10 +1976,10 @@ TEST_F(AstDecoderTest, Break_TypeCheckAll1) {
}
}
-TEST_F(AstDecoderTest, Break_TypeCheckAll2) {
- for (size_t i = 0; i < arraysize(kLocalTypes); i++) {
- for (size_t j = 0; j < arraysize(kLocalTypes); j++) {
- LocalType storage[] = {kLocalTypes[i], kLocalTypes[i], kLocalTypes[j]};
+TEST_F(FunctionBodyDecoderTest, Break_TypeCheckAll2) {
+ for (size_t i = 0; i < arraysize(kValueTypes); i++) {
+ for (size_t j = 0; j < arraysize(kValueTypes); j++) {
+ ValueType storage[] = {kValueTypes[i], kValueTypes[i], kValueTypes[j]};
FunctionSig sig(1, 2, storage);
byte code[] = {WASM_IF_ELSE_T(sig.GetReturn(0), WASM_ZERO,
WASM_BRV_IF_ZERO(0, WASM_GET_LOCAL(0)),
@@ -1922,10 +1994,10 @@ TEST_F(AstDecoderTest, Break_TypeCheckAll2) {
}
}
-TEST_F(AstDecoderTest, Break_TypeCheckAll3) {
- for (size_t i = 0; i < arraysize(kLocalTypes); i++) {
- for (size_t j = 0; j < arraysize(kLocalTypes); j++) {
- LocalType storage[] = {kLocalTypes[i], kLocalTypes[i], kLocalTypes[j]};
+TEST_F(FunctionBodyDecoderTest, Break_TypeCheckAll3) {
+ for (size_t i = 0; i < arraysize(kValueTypes); i++) {
+ for (size_t j = 0; j < arraysize(kValueTypes); j++) {
+ ValueType storage[] = {kValueTypes[i], kValueTypes[i], kValueTypes[j]};
FunctionSig sig(1, 2, storage);
byte code[] = {WASM_IF_ELSE_T(sig.GetReturn(), WASM_ZERO,
WASM_GET_LOCAL(1),
@@ -1940,18 +2012,18 @@ TEST_F(AstDecoderTest, Break_TypeCheckAll3) {
}
}
-TEST_F(AstDecoderTest, Break_Unify) {
+TEST_F(FunctionBodyDecoderTest, Break_Unify) {
for (int which = 0; which < 2; which++) {
- for (size_t i = 0; i < arraysize(kLocalTypes); i++) {
- LocalType type = kLocalTypes[i];
- LocalType storage[] = {kAstI32, kAstI32, type};
+ for (size_t i = 0; i < arraysize(kValueTypes); i++) {
+ ValueType type = kValueTypes[i];
+ ValueType storage[] = {kWasmI32, kWasmI32, type};
FunctionSig sig(1, 2, storage);
byte code1[] = {WASM_BLOCK_T(
type, WASM_IF(WASM_ZERO, WASM_BRV(1, WASM_GET_LOCAL(which))),
WASM_GET_LOCAL(which ^ 1))};
- if (type == kAstI32) {
+ if (type == kWasmI32) {
EXPECT_VERIFIES_SC(&sig, code1);
} else {
EXPECT_FAILURE_SC(&sig, code1);
@@ -1960,15 +2032,15 @@ TEST_F(AstDecoderTest, Break_Unify) {
}
}
-TEST_F(AstDecoderTest, BreakIf_cond_type) {
- for (size_t i = 0; i < arraysize(kLocalTypes); i++) {
- for (size_t j = 0; j < arraysize(kLocalTypes); j++) {
- LocalType types[] = {kLocalTypes[i], kLocalTypes[i], kLocalTypes[j]};
+TEST_F(FunctionBodyDecoderTest, BreakIf_cond_type) {
+ for (size_t i = 0; i < arraysize(kValueTypes); i++) {
+ for (size_t j = 0; j < arraysize(kValueTypes); j++) {
+ ValueType types[] = {kValueTypes[i], kValueTypes[i], kValueTypes[j]};
FunctionSig sig(1, 2, types);
byte code[] = {WASM_BLOCK_T(
types[0], WASM_BRV_IF(0, WASM_GET_LOCAL(0), WASM_GET_LOCAL(1)))};
- if (types[2] == kAstI32) {
+ if (types[2] == kWasmI32) {
EXPECT_VERIFIES_SC(&sig, code);
} else {
EXPECT_FAILURE_SC(&sig, code);
@@ -1977,11 +2049,11 @@ TEST_F(AstDecoderTest, BreakIf_cond_type) {
}
}
-TEST_F(AstDecoderTest, BreakIf_val_type) {
- for (size_t i = 0; i < arraysize(kLocalTypes); i++) {
- for (size_t j = 0; j < arraysize(kLocalTypes); j++) {
- LocalType types[] = {kLocalTypes[i], kLocalTypes[i], kLocalTypes[j],
- kAstI32};
+TEST_F(FunctionBodyDecoderTest, BreakIf_val_type) {
+ for (size_t i = 0; i < arraysize(kValueTypes); i++) {
+ for (size_t j = 0; j < arraysize(kValueTypes); j++) {
+ ValueType types[] = {kValueTypes[i], kValueTypes[i], kValueTypes[j],
+ kWasmI32};
FunctionSig sig(1, 3, types);
byte code[] = {WASM_BLOCK_T(
types[1], WASM_BRV_IF(0, WASM_GET_LOCAL(1), WASM_GET_LOCAL(2)),
@@ -1996,16 +2068,16 @@ TEST_F(AstDecoderTest, BreakIf_val_type) {
}
}
-TEST_F(AstDecoderTest, BreakIf_Unify) {
+TEST_F(FunctionBodyDecoderTest, BreakIf_Unify) {
for (int which = 0; which < 2; which++) {
- for (size_t i = 0; i < arraysize(kLocalTypes); i++) {
- LocalType type = kLocalTypes[i];
- LocalType storage[] = {kAstI32, kAstI32, type};
+ for (size_t i = 0; i < arraysize(kValueTypes); i++) {
+ ValueType type = kValueTypes[i];
+ ValueType storage[] = {kWasmI32, kWasmI32, type};
FunctionSig sig(1, 2, storage);
byte code[] = {WASM_BLOCK_I(WASM_BRV_IF_ZERO(0, WASM_GET_LOCAL(which)),
WASM_DROP, WASM_GET_LOCAL(which ^ 1))};
- if (type == kAstI32) {
+ if (type == kWasmI32) {
EXPECT_VERIFIES_SC(&sig, code);
} else {
EXPECT_FAILURE_SC(&sig, code);
@@ -2014,29 +2086,29 @@ TEST_F(AstDecoderTest, BreakIf_Unify) {
}
}
-TEST_F(AstDecoderTest, BrTable0) {
+TEST_F(FunctionBodyDecoderTest, BrTable0) {
static byte code[] = {kExprBrTable, 0, BR_TARGET(0)};
EXPECT_FAILURE_C(v_v, code);
}
-TEST_F(AstDecoderTest, BrTable0b) {
+TEST_F(FunctionBodyDecoderTest, BrTable0b) {
static byte code[] = {kExprI32Const, 11, kExprBrTable, 0, BR_TARGET(0)};
EXPECT_VERIFIES_C(v_v, code);
EXPECT_FAILURE_C(i_i, code);
}
-TEST_F(AstDecoderTest, BrTable0c) {
+TEST_F(FunctionBodyDecoderTest, BrTable0c) {
static byte code[] = {kExprI32Const, 11, kExprBrTable, 0, BR_TARGET(1)};
EXPECT_FAILURE_C(v_v, code);
EXPECT_FAILURE_C(i_i, code);
}
-TEST_F(AstDecoderTest, BrTable1a) {
- static byte code[] = {B1(WASM_BR_TABLE(WASM_I8(67), 0, BR_TARGET(0)))};
+TEST_F(FunctionBodyDecoderTest, BrTable1a) {
+ static byte code[] = {B1(WASM_BR_TABLE(WASM_I32V_2(67), 0, BR_TARGET(0)))};
EXPECT_VERIFIES_C(v_v, code);
}
-TEST_F(AstDecoderTest, BrTable1b) {
+TEST_F(FunctionBodyDecoderTest, BrTable1b) {
static byte code[] = {B1(WASM_BR_TABLE(WASM_ZERO, 0, BR_TARGET(0)))};
EXPECT_VERIFIES_C(v_v, code);
EXPECT_FAILURE_C(i_i, code);
@@ -2044,26 +2116,26 @@ TEST_F(AstDecoderTest, BrTable1b) {
EXPECT_FAILURE_C(d_dd, code);
}
-TEST_F(AstDecoderTest, BrTable2a) {
+TEST_F(FunctionBodyDecoderTest, BrTable2a) {
static byte code[] = {
- B1(WASM_BR_TABLE(WASM_I8(67), 1, BR_TARGET(0), BR_TARGET(0)))};
+ B1(WASM_BR_TABLE(WASM_I32V_2(67), 1, BR_TARGET(0), BR_TARGET(0)))};
EXPECT_VERIFIES_C(v_v, code);
}
-TEST_F(AstDecoderTest, BrTable2b) {
- static byte code[] = {WASM_BLOCK(
- WASM_BLOCK(WASM_BR_TABLE(WASM_I8(67), 1, BR_TARGET(0), BR_TARGET(1))))};
+TEST_F(FunctionBodyDecoderTest, BrTable2b) {
+ static byte code[] = {WASM_BLOCK(WASM_BLOCK(
+ WASM_BR_TABLE(WASM_I32V_2(67), 1, BR_TARGET(0), BR_TARGET(1))))};
EXPECT_VERIFIES_C(v_v, code);
}
-TEST_F(AstDecoderTest, BrTable_off_end) {
+TEST_F(FunctionBodyDecoderTest, BrTable_off_end) {
static byte code[] = {B1(WASM_BR_TABLE(WASM_GET_LOCAL(0), 0, BR_TARGET(0)))};
for (size_t len = 1; len < sizeof(code); len++) {
Verify(kError, sigs.i_i(), code, code + len);
}
}
-TEST_F(AstDecoderTest, BrTable_invalid_br1) {
+TEST_F(FunctionBodyDecoderTest, BrTable_invalid_br1) {
for (int depth = 0; depth < 4; depth++) {
byte code[] = {B1(WASM_BR_TABLE(WASM_GET_LOCAL(0), 0, BR_TARGET(depth)))};
if (depth <= 1) {
@@ -2074,7 +2146,7 @@ TEST_F(AstDecoderTest, BrTable_invalid_br1) {
}
}
-TEST_F(AstDecoderTest, BrTable_invalid_br2) {
+TEST_F(FunctionBodyDecoderTest, BrTable_invalid_br2) {
for (int depth = 0; depth < 7; depth++) {
byte code[] = {
WASM_LOOP(WASM_BR_TABLE(WASM_GET_LOCAL(0), 0, BR_TARGET(depth)))};
@@ -2086,40 +2158,51 @@ TEST_F(AstDecoderTest, BrTable_invalid_br2) {
}
}
-TEST_F(AstDecoderTest, Brv1) {
+TEST_F(FunctionBodyDecoderTest, BrUnreachable1) {
+ EXPECT_VERIFIES(v_i, WASM_GET_LOCAL(0), kExprBrTable, 0, BR_TARGET(0));
+}
+
+TEST_F(FunctionBodyDecoderTest, BrUnreachable2) {
+ EXPECT_VERIFIES(v_i, WASM_GET_LOCAL(0), kExprBrTable, 0, BR_TARGET(0),
+ WASM_NOP);
+ EXPECT_FAILURE(v_i, WASM_GET_LOCAL(0), kExprBrTable, 0, BR_TARGET(0),
+ WASM_ZERO);
+}
+
+TEST_F(FunctionBodyDecoderTest, Brv1) {
EXPECT_VERIFIES(i_i, WASM_BLOCK_I(WASM_BRV(0, WASM_ZERO)));
- EXPECT_VERIFIES(i_i, WASM_BLOCK_I(WASM_LOOP(WASM_BRV(2, WASM_ZERO))));
+ EXPECT_VERIFIES(i_i, WASM_BLOCK_I(WASM_LOOP_I(WASM_BRV(2, WASM_ZERO))));
}
-TEST_F(AstDecoderTest, Brv1_type) {
+TEST_F(FunctionBodyDecoderTest, Brv1_type) {
EXPECT_VERIFIES(i_ii, WASM_BLOCK_I(WASM_BRV(0, WASM_GET_LOCAL(0))));
EXPECT_VERIFIES(l_ll, WASM_BLOCK_L(WASM_BRV(0, WASM_GET_LOCAL(0))));
EXPECT_VERIFIES(f_ff, WASM_BLOCK_F(WASM_BRV(0, WASM_GET_LOCAL(0))));
EXPECT_VERIFIES(d_dd, WASM_BLOCK_D(WASM_BRV(0, WASM_GET_LOCAL(0))));
}
-TEST_F(AstDecoderTest, Brv1_type_n) {
+TEST_F(FunctionBodyDecoderTest, Brv1_type_n) {
EXPECT_FAILURE(i_f, WASM_BLOCK_I(WASM_BRV(0, WASM_GET_LOCAL(0))));
EXPECT_FAILURE(i_d, WASM_BLOCK_I(WASM_BRV(0, WASM_GET_LOCAL(0))));
}
-TEST_F(AstDecoderTest, BrvIf1) {
+TEST_F(FunctionBodyDecoderTest, BrvIf1) {
EXPECT_VERIFIES(i_v, WASM_BLOCK_I(WASM_BRV_IF_ZERO(0, WASM_ZERO)));
}
-TEST_F(AstDecoderTest, BrvIf1_type) {
+TEST_F(FunctionBodyDecoderTest, BrvIf1_type) {
EXPECT_VERIFIES(i_i, WASM_BLOCK_I(WASM_BRV_IF_ZERO(0, WASM_GET_LOCAL(0))));
EXPECT_VERIFIES(l_l, WASM_BLOCK_L(WASM_BRV_IF_ZERO(0, WASM_GET_LOCAL(0))));
EXPECT_VERIFIES(f_ff, WASM_BLOCK_F(WASM_BRV_IF_ZERO(0, WASM_GET_LOCAL(0))));
EXPECT_VERIFIES(d_dd, WASM_BLOCK_D(WASM_BRV_IF_ZERO(0, WASM_GET_LOCAL(0))));
}
-TEST_F(AstDecoderTest, BrvIf1_type_n) {
+TEST_F(FunctionBodyDecoderTest, BrvIf1_type_n) {
EXPECT_FAILURE(i_f, WASM_BLOCK_I(WASM_BRV_IF_ZERO(0, WASM_GET_LOCAL(0))));
EXPECT_FAILURE(i_d, WASM_BLOCK_I(WASM_BRV_IF_ZERO(0, WASM_GET_LOCAL(0))));
}
-TEST_F(AstDecoderTest, Select) {
+TEST_F(FunctionBodyDecoderTest, Select) {
EXPECT_VERIFIES(i_i,
WASM_SELECT(WASM_GET_LOCAL(0), WASM_GET_LOCAL(0), WASM_ZERO));
EXPECT_VERIFIES(f_ff, WASM_SELECT(WASM_F32(0.0), WASM_F32(0.0), WASM_ZERO));
@@ -2127,7 +2210,7 @@ TEST_F(AstDecoderTest, Select) {
EXPECT_VERIFIES(l_l, WASM_SELECT(WASM_I64V_1(0), WASM_I64V_1(0), WASM_ZERO));
}
-TEST_F(AstDecoderTest, Select_fail1) {
+TEST_F(FunctionBodyDecoderTest, Select_fail1) {
EXPECT_FAILURE(
i_i, WASM_SELECT(WASM_F32(0.0), WASM_GET_LOCAL(0), WASM_GET_LOCAL(0)));
EXPECT_FAILURE(
@@ -2136,12 +2219,12 @@ TEST_F(AstDecoderTest, Select_fail1) {
i_i, WASM_SELECT(WASM_GET_LOCAL(0), WASM_GET_LOCAL(0), WASM_F32(0.0)));
}
-TEST_F(AstDecoderTest, Select_fail2) {
- for (size_t i = 0; i < arraysize(kLocalTypes); i++) {
- LocalType type = kLocalTypes[i];
- if (type == kAstI32) continue;
+TEST_F(FunctionBodyDecoderTest, Select_fail2) {
+ for (size_t i = 0; i < arraysize(kValueTypes); i++) {
+ ValueType type = kValueTypes[i];
+ if (type == kWasmI32) continue;
- LocalType types[] = {type, kAstI32, type};
+ ValueType types[] = {type, kWasmI32, type};
FunctionSig sig(1, 2, types);
EXPECT_VERIFIES_S(&sig, WASM_SELECT(WASM_GET_LOCAL(1), WASM_GET_LOCAL(1),
@@ -2158,7 +2241,7 @@ TEST_F(AstDecoderTest, Select_fail2) {
}
}
-TEST_F(AstDecoderTest, Select_TypeCheck) {
+TEST_F(FunctionBodyDecoderTest, Select_TypeCheck) {
EXPECT_FAILURE(
i_i, WASM_SELECT(WASM_F32(9.9), WASM_GET_LOCAL(0), WASM_GET_LOCAL(0)));
@@ -2169,7 +2252,7 @@ TEST_F(AstDecoderTest, Select_TypeCheck) {
WASM_SELECT(WASM_F32(9.9), WASM_GET_LOCAL(0), WASM_I64V_1(0)));
}
-TEST_F(AstDecoderTest, Throw) {
+TEST_F(FunctionBodyDecoderTest, Throw) {
FLAG_wasm_eh_prototype = true;
EXPECT_VERIFIES(v_i, WASM_GET_LOCAL(0), kExprThrow);
@@ -2178,11 +2261,17 @@ TEST_F(AstDecoderTest, Throw) {
EXPECT_FAILURE(l_l, WASM_GET_LOCAL(0), kExprThrow, WASM_I64V(0));
}
+TEST_F(FunctionBodyDecoderTest, ThrowUnreachable) {
+ // TODO(titzer): unreachable code after throw should validate.
+ // FLAG_wasm_eh_prototype = true;
+ // EXPECT_VERIFIES(v_i, WASM_GET_LOCAL(0), kExprThrow, kExprSetLocal, 0);
+}
+
#define WASM_TRY_OP kExprTry, kLocalVoid
#define WASM_CATCH(local) kExprCatch, static_cast<byte>(local)
-TEST_F(AstDecoderTest, TryCatch) {
+TEST_F(FunctionBodyDecoderTest, TryCatch) {
FLAG_wasm_eh_prototype = true;
EXPECT_VERIFIES(v_i, WASM_TRY_OP, WASM_CATCH(0), kExprEnd);
@@ -2196,44 +2285,44 @@ TEST_F(AstDecoderTest, TryCatch) {
EXPECT_FAILURE(v_i, WASM_TRY_OP, WASM_CATCH(0), WASM_CATCH(0), kExprEnd);
}
-TEST_F(AstDecoderTest, MultiValBlock1) {
+TEST_F(FunctionBodyDecoderTest, MultiValBlock1) {
FLAG_wasm_mv_prototype = true;
- EXPECT_VERIFIES(i_ii, WASM_BLOCK_TT(kAstI32, kAstI32, WASM_GET_LOCAL(0),
+ EXPECT_VERIFIES(i_ii, WASM_BLOCK_TT(kWasmI32, kWasmI32, WASM_GET_LOCAL(0),
WASM_GET_LOCAL(1)),
kExprI32Add);
}
-TEST_F(AstDecoderTest, MultiValBlock2) {
+TEST_F(FunctionBodyDecoderTest, MultiValBlock2) {
FLAG_wasm_mv_prototype = true;
- EXPECT_VERIFIES(i_ii, WASM_BLOCK_TT(kAstI32, kAstI32, WASM_GET_LOCAL(0),
+ EXPECT_VERIFIES(i_ii, WASM_BLOCK_TT(kWasmI32, kWasmI32, WASM_GET_LOCAL(0),
WASM_GET_LOCAL(1)),
WASM_I32_ADD(WASM_NOP, WASM_NOP));
}
-TEST_F(AstDecoderTest, MultiValBlockBr1) {
+TEST_F(FunctionBodyDecoderTest, MultiValBlockBr1) {
FLAG_wasm_mv_prototype = true;
- EXPECT_FAILURE(i_ii,
- WASM_BLOCK_TT(kAstI32, kAstI32, WASM_GET_LOCAL(0), WASM_BR(0)),
- kExprI32Add);
- EXPECT_VERIFIES(i_ii, WASM_BLOCK_TT(kAstI32, kAstI32, WASM_GET_LOCAL(0),
+ EXPECT_FAILURE(
+ i_ii, WASM_BLOCK_TT(kWasmI32, kWasmI32, WASM_GET_LOCAL(0), WASM_BR(0)),
+ kExprI32Add);
+ EXPECT_VERIFIES(i_ii, WASM_BLOCK_TT(kWasmI32, kWasmI32, WASM_GET_LOCAL(0),
WASM_GET_LOCAL(1), WASM_BR(0)),
kExprI32Add);
}
-TEST_F(AstDecoderTest, MultiValIf1) {
+TEST_F(FunctionBodyDecoderTest, MultiValIf1) {
FLAG_wasm_mv_prototype = true;
EXPECT_FAILURE(
- i_ii, WASM_IF_ELSE_TT(kAstI32, kAstI32, WASM_GET_LOCAL(0),
+ i_ii, WASM_IF_ELSE_TT(kWasmI32, kWasmI32, WASM_GET_LOCAL(0),
WASM_SEQ(WASM_GET_LOCAL(0)),
WASM_SEQ(WASM_GET_LOCAL(1), WASM_GET_LOCAL(0))),
kExprI32Add);
EXPECT_FAILURE(i_ii,
- WASM_IF_ELSE_TT(kAstI32, kAstI32, WASM_GET_LOCAL(0),
+ WASM_IF_ELSE_TT(kWasmI32, kWasmI32, WASM_GET_LOCAL(0),
WASM_SEQ(WASM_GET_LOCAL(0), WASM_GET_LOCAL(1)),
WASM_SEQ(WASM_GET_LOCAL(1))),
kExprI32Add);
EXPECT_VERIFIES(
- i_ii, WASM_IF_ELSE_TT(kAstI32, kAstI32, WASM_GET_LOCAL(0),
+ i_ii, WASM_IF_ELSE_TT(kWasmI32, kWasmI32, WASM_GET_LOCAL(0),
WASM_SEQ(WASM_GET_LOCAL(0), WASM_GET_LOCAL(1)),
WASM_SEQ(WASM_GET_LOCAL(1), WASM_GET_LOCAL(0))),
kExprI32Add);
@@ -2334,7 +2423,6 @@ TEST_F(WasmOpcodeLengthTest, Statements) {
}
TEST_F(WasmOpcodeLengthTest, MiscExpressions) {
- EXPECT_LENGTH(2, kExprI8Const);
EXPECT_LENGTH(5, kExprF32Const);
EXPECT_LENGTH(9, kExprF64Const);
EXPECT_LENGTH(2, kExprGetLocal);
@@ -2539,71 +2627,62 @@ TEST_F(WasmOpcodeLengthTest, SimdExpressions) {
EXPECT_LENGTH_N(2, kSimdPrefix, 0xff);
}
-typedef ZoneVector<LocalType> LocalTypeMap;
+typedef ZoneVector<ValueType> TypesOfLocals;
class LocalDeclDecoderTest : public TestWithZone {
public:
v8::internal::AccountingAllocator allocator;
- size_t ExpectRun(LocalTypeMap map, size_t pos, LocalType expected,
+ size_t ExpectRun(TypesOfLocals map, size_t pos, ValueType expected,
size_t count) {
for (size_t i = 0; i < count; i++) {
EXPECT_EQ(expected, map[pos++]);
}
return pos;
}
-
- LocalTypeMap Expand(AstLocalDecls& decls) {
- ZoneVector<LocalType> map(zone());
- for (auto p : decls.local_types) {
- map.insert(map.end(), p.second, p.first);
- }
- return map;
- }
};
TEST_F(LocalDeclDecoderTest, EmptyLocals) {
- AstLocalDecls decls(zone());
- bool result = DecodeLocalDecls(decls, nullptr, nullptr);
+ BodyLocalDecls decls(zone());
+ bool result = DecodeLocalDecls(&decls, nullptr, nullptr);
EXPECT_FALSE(result);
}
TEST_F(LocalDeclDecoderTest, NoLocals) {
static const byte data[] = {0};
- AstLocalDecls decls(zone());
- bool result = DecodeLocalDecls(decls, data, data + sizeof(data));
+ BodyLocalDecls decls(zone());
+ bool result = DecodeLocalDecls(&decls, data, data + sizeof(data));
EXPECT_TRUE(result);
- EXPECT_EQ(0u, decls.total_local_count);
+ EXPECT_TRUE(decls.type_list.empty());
}
TEST_F(LocalDeclDecoderTest, OneLocal) {
- for (size_t i = 0; i < arraysize(kLocalTypes); i++) {
- LocalType type = kLocalTypes[i];
+ for (size_t i = 0; i < arraysize(kValueTypes); i++) {
+ ValueType type = kValueTypes[i];
const byte data[] = {
- 1, 1, static_cast<byte>(WasmOpcodes::LocalTypeCodeFor(type))};
- AstLocalDecls decls(zone());
- bool result = DecodeLocalDecls(decls, data, data + sizeof(data));
+ 1, 1, static_cast<byte>(WasmOpcodes::ValueTypeCodeFor(type))};
+ BodyLocalDecls decls(zone());
+ bool result = DecodeLocalDecls(&decls, data, data + sizeof(data));
EXPECT_TRUE(result);
- EXPECT_EQ(1u, decls.total_local_count);
+ EXPECT_EQ(1u, decls.type_list.size());
- LocalTypeMap map = Expand(decls);
- EXPECT_EQ(1u, map.size());
+ TypesOfLocals map = decls.type_list;
EXPECT_EQ(type, map[0]);
}
}
TEST_F(LocalDeclDecoderTest, FiveLocals) {
- for (size_t i = 0; i < arraysize(kLocalTypes); i++) {
- LocalType type = kLocalTypes[i];
+ for (size_t i = 0; i < arraysize(kValueTypes); i++) {
+ ValueType type = kValueTypes[i];
const byte data[] = {
- 1, 5, static_cast<byte>(WasmOpcodes::LocalTypeCodeFor(type))};
- AstLocalDecls decls(zone());
- bool result = DecodeLocalDecls(decls, data, data + sizeof(data));
+ 1, 5, static_cast<byte>(WasmOpcodes::ValueTypeCodeFor(type))};
+ BodyLocalDecls decls(zone());
+ bool result = DecodeLocalDecls(&decls, data, data + sizeof(data));
EXPECT_TRUE(result);
- EXPECT_EQ(sizeof(data), decls.decls_encoded_size);
- EXPECT_EQ(5u, decls.total_local_count);
+ EXPECT_EQ(sizeof(data), decls.encoded_size);
+ EXPECT_EQ(5u, decls.type_list.size());
- LocalTypeMap map = Expand(decls);
+ TypesOfLocals map = decls.type_list;
EXPECT_EQ(5u, map.size());
ExpectRun(map, 0, type, 5);
}
@@ -2616,21 +2695,20 @@ TEST_F(LocalDeclDecoderTest, MixedLocals) {
for (byte d = 0; d < 3; d++) {
const byte data[] = {4, a, kLocalI32, b, kLocalI64,
c, kLocalF32, d, kLocalF64};
- AstLocalDecls decls(zone());
- bool result = DecodeLocalDecls(decls, data, data + sizeof(data));
+ BodyLocalDecls decls(zone());
+ bool result = DecodeLocalDecls(&decls, data, data + sizeof(data));
EXPECT_TRUE(result);
- EXPECT_EQ(sizeof(data), decls.decls_encoded_size);
+ EXPECT_EQ(sizeof(data), decls.encoded_size);
EXPECT_EQ(static_cast<uint32_t>(a + b + c + d),
- decls.total_local_count);
+ decls.type_list.size());
- LocalTypeMap map = Expand(decls);
- EXPECT_EQ(static_cast<uint32_t>(a + b + c + d), map.size());
+ TypesOfLocals map = decls.type_list;
size_t pos = 0;
- pos = ExpectRun(map, pos, kAstI32, a);
- pos = ExpectRun(map, pos, kAstI64, b);
- pos = ExpectRun(map, pos, kAstF32, c);
- pos = ExpectRun(map, pos, kAstF64, d);
+ pos = ExpectRun(map, pos, kWasmI32, a);
+ pos = ExpectRun(map, pos, kWasmI64, b);
+ pos = ExpectRun(map, pos, kWasmF32, c);
+ pos = ExpectRun(map, pos, kWasmF64, d);
}
}
}
@@ -2642,21 +2720,21 @@ TEST_F(LocalDeclDecoderTest, UseEncoder) {
const byte* end = nullptr;
LocalDeclEncoder local_decls(zone());
- local_decls.AddLocals(5, kAstF32);
- local_decls.AddLocals(1337, kAstI32);
- local_decls.AddLocals(212, kAstI64);
+ local_decls.AddLocals(5, kWasmF32);
+ local_decls.AddLocals(1337, kWasmI32);
+ local_decls.AddLocals(212, kWasmI64);
local_decls.Prepend(zone(), &data, &end);
- AstLocalDecls decls(zone());
- bool result = DecodeLocalDecls(decls, data, end);
+ BodyLocalDecls decls(zone());
+ bool result = DecodeLocalDecls(&decls, data, end);
EXPECT_TRUE(result);
- EXPECT_EQ(5u + 1337u + 212u, decls.total_local_count);
+ EXPECT_EQ(5u + 1337u + 212u, decls.type_list.size());
- LocalTypeMap map = Expand(decls);
+ TypesOfLocals map = decls.type_list;
size_t pos = 0;
- pos = ExpectRun(map, pos, kAstF32, 5);
- pos = ExpectRun(map, pos, kAstI32, 1337);
- pos = ExpectRun(map, pos, kAstI64, 212);
+ pos = ExpectRun(map, pos, kWasmF32, 5);
+ pos = ExpectRun(map, pos, kWasmI32, 1337);
+ pos = ExpectRun(map, pos, kWasmI64, 212);
}
class BytecodeIteratorTest : public TestWithZone {};
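The hunks above replace the old AstLocalDecls structure (a run-length list each test expanded by hand via Expand()) with BodyLocalDecls, which is filled in through a pointer and exposes a flat type_list plus the encoded_size of the declarations. A minimal sketch of the updated call pattern, assuming BodyLocalDecls and DecodeLocalDecls keep the shapes used in these hunks; the helper name CheckTwoI32Locals is illustrative only:

    // Sketch of the post-rename local-declaration decoding API, mirroring the
    // LocalDeclDecoderTest cases above. Assumes the declarations from
    // src/wasm/function-body-decoder.h at this revision.
    #include "src/wasm/function-body-decoder.h"

    void CheckTwoI32Locals(Zone* zone) {
      const byte data[] = {1, 2, kLocalI32};  // one entry: two locals of type i32
      BodyLocalDecls decls(zone);
      bool ok = DecodeLocalDecls(&decls, data, data + sizeof(data));
      CHECK(ok);
      CHECK_EQ(sizeof(data), decls.encoded_size);  // bytes consumed by the declarations
      CHECK_EQ(2u, decls.type_list.size());        // already expanded, one ValueType per local
      for (ValueType t : decls.type_list) CHECK_EQ(kWasmI32, t);
    }

The hand-written Expand() helper disappears because type_list already stores one ValueType per declared local.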
@@ -2664,10 +2742,10 @@ class BytecodeIteratorTest : public TestWithZone {};
TEST_F(BytecodeIteratorTest, SimpleForeach) {
byte code[] = {WASM_IF_ELSE(WASM_ZERO, WASM_ZERO, WASM_ZERO)};
BytecodeIterator iter(code, code + sizeof(code));
- WasmOpcode expected[] = {kExprI8Const, kExprIf, kExprI8Const,
- kExprElse, kExprI8Const, kExprEnd};
+ WasmOpcode expected[] = {kExprI32Const, kExprIf, kExprI32Const,
+ kExprElse, kExprI32Const, kExprEnd};
size_t pos = 0;
- for (WasmOpcode opcode : iter) {
+ for (WasmOpcode opcode : iter.opcodes()) {
if (pos >= arraysize(expected)) {
EXPECT_TRUE(false);
break;
@@ -2683,32 +2761,52 @@ TEST_F(BytecodeIteratorTest, ForeachTwice) {
int count = 0;
count = 0;
- for (WasmOpcode opcode : iter) {
+ for (WasmOpcode opcode : iter.opcodes()) {
USE(opcode);
count++;
}
EXPECT_EQ(6, count);
count = 0;
- for (WasmOpcode opcode : iter) {
+ for (WasmOpcode opcode : iter.opcodes()) {
USE(opcode);
count++;
}
EXPECT_EQ(6, count);
}
-TEST_F(BytecodeIteratorTest, WithAstDecls) {
- byte code[] = {1, 1, kLocalI32, WASM_I8(9), WASM_I8(11)};
- AstLocalDecls decls(zone());
+TEST_F(BytecodeIteratorTest, ForeachOffset) {
+ byte code[] = {WASM_IF_ELSE(WASM_ZERO, WASM_ZERO, WASM_ZERO)};
+ BytecodeIterator iter(code, code + sizeof(code));
+ int count = 0;
+
+ count = 0;
+ for (auto offset : iter.offsets()) {
+ USE(offset);
+ count++;
+ }
+ EXPECT_EQ(6, count);
+
+ count = 0;
+ for (auto offset : iter.offsets()) {
+ USE(offset);
+ count++;
+ }
+ EXPECT_EQ(6, count);
+}
+
+TEST_F(BytecodeIteratorTest, WithLocalDecls) {
+ byte code[] = {1, 1, kLocalI32, WASM_I32V_1(9), WASM_I32V_1(11)};
+ BodyLocalDecls decls(zone());
BytecodeIterator iter(code, code + sizeof(code), &decls);
- EXPECT_EQ(3u, decls.decls_encoded_size);
+ EXPECT_EQ(3u, decls.encoded_size);
EXPECT_EQ(3u, iter.pc_offset());
EXPECT_TRUE(iter.has_next());
- EXPECT_EQ(kExprI8Const, iter.current());
+ EXPECT_EQ(kExprI32Const, iter.current());
iter.next();
EXPECT_TRUE(iter.has_next());
- EXPECT_EQ(kExprI8Const, iter.current());
+ EXPECT_EQ(kExprI32Const, iter.current());
iter.next();
EXPECT_FALSE(iter.has_next());
}
diff --git a/deps/v8/test/unittests/wasm/loop-assignment-analysis-unittest.cc b/deps/v8/test/unittests/wasm/loop-assignment-analysis-unittest.cc
index cb452445bf..3f2d7665a3 100644
--- a/deps/v8/test/unittests/wasm/loop-assignment-analysis-unittest.cc
+++ b/deps/v8/test/unittests/wasm/loop-assignment-analysis-unittest.cc
@@ -11,7 +11,7 @@
#include "src/bit-vector.h"
#include "src/objects.h"
-#include "src/wasm/ast-decoder.h"
+#include "src/wasm/function-body-decoder.h"
#include "src/wasm/wasm-macro-gen.h"
#include "src/wasm/wasm-module.h"
@@ -112,7 +112,7 @@ TEST_F(WasmLoopAssignmentAnalyzerTest, NestedIf) {
TEST_F(WasmLoopAssignmentAnalyzerTest, BigLocal) {
num_locals = 65000;
for (int i = 13; i < 65000; i = static_cast<int>(i * 1.5)) {
- byte code[] = {WASM_LOOP(WASM_I8(11), kExprSetLocal, U32V_3(i))};
+ byte code[] = {WASM_LOOP(WASM_I32V_1(11), kExprSetLocal, U32V_3(i))};
BitVector* assigned = Analyze(code, code + arraysize(code));
for (int j = 0; j < assigned->length(); j++) {
@@ -140,8 +140,8 @@ TEST_F(WasmLoopAssignmentAnalyzerTest, Loop1) {
byte code[] = {
WASM_LOOP(WASM_IF(
WASM_GET_LOCAL(0),
- WASM_BRV(0, WASM_SET_LOCAL(
- 3, WASM_I32_SUB(WASM_GET_LOCAL(0), WASM_I8(1)))))),
+ WASM_BRV(0, WASM_SET_LOCAL(3, WASM_I32_SUB(WASM_GET_LOCAL(0),
+ WASM_I32V_1(1)))))),
WASM_GET_LOCAL(0)};
BitVector* assigned = Analyze(code, code + arraysize(code));
@@ -164,8 +164,8 @@ TEST_F(WasmLoopAssignmentAnalyzerTest, Loop2) {
kSum, WASM_F32_ADD(WASM_GET_LOCAL(kSum),
WASM_LOAD_MEM(MachineType::Float32(),
WASM_GET_LOCAL(kIter)))),
- WASM_SET_LOCAL(kIter,
- WASM_I32_SUB(WASM_GET_LOCAL(kIter), WASM_I8(4))))),
+ WASM_SET_LOCAL(
+ kIter, WASM_I32_SUB(WASM_GET_LOCAL(kIter), WASM_I32V_1(4))))),
WASM_STORE_MEM(MachineType::Float32(), WASM_ZERO, WASM_GET_LOCAL(kSum)),
WASM_GET_LOCAL(kIter))};
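The WASM_I8(x) macro disappears throughout these tests because the pre-standard kExprI8Const opcode was dropped (note the deleted EXPECT_LENGTH(2, kExprI8Const) line above); small constants are now spelled WASM_I32V_1(x), i.e. kExprI32Const followed by a one-byte signed-LEB128 immediate. As an illustration only (not part of the patch), a value in [-64, 63] fits in that single LEB byte like so:

    // Illustrative sketch of the one-byte signed LEB128 payload behind
    // WASM_I32V_1; assumes the standard LEB128 rules, not V8 internals.
    #include <cassert>
    #include <cstdint>

    uint8_t EncodeSignedLeb1(int32_t value) {
      assert(value >= -64 && value <= 63);        // otherwise more bytes are needed
      return static_cast<uint8_t>(value & 0x7f);  // continuation bit (0x80) stays clear
    }

    // i32.const 11 thus encodes as {kExprI32Const, EncodeSignedLeb1(11)},
    // matching the raw {kExprI32Const, 11, ...} byte arrays in the BrTable tests above.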
diff --git a/deps/v8/test/unittests/wasm/module-decoder-unittest.cc b/deps/v8/test/unittests/wasm/module-decoder-unittest.cc
index baf6909499..258ac7311a 100644
--- a/deps/v8/test/unittests/wasm/module-decoder-unittest.cc
+++ b/deps/v8/test/unittests/wasm/module-decoder-unittest.cc
@@ -7,6 +7,7 @@
#include "src/handles.h"
#include "src/objects-inl.h"
#include "src/wasm/module-decoder.h"
+#include "src/wasm/wasm-limits.h"
#include "src/wasm/wasm-macro-gen.h"
#include "src/wasm/wasm-opcodes.h"
@@ -125,13 +126,13 @@ static size_t SizeOfVarInt(size_t value) {
return size;
}
-struct LocalTypePair {
+struct ValueTypePair {
uint8_t code;
- LocalType type;
-} kLocalTypes[] = {{kLocalI32, kAstI32},
- {kLocalI64, kAstI64},
- {kLocalF32, kAstF32},
- {kLocalF64, kAstF64}};
+ ValueType type;
+} kValueTypes[] = {{kLocalI32, kWasmI32},
+ {kLocalI64, kWasmI64},
+ {kLocalF32, kWasmF32},
+ {kLocalF64, kWasmF64}};
class WasmModuleVerifyTest : public TestWithIsolateAndZone {
public:
@@ -198,7 +199,7 @@ TEST_F(WasmModuleVerifyTest, OneGlobal) {
const WasmGlobal* global = &result.val->globals.back();
- EXPECT_EQ(kAstI32, global->type);
+ EXPECT_EQ(kWasmI32, global->type);
EXPECT_EQ(0u, global->offset);
EXPECT_FALSE(global->mutability);
EXPECT_EQ(WasmInitExpr::kI32Const, global->init.kind);
@@ -310,7 +311,7 @@ TEST_F(WasmModuleVerifyTest, NGlobals) {
WASM_INIT_EXPR_F32(7.7), // init
};
- for (uint32_t i = 0; i < 1000000; i = i * 13 + 1) {
+ for (uint32_t i = 0; i < kV8MaxWasmGlobals; i = i * 13 + 1) {
std::vector<byte> buffer;
size_t size = SizeOfVarInt(i) + i * sizeof(data);
const byte globals[] = {kGlobalSectionCode, U32V_5(size)};
@@ -359,14 +360,14 @@ TEST_F(WasmModuleVerifyTest, TwoGlobals) {
const WasmGlobal* g0 = &result.val->globals[0];
- EXPECT_EQ(kAstF32, g0->type);
+ EXPECT_EQ(kWasmF32, g0->type);
EXPECT_EQ(0u, g0->offset);
EXPECT_FALSE(g0->mutability);
EXPECT_EQ(WasmInitExpr::kF32Const, g0->init.kind);
const WasmGlobal* g1 = &result.val->globals[1];
- EXPECT_EQ(kAstF64, g1->type);
+ EXPECT_EQ(kWasmF64, g1->type);
EXPECT_EQ(8u, g1->offset);
EXPECT_TRUE(g1->mutability);
EXPECT_EQ(WasmInitExpr::kF64Const, g1->init.kind);
@@ -783,8 +784,8 @@ TEST_F(WasmSignatureDecodeTest, Ok_v_v) {
}
TEST_F(WasmSignatureDecodeTest, Ok_t_v) {
- for (size_t i = 0; i < arraysize(kLocalTypes); i++) {
- LocalTypePair ret_type = kLocalTypes[i];
+ for (size_t i = 0; i < arraysize(kValueTypes); i++) {
+ ValueTypePair ret_type = kValueTypes[i];
const byte data[] = {SIG_ENTRY_x(ret_type.code)};
FunctionSig* sig =
DecodeWasmSignatureForTesting(zone(), data, data + sizeof(data));
@@ -797,8 +798,8 @@ TEST_F(WasmSignatureDecodeTest, Ok_t_v) {
}
TEST_F(WasmSignatureDecodeTest, Ok_v_t) {
- for (size_t i = 0; i < arraysize(kLocalTypes); i++) {
- LocalTypePair param_type = kLocalTypes[i];
+ for (size_t i = 0; i < arraysize(kValueTypes); i++) {
+ ValueTypePair param_type = kValueTypes[i];
const byte data[] = {SIG_ENTRY_v_x(param_type.code)};
FunctionSig* sig =
DecodeWasmSignatureForTesting(zone(), data, data + sizeof(data));
@@ -811,10 +812,10 @@ TEST_F(WasmSignatureDecodeTest, Ok_v_t) {
}
TEST_F(WasmSignatureDecodeTest, Ok_t_t) {
- for (size_t i = 0; i < arraysize(kLocalTypes); i++) {
- LocalTypePair ret_type = kLocalTypes[i];
- for (size_t j = 0; j < arraysize(kLocalTypes); j++) {
- LocalTypePair param_type = kLocalTypes[j];
+ for (size_t i = 0; i < arraysize(kValueTypes); i++) {
+ ValueTypePair ret_type = kValueTypes[i];
+ for (size_t j = 0; j < arraysize(kValueTypes); j++) {
+ ValueTypePair param_type = kValueTypes[j];
const byte data[] = {SIG_ENTRY_x_x(ret_type.code, param_type.code)};
FunctionSig* sig =
DecodeWasmSignatureForTesting(zone(), data, data + sizeof(data));
@@ -829,10 +830,10 @@ TEST_F(WasmSignatureDecodeTest, Ok_t_t) {
}
TEST_F(WasmSignatureDecodeTest, Ok_i_tt) {
- for (size_t i = 0; i < arraysize(kLocalTypes); i++) {
- LocalTypePair p0_type = kLocalTypes[i];
- for (size_t j = 0; j < arraysize(kLocalTypes); j++) {
- LocalTypePair p1_type = kLocalTypes[j];
+ for (size_t i = 0; i < arraysize(kValueTypes); i++) {
+ ValueTypePair p0_type = kValueTypes[i];
+ for (size_t j = 0; j < arraysize(kValueTypes); j++) {
+ ValueTypePair p1_type = kValueTypes[j];
const byte data[] = {
SIG_ENTRY_x_xx(kLocalI32, p0_type.code, p1_type.code)};
FunctionSig* sig =
@@ -847,6 +848,31 @@ TEST_F(WasmSignatureDecodeTest, Ok_i_tt) {
}
}
+TEST_F(WasmSignatureDecodeTest, TooManyParams) {
+ static const byte data[] = {kWasmFunctionTypeForm,
+ WASM_I32V_3(kV8MaxWasmFunctionParams + 1),
+ kLocalI32, 0};
+ FunctionSig* sig =
+ DecodeWasmSignatureForTesting(zone(), data, data + sizeof(data));
+ EXPECT_FALSE(sig != nullptr);
+}
+
+TEST_F(WasmSignatureDecodeTest, TooManyReturns) {
+ bool prev = FLAG_wasm_mv_prototype;
+ for (int i = 0; i < 2; i++) {
+ FLAG_wasm_mv_prototype = i != 0;
+ const int max_return_count =
+ static_cast<int>(FLAG_wasm_mv_prototype ? kV8MaxWasmFunctionMultiReturns
+ : kV8MaxWasmFunctionReturns);
+ byte data[] = {kWasmFunctionTypeForm, 0, WASM_I32V_3(max_return_count + 1),
+ kLocalI32};
+ FunctionSig* sig =
+ DecodeWasmSignatureForTesting(zone(), data, data + sizeof(data));
+ EXPECT_EQ(nullptr, sig);
+ FLAG_wasm_mv_prototype = prev;
+ }
+}
+
TEST_F(WasmSignatureDecodeTest, Fail_off_end) {
byte data[256];
for (int p = 0; p <= 255; p = p + 1 + p * 3) {
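A note on the two limit tests added above: a signature entry is a flat byte sequence of form byte, parameter count, parameter types, return count, return types, which matches how the SIG_ENTRY_* macros used elsewhere in this file appear to expand. A minimal well-formed counterpart, assuming those macros and constants are in scope (illustrative sketch, not taken from the patch):

    // i32 -> i32, spelled out byte by byte; equivalent to SIG_ENTRY_x_x(kLocalI32, kLocalI32).
    static const byte kI32ToI32Sig[] = {
        kWasmFunctionTypeForm,  // function type form
        1, kLocalI32,           // one parameter: i32
        1, kLocalI32,           // one return: i32
    };
    // TooManyParams declares kV8MaxWasmFunctionParams + 1 parameters and
    // TooManyReturns declares one return more than the active limit, so
    // DecodeWasmSignatureForTesting is expected to return nullptr in both cases.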
@@ -907,7 +933,7 @@ TEST_F(WasmFunctionVerifyTest, Ok_v_v_empty) {
kLocalF32, // --
6,
kLocalF64, // --
- kExprNop // body
+ kExprEnd // body
};
FunctionResult result =
@@ -1028,7 +1054,7 @@ TEST_F(WasmModuleVerifyTest, UnknownSectionSkipped) {
const WasmGlobal* global = &result.val->globals.back();
- EXPECT_EQ(kAstI32, global->type);
+ EXPECT_EQ(kWasmI32, global->type);
EXPECT_EQ(0u, global->offset);
if (result.val) delete result.val;
@@ -1075,6 +1101,21 @@ TEST_F(WasmModuleVerifyTest, ImportTable_mutable_global) {
}
}
+TEST_F(WasmModuleVerifyTest, ImportTable_mutability_malformed) {
+ static const byte data[] = {
+ SECTION(Import, 8),
+ 1, // --
+ NAME_LENGTH(1), // --
+ 'm', // module name
+ NAME_LENGTH(1), // --
+ 'g', // global name
+ kExternalGlobal, // import kind
+ kLocalI32, // type
+ 2, // invalid mutability
+ };
+ EXPECT_FAILURE(data);
+}
+
TEST_F(WasmModuleVerifyTest, ImportTable_nosigs2) {
static const byte data[] = {
SECTION(Import, 6), 1, // sig table
@@ -1482,6 +1523,83 @@ TEST_F(WasmModuleVerifyTest, Multiple_Named_Sections) {
EXPECT_VERIFIES(data);
}
+class WasmModuleCustomSectionTest : public TestWithIsolateAndZone {
+ public:
+ void CheckSections(const byte* module_start, const byte* module_end,
+ CustomSectionOffset expected[], size_t num_expected) {
+ // Add the WASM magic and version number automatically.
+ size_t size = static_cast<size_t>(module_end - module_start);
+ byte header[] = {WASM_MODULE_HEADER};
+ size_t total = sizeof(header) + size;
+ auto temp = new byte[total];
+ memcpy(temp, header, sizeof(header));
+ memcpy(temp + sizeof(header), module_start, size);
+ std::vector<CustomSectionOffset> custom_sections =
+ DecodeCustomSections(module_start, module_end);
+
+ CHECK_EQ(num_expected, custom_sections.size());
+
+ for (size_t i = 0; i < num_expected; i++) {
+ EXPECT_EQ(expected[i].section_start, custom_sections[i].section_start);
+ EXPECT_EQ(expected[i].name_offset, custom_sections[i].name_offset);
+ EXPECT_EQ(expected[i].name_length, custom_sections[i].name_length);
+ EXPECT_EQ(expected[i].payload_offset, custom_sections[i].payload_offset);
+ EXPECT_EQ(expected[i].payload_length, custom_sections[i].payload_length);
+ EXPECT_EQ(expected[i].section_length, custom_sections[i].section_length);
+ }
+ }
+};
+
+TEST_F(WasmModuleCustomSectionTest, ThreeUnknownSections) {
+ static const byte data[] = {
+ U32_LE(kWasmMagic), // --
+ U32_LE(kWasmVersion), // --
+ SECTION(Unknown, 4), 1, 'X', 17, 18, // --
+ SECTION(Unknown, 9), 3, 'f', 'o', 'o', 5, 6, 7, 8, 9, // --
+ SECTION(Unknown, 8), 5, 'o', 't', 'h', 'e', 'r', 7, 8, // --
+ };
+
+ static CustomSectionOffset expected[] = {
+ // sec_start, nm_offset, nm_length, py_offset, py_length, sec_length
+ {10, 11, 1, 12, 2, 4}, // --
+ {16, 17, 3, 20, 5, 9}, // --
+ {27, 28, 5, 33, 2, 8}, // --
+ };
+
+ CheckSections(data, data + sizeof(data), expected, arraysize(expected));
+}
+
+TEST_F(WasmModuleCustomSectionTest, TwoKnownTwoUnknownSections) {
+ static const byte data[] = {
+ U32_LE(kWasmMagic), // --
+ U32_LE(kWasmVersion), // --
+ SIGNATURES_SECTION(2, SIG_ENTRY_v_v, SIG_ENTRY_v_v), // --
+ SECTION(Unknown, 4),
+ 1,
+ 'X',
+ 17,
+ 18, // --
+ ONE_EMPTY_FUNCTION,
+ SECTION(Unknown, 8),
+ 5,
+ 'o',
+ 't',
+ 'h',
+ 'e',
+ 'r',
+ 7,
+ 8, // --
+ };
+
+ static CustomSectionOffset expected[] = {
+ // sec_start, nm_offset, nm_length, py_offset, py_length, sec_length
+ {19, 20, 1, 21, 2, 4}, // --
+ {29, 30, 5, 35, 2, 8}, // --
+ };
+
+ CheckSections(data, data + sizeof(data), expected, arraysize(expected));
+}
+
} // namespace wasm
} // namespace internal
} // namespace v8
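The expected[] tables in the two custom-section tests are easiest to follow with the byte positions written out. For the first entry of ThreeUnknownSections, counting from the start of the module bytes given in the test (a sketch of the bookkeeping, derived from that data):

    std::vector<CustomSectionOffset> sections =
        DecodeCustomSections(data, data + sizeof(data));
    // Bytes 0..7  : magic + version (U32_LE(kWasmMagic), U32_LE(kWasmVersion))
    // Byte  8     : section id (unknown/custom)
    // Byte  9     : section length        -> section_length == 4
    // Byte  10    : name length           -> section_start  == 10
    // Byte  11    : 'X'                   -> name_offset == 11, name_length == 1
    // Bytes 12..13: payload bytes 17, 18  -> payload_offset == 12, payload_length == 2
    // i.e. sections[0] matches the first expected[] row {10, 11, 1, 12, 2, 4}.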
diff --git a/deps/v8/test/unittests/wasm/wasm-macro-gen-unittest.cc b/deps/v8/test/unittests/wasm/wasm-macro-gen-unittest.cc
index d047e04032..fb98fec3c1 100644
--- a/deps/v8/test/unittests/wasm/wasm-macro-gen-unittest.cc
+++ b/deps/v8/test/unittests/wasm/wasm-macro-gen-unittest.cc
@@ -22,8 +22,8 @@ TEST_F(WasmMacroGenTest, Constants) {
EXPECT_SIZE(2, WASM_ONE);
EXPECT_SIZE(2, WASM_ZERO);
- EXPECT_SIZE(2, WASM_I8(122));
- EXPECT_SIZE(2, WASM_I8(254));
+ EXPECT_SIZE(2, WASM_I32V_1(-22));
+ EXPECT_SIZE(2, WASM_I32V_1(54));
EXPECT_SIZE(2, WASM_I32V_1(1));
EXPECT_SIZE(3, WASM_I32V_2(200));
@@ -49,6 +49,7 @@ TEST_F(WasmMacroGenTest, Constants) {
TEST_F(WasmMacroGenTest, Statements) {
EXPECT_SIZE(1, WASM_NOP);
+ EXPECT_SIZE(1, WASM_END);
EXPECT_SIZE(4, WASM_SET_LOCAL(0, WASM_ZERO));
@@ -84,7 +85,7 @@ TEST_F(WasmMacroGenTest, Statements) {
}
TEST_F(WasmMacroGenTest, MacroStatements) {
- EXPECT_SIZE(11, WASM_WHILE(WASM_I8(0), WASM_NOP));
+ EXPECT_SIZE(11, WASM_WHILE(WASM_ZERO, WASM_NOP));
EXPECT_SIZE(7, WASM_INC_LOCAL(0));
EXPECT_SIZE(7, WASM_INC_LOCAL_BY(0, 3));
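EXPECT_SIZE(n, ...) asserts that a macro expands to exactly n bytes, so the switch from WASM_I8 to WASM_I32V_1 above keeps the two-byte size: an i32.const opcode plus a single signed-LEB128 immediate byte, which only represents values in -64..63, presumably why the constants became -22 and 54. A small illustration, assuming the same macros (not part of the patch):

    EXPECT_SIZE(2, WASM_I32V_1(63));   // opcode + 1 immediate byte (fits in one LEB byte)
    EXPECT_SIZE(3, WASM_I32V_2(200));  // opcode + 2 immediate bytes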
diff --git a/deps/v8/test/unittests/wasm/wasm-module-builder-unittest.cc b/deps/v8/test/unittests/wasm/wasm-module-builder-unittest.cc
index 50049d557e..372f0ae922 100644
--- a/deps/v8/test/unittests/wasm/wasm-module-builder-unittest.cc
+++ b/deps/v8/test/unittests/wasm/wasm-module-builder-unittest.cc
@@ -6,7 +6,7 @@
#include "src/v8.h"
-#include "src/wasm/ast-decoder.h"
+#include "src/wasm/function-body-decoder.h"
#include "src/wasm/wasm-module-builder.h"
#include "test/common/wasm/test-signatures.h"
@@ -17,7 +17,7 @@ namespace wasm {
class WasmModuleBuilderTest : public TestWithZone {
protected:
- void AddLocal(WasmFunctionBuilder* f, LocalType type) {
+ void AddLocal(WasmFunctionBuilder* f, ValueType type) {
uint16_t index = f->AddLocal(type);
f->EmitGetLocal(index);
}
diff --git a/deps/v8/test/unittests/zone/zone-chunk-list-unittest.cc b/deps/v8/test/unittests/zone/zone-chunk-list-unittest.cc
index 2969366bc7..73e43a30d9 100644
--- a/deps/v8/test/unittests/zone/zone-chunk-list-unittest.cc
+++ b/deps/v8/test/unittests/zone/zone-chunk-list-unittest.cc
@@ -4,7 +4,6 @@
#include "src/zone/zone-chunk-list.h"
-#include "src/list-inl.h"
#include "src/zone/accounting-allocator.h"
#include "src/zone/zone.h"
#include "testing/gtest/include/gtest/gtest.h"