summaryrefslogtreecommitdiff
path: root/deps/v8/src/objects
diff options
context:
space:
mode:
authorMichaël Zasso <targos@protonmail.com>2019-11-08 15:39:11 +0100
committerMichaël Zasso <targos@protonmail.com>2019-11-08 15:46:25 +0100
commit6ca81ad72a3c6fdf16c683335be748f22aaa9a0d (patch)
tree33c8ee75f729aed76c2c0b89c63f9bf1b4dd66aa /deps/v8/src/objects
parent1eee0b8bf8bba39b600fb16a9223e545e3bac2bc (diff)
downloadandroid-node-v8-6ca81ad72a3c6fdf16c683335be748f22aaa9a0d.tar.gz
android-node-v8-6ca81ad72a3c6fdf16c683335be748f22aaa9a0d.tar.bz2
android-node-v8-6ca81ad72a3c6fdf16c683335be748f22aaa9a0d.zip
deps: update V8 to 7.9.317.20
PR-URL: https://github.com/nodejs/node/pull/30020 Reviewed-By: Colin Ihrig <cjihrig@gmail.com> Reviewed-By: Jiawen Geng <technicalcute@gmail.com> Reviewed-By: Anna Henningsen <anna@addaleax.net> Reviewed-By: Matteo Collina <matteo.collina@gmail.com>
Diffstat (limited to 'deps/v8/src/objects')
-rw-r--r--deps/v8/src/objects/arguments.h13
-rw-r--r--deps/v8/src/objects/backing-store.cc648
-rw-r--r--deps/v8/src/objects/backing-store.h206
-rw-r--r--deps/v8/src/objects/bigint.cc5
-rw-r--r--deps/v8/src/objects/bigint.h12
-rw-r--r--deps/v8/src/objects/code.cc10
-rw-r--r--deps/v8/src/objects/code.h1
-rw-r--r--deps/v8/src/objects/contexts-inl.h41
-rw-r--r--deps/v8/src/objects/contexts.cc42
-rw-r--r--deps/v8/src/objects/contexts.h67
-rw-r--r--deps/v8/src/objects/data-handler.h2
-rw-r--r--deps/v8/src/objects/debug-objects-inl.h14
-rw-r--r--deps/v8/src/objects/debug-objects.h31
-rw-r--r--deps/v8/src/objects/descriptor-array-inl.h91
-rw-r--r--deps/v8/src/objects/descriptor-array.h57
-rw-r--r--deps/v8/src/objects/elements.cc493
-rw-r--r--deps/v8/src/objects/elements.h24
-rw-r--r--deps/v8/src/objects/feedback-cell-inl.h7
-rw-r--r--deps/v8/src/objects/feedback-cell.h15
-rw-r--r--deps/v8/src/objects/feedback-vector-inl.h4
-rw-r--r--deps/v8/src/objects/feedback-vector.cc38
-rw-r--r--deps/v8/src/objects/feedback-vector.h8
-rw-r--r--deps/v8/src/objects/field-index-inl.h4
-rw-r--r--deps/v8/src/objects/field-index.h7
-rw-r--r--deps/v8/src/objects/fixed-array-inl.h6
-rw-r--r--deps/v8/src/objects/fixed-array.h19
-rw-r--r--deps/v8/src/objects/function-kind.h9
-rw-r--r--deps/v8/src/objects/heap-number-inl.h4
-rw-r--r--deps/v8/src/objects/heap-number.h8
-rw-r--r--deps/v8/src/objects/instance-type.h381
-rw-r--r--deps/v8/src/objects/internal-index.h79
-rw-r--r--deps/v8/src/objects/intl-objects.cc185
-rw-r--r--deps/v8/src/objects/intl-objects.h17
-rw-r--r--deps/v8/src/objects/js-array-buffer-inl.h80
-rw-r--r--deps/v8/src/objects/js-array-buffer.cc211
-rw-r--r--deps/v8/src/objects/js-array-buffer.h113
-rw-r--r--deps/v8/src/objects/js-array.h4
-rw-r--r--deps/v8/src/objects/js-break-iterator.cc13
-rw-r--r--deps/v8/src/objects/js-break-iterator.h4
-rw-r--r--deps/v8/src/objects/js-collator.cc26
-rw-r--r--deps/v8/src/objects/js-collator.h4
-rw-r--r--deps/v8/src/objects/js-collection-iterator.h4
-rw-r--r--deps/v8/src/objects/js-date-time-format.cc216
-rw-r--r--deps/v8/src/objects/js-date-time-format.h7
-rw-r--r--deps/v8/src/objects/js-list-format.cc35
-rw-r--r--deps/v8/src/objects/js-list-format.h2
-rw-r--r--deps/v8/src/objects/js-locale.cc15
-rw-r--r--deps/v8/src/objects/js-locale.h9
-rw-r--r--deps/v8/src/objects/js-number-format-inl.h38
-rw-r--r--deps/v8/src/objects/js-number-format.cc386
-rw-r--r--deps/v8/src/objects/js-number-format.h28
-rw-r--r--deps/v8/src/objects/js-objects-inl.h17
-rw-r--r--deps/v8/src/objects/js-objects.cc190
-rw-r--r--deps/v8/src/objects/js-objects.h79
-rw-r--r--deps/v8/src/objects/js-plural-rules.cc11
-rw-r--r--deps/v8/src/objects/js-plural-rules.h2
-rw-r--r--deps/v8/src/objects/js-proxy.h2
-rw-r--r--deps/v8/src/objects/js-regexp-inl.h10
-rw-r--r--deps/v8/src/objects/js-regexp.cc118
-rw-r--r--deps/v8/src/objects/js-regexp.h68
-rw-r--r--deps/v8/src/objects/js-relative-time-format.cc3
-rw-r--r--deps/v8/src/objects/js-relative-time-format.h2
-rw-r--r--deps/v8/src/objects/js-segment-iterator.h2
-rw-r--r--deps/v8/src/objects/js-segmenter.h2
-rw-r--r--deps/v8/src/objects/js-weak-refs-inl.h29
-rw-r--r--deps/v8/src/objects/js-weak-refs.h57
-rw-r--r--deps/v8/src/objects/keys.cc47
-rw-r--r--deps/v8/src/objects/keys.h6
-rw-r--r--deps/v8/src/objects/layout-descriptor-inl.h4
-rw-r--r--deps/v8/src/objects/layout-descriptor.cc3
-rw-r--r--deps/v8/src/objects/literal-objects.cc18
-rw-r--r--deps/v8/src/objects/lookup-inl.h5
-rw-r--r--deps/v8/src/objects/lookup.cc133
-rw-r--r--deps/v8/src/objects/lookup.h2
-rw-r--r--deps/v8/src/objects/map-inl.h23
-rw-r--r--deps/v8/src/objects/map-updater.cc51
-rw-r--r--deps/v8/src/objects/map-updater.h20
-rw-r--r--deps/v8/src/objects/map.cc153
-rw-r--r--deps/v8/src/objects/map.h51
-rw-r--r--deps/v8/src/objects/module-inl.h41
-rw-r--r--deps/v8/src/objects/module.cc64
-rw-r--r--deps/v8/src/objects/module.h14
-rw-r--r--deps/v8/src/objects/name-inl.h5
-rw-r--r--deps/v8/src/objects/name.h19
-rw-r--r--deps/v8/src/objects/object-list-macros.h8
-rw-r--r--deps/v8/src/objects/objects-body-descriptors-inl.h38
-rw-r--r--deps/v8/src/objects/objects-definitions.h244
-rw-r--r--deps/v8/src/objects/objects-inl.h21
-rw-r--r--deps/v8/src/objects/objects.cc196
-rw-r--r--deps/v8/src/objects/objects.h71
-rw-r--r--deps/v8/src/objects/oddball.h5
-rw-r--r--deps/v8/src/objects/ordered-hash-table-inl.h8
-rw-r--r--deps/v8/src/objects/ordered-hash-table.cc26
-rw-r--r--deps/v8/src/objects/ordered-hash-table.h5
-rw-r--r--deps/v8/src/objects/osr-optimized-code-cache-inl.h25
-rw-r--r--deps/v8/src/objects/osr-optimized-code-cache.cc223
-rw-r--r--deps/v8/src/objects/osr-optimized-code-cache.h77
-rw-r--r--deps/v8/src/objects/primitive-heap-object-inl.h26
-rw-r--r--deps/v8/src/objects/primitive-heap-object.h33
-rw-r--r--deps/v8/src/objects/property-descriptor.cc2
-rw-r--r--deps/v8/src/objects/property-details.h14
-rw-r--r--deps/v8/src/objects/scope-info.cc89
-rw-r--r--deps/v8/src/objects/scope-info.h43
-rw-r--r--deps/v8/src/objects/script-inl.h30
-rw-r--r--deps/v8/src/objects/script.h18
-rw-r--r--deps/v8/src/objects/shared-function-info-inl.h48
-rw-r--r--deps/v8/src/objects/shared-function-info.h113
-rw-r--r--deps/v8/src/objects/slots-inl.h2
-rw-r--r--deps/v8/src/objects/source-text-module.cc532
-rw-r--r--deps/v8/src/objects/source-text-module.h91
-rw-r--r--deps/v8/src/objects/stack-frame-info.cc81
-rw-r--r--deps/v8/src/objects/stack-frame-info.h6
-rw-r--r--deps/v8/src/objects/string-inl.h8
-rw-r--r--deps/v8/src/objects/string.cc53
-rw-r--r--deps/v8/src/objects/string.h9
-rw-r--r--deps/v8/src/objects/struct-inl.h7
-rw-r--r--deps/v8/src/objects/struct.h17
-rw-r--r--deps/v8/src/objects/synthetic-module.cc32
-rw-r--r--deps/v8/src/objects/synthetic-module.h18
-rw-r--r--deps/v8/src/objects/transitions-inl.h8
-rw-r--r--deps/v8/src/objects/transitions.cc5
-rw-r--r--deps/v8/src/objects/transitions.h1
-rw-r--r--deps/v8/src/objects/value-serializer.cc101
-rw-r--r--deps/v8/src/objects/value-serializer.h1
124 files changed, 4521 insertions, 2808 deletions
diff --git a/deps/v8/src/objects/arguments.h b/deps/v8/src/objects/arguments.h
index a306ef592a..0a1e3e4ac9 100644
--- a/deps/v8/src/objects/arguments.h
+++ b/deps/v8/src/objects/arguments.h
@@ -16,7 +16,7 @@
namespace v8 {
namespace internal {
-// Superclass for all objects with instance type {JS_ARGUMENTS_TYPE}
+// Superclass for all objects with instance type {JS_ARGUMENTS_OBJECT_TYPE}
class JSArgumentsObject
: public TorqueGeneratedJSArgumentsObject<JSArgumentsObject, JSObject> {
public:
@@ -25,15 +25,16 @@ class JSArgumentsObject
};
// Common superclass for JSSloppyArgumentsObject and JSStrictArgumentsObject.
-// Note that the instance type {JS_ARGUMENTS_TYPE} does _not_ guarantee the
-// below layout, the in-object properties might have transitioned to dictionary
-// mode already. Only use the below layout with the specific initial maps.
+// Note that the instance type {JS_ARGUMENTS_OBJECT_TYPE} does _not_ guarantee
+// the below layout, the in-object properties might have transitioned to
+// dictionary mode already. Only use the below layout with the specific initial
+// maps.
class JSArgumentsObjectWithLength : public JSArgumentsObject {
public:
// Layout description.
DEFINE_FIELD_OFFSET_CONSTANTS(
JSObject::kHeaderSize,
- TORQUE_GENERATED_JSARGUMENTS_OBJECT_WITH_LENGTH_FIELDS)
+ TORQUE_GENERATED_JS_ARGUMENTS_OBJECT_WITH_LENGTH_FIELDS)
// Indices of in-object properties.
static const int kLengthIndex = 0;
@@ -50,7 +51,7 @@ class JSSloppyArgumentsObject : public JSArgumentsObjectWithLength {
public:
DEFINE_FIELD_OFFSET_CONSTANTS(
JSArgumentsObjectWithLength::kSize,
- TORQUE_GENERATED_JSSLOPPY_ARGUMENTS_OBJECT_FIELDS)
+ TORQUE_GENERATED_JS_SLOPPY_ARGUMENTS_OBJECT_FIELDS)
// Indices of in-object properties.
static const int kCalleeIndex = kLengthIndex + 1;
diff --git a/deps/v8/src/objects/backing-store.cc b/deps/v8/src/objects/backing-store.cc
new file mode 100644
index 0000000000..55957e001b
--- /dev/null
+++ b/deps/v8/src/objects/backing-store.cc
@@ -0,0 +1,648 @@
+// Copyright 2019 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/objects/backing-store.h"
+#include "src/execution/isolate.h"
+#include "src/handles/global-handles.h"
+#include "src/logging/counters.h"
+#include "src/wasm/wasm-engine.h"
+#include "src/wasm/wasm-limits.h"
+#include "src/wasm/wasm-objects-inl.h"
+
+#define TRACE_BS(...) \
+ do { \
+ if (FLAG_trace_backing_store) PrintF(__VA_ARGS__); \
+ } while (false)
+
+namespace v8 {
+namespace internal {
+
+namespace {
+#if V8_TARGET_ARCH_64_BIT
+constexpr bool kUseGuardRegions = true;
+#else
+constexpr bool kUseGuardRegions = false;
+#endif
+
+#if V8_TARGET_ARCH_MIPS64
+// MIPS64 has a user space of 2^40 bytes on most processors,
+// address space limits needs to be smaller.
+constexpr size_t kAddressSpaceLimit = 0x8000000000L; // 512 GiB
+#elif V8_TARGET_ARCH_64_BIT
+constexpr size_t kAddressSpaceLimit = 0x10100000000L; // 1 TiB + 4 GiB
+#else
+constexpr size_t kAddressSpaceLimit = 0xC0000000; // 3 GiB
+#endif
+
+constexpr uint64_t kOneGiB = 1024 * 1024 * 1024;
+constexpr uint64_t kNegativeGuardSize = 2 * kOneGiB;
+constexpr uint64_t kFullGuardSize = 10 * kOneGiB;
+
+std::atomic<uint64_t> reserved_address_space_{0};
+
+// Allocation results are reported to UMA
+//
+// See wasm_memory_allocation_result in counters.h
+enum class AllocationStatus {
+ kSuccess, // Succeeded on the first try
+
+ kSuccessAfterRetry, // Succeeded after garbage collection
+
+ kAddressSpaceLimitReachedFailure, // Failed because Wasm is at its address
+ // space limit
+
+ kOtherFailure // Failed for an unknown reason
+};
+
+base::AddressRegion GetGuardedRegion(void* buffer_start, size_t byte_length) {
+ // Guard regions always look like this:
+ // |xxx(2GiB)xxx|.......(4GiB)..xxxxx|xxxxxx(4GiB)xxxxxx|
+ // ^ buffer_start
+ // ^ byte_length
+ // ^ negative guard region ^ positive guard region
+
+ Address start = reinterpret_cast<Address>(buffer_start);
+ DCHECK_EQ(8, sizeof(size_t)); // only use on 64-bit
+ DCHECK_EQ(0, start % AllocatePageSize());
+ return base::AddressRegion(start - (2 * kOneGiB),
+ static_cast<size_t>(kFullGuardSize));
+}
+
+void RecordStatus(Isolate* isolate, AllocationStatus status) {
+ isolate->counters()->wasm_memory_allocation_result()->AddSample(
+ static_cast<int>(status));
+}
+
+inline void DebugCheckZero(void* start, size_t byte_length) {
+#if DEBUG
+ // Double check memory is zero-initialized.
+ const byte* bytes = reinterpret_cast<const byte*>(start);
+ for (size_t i = 0; i < byte_length; i++) {
+ DCHECK_EQ(0, bytes[i]);
+ }
+#endif
+}
+} // namespace
+
+bool BackingStore::ReserveAddressSpace(uint64_t num_bytes) {
+ uint64_t reservation_limit = kAddressSpaceLimit;
+ while (true) {
+ uint64_t old_count = reserved_address_space_.load();
+ if (old_count > reservation_limit) return false;
+ if (reservation_limit - old_count < num_bytes) return false;
+ if (reserved_address_space_.compare_exchange_weak(old_count,
+ old_count + num_bytes)) {
+ return true;
+ }
+ }
+}
+
+void BackingStore::ReleaseReservation(uint64_t num_bytes) {
+ uint64_t old_reserved = reserved_address_space_.fetch_sub(num_bytes);
+ USE(old_reserved);
+ DCHECK_LE(num_bytes, old_reserved);
+}
+
+// The backing store for a Wasm shared memory remembers all the isolates
+// with which it has been shared.
+struct SharedWasmMemoryData {
+ std::vector<Isolate*> isolates_;
+};
+
+void BackingStore::Clear() {
+ buffer_start_ = nullptr;
+ byte_length_ = 0;
+ has_guard_regions_ = false;
+ type_specific_data_.v8_api_array_buffer_allocator = nullptr;
+}
+
+BackingStore::~BackingStore() {
+ GlobalBackingStoreRegistry::Unregister(this);
+
+ if (buffer_start_ == nullptr) return; // nothing to deallocate
+
+ if (is_wasm_memory_) {
+ DCHECK(free_on_destruct_);
+ TRACE_BS("BSw:free bs=%p mem=%p (length=%zu, capacity=%zu)\n", this,
+ buffer_start_, byte_length(), byte_capacity_);
+ if (is_shared_) {
+ // Deallocate the list of attached memory objects.
+ SharedWasmMemoryData* shared_data = get_shared_wasm_memory_data();
+ delete shared_data;
+ type_specific_data_.shared_wasm_memory_data = nullptr;
+ }
+
+ // Wasm memories are always allocated through the page allocator.
+ auto region =
+ has_guard_regions_
+ ? GetGuardedRegion(buffer_start_, byte_length_)
+ : base::AddressRegion(reinterpret_cast<Address>(buffer_start_),
+ byte_capacity_);
+ bool pages_were_freed =
+ region.size() == 0 /* no need to free any pages */ ||
+ FreePages(GetPlatformPageAllocator(),
+ reinterpret_cast<void*>(region.begin()), region.size());
+ CHECK(pages_were_freed);
+ BackingStore::ReleaseReservation(has_guard_regions_ ? kFullGuardSize
+ : byte_capacity_);
+ Clear();
+ return;
+ }
+ if (free_on_destruct_) {
+ // JSArrayBuffer backing store. Deallocate through the embedder's allocator.
+ auto allocator = reinterpret_cast<v8::ArrayBuffer::Allocator*>(
+ get_v8_api_array_buffer_allocator());
+ TRACE_BS("BS:free bs=%p mem=%p (length=%zu, capacity=%zu)\n", this,
+ buffer_start_, byte_length(), byte_capacity_);
+ allocator->Free(buffer_start_, byte_length_);
+ }
+ Clear();
+}
+
+// Allocate a backing store using the array buffer allocator from the embedder.
+std::unique_ptr<BackingStore> BackingStore::Allocate(
+ Isolate* isolate, size_t byte_length, SharedFlag shared,
+ InitializedFlag initialized) {
+ void* buffer_start = nullptr;
+ auto allocator = isolate->array_buffer_allocator();
+ CHECK_NOT_NULL(allocator);
+ if (byte_length != 0) {
+ auto counters = isolate->counters();
+ int mb_length = static_cast<int>(byte_length / MB);
+ if (mb_length > 0) {
+ counters->array_buffer_big_allocations()->AddSample(mb_length);
+ }
+ if (shared == SharedFlag::kShared) {
+ counters->shared_array_allocations()->AddSample(mb_length);
+ }
+ auto allocate_buffer = [allocator, initialized](size_t byte_length) {
+ if (initialized == InitializedFlag::kUninitialized) {
+ return allocator->AllocateUninitialized(byte_length);
+ }
+ void* buffer_start = allocator->Allocate(byte_length);
+ if (buffer_start) {
+ // TODO(wasm): node does not implement the zero-initialization API.
+ // Reenable this debug check when node does implement it properly.
+ constexpr bool
+ kDebugCheckZeroDisabledDueToNodeNotImplementingZeroInitAPI = true;
+ if ((!(kDebugCheckZeroDisabledDueToNodeNotImplementingZeroInitAPI)) &&
+ !FLAG_mock_arraybuffer_allocator) {
+ DebugCheckZero(buffer_start, byte_length);
+ }
+ }
+ return buffer_start;
+ };
+
+ buffer_start = isolate->heap()->AllocateExternalBackingStore(
+ allocate_buffer, byte_length);
+
+ if (buffer_start == nullptr) {
+ // Allocation failed.
+ counters->array_buffer_new_size_failures()->AddSample(mb_length);
+ return {};
+ }
+ }
+
+ auto result = new BackingStore(buffer_start, // start
+ byte_length, // length
+ byte_length, // capacity
+ shared, // shared
+ false, // is_wasm_memory
+ true, // free_on_destruct
+ false); // has_guard_regions
+
+ TRACE_BS("BS:alloc bs=%p mem=%p (length=%zu)\n", result,
+ result->buffer_start(), byte_length);
+ result->type_specific_data_.v8_api_array_buffer_allocator = allocator;
+ return std::unique_ptr<BackingStore>(result);
+}
+
+// Allocate a backing store for a Wasm memory. Always use the page allocator
+// and add guard regions.
+std::unique_ptr<BackingStore> BackingStore::TryAllocateWasmMemory(
+ Isolate* isolate, size_t initial_pages, size_t maximum_pages,
+ SharedFlag shared) {
+ // Cannot reserve 0 pages on some OSes.
+ if (maximum_pages == 0) maximum_pages = 1;
+
+ TRACE_BS("BSw:try %zu pages, %zu max\n", initial_pages, maximum_pages);
+
+ bool guards = kUseGuardRegions;
+
+ // For accounting purposes, whether a GC was necessary.
+ bool did_retry = false;
+
+ // A helper to try running a function up to 3 times, executing a GC
+ // if the first and second attempts failed.
+ auto gc_retry = [&](const std::function<bool()>& fn) {
+ for (int i = 0; i < 3; i++) {
+ if (fn()) return true;
+ // Collect garbage and retry.
+ did_retry = true;
+ // TODO(wasm): try Heap::EagerlyFreeExternalMemory() first?
+ isolate->heap()->MemoryPressureNotification(
+ MemoryPressureLevel::kCritical, true);
+ }
+ return false;
+ };
+
+ // Compute size of reserved memory.
+
+ size_t engine_max_pages = wasm::max_mem_pages();
+ size_t byte_capacity =
+ std::min(engine_max_pages, maximum_pages) * wasm::kWasmPageSize;
+ size_t reservation_size =
+ guards ? static_cast<size_t>(kFullGuardSize) : byte_capacity;
+
+ //--------------------------------------------------------------------------
+ // 1. Enforce maximum address space reservation per engine.
+ //--------------------------------------------------------------------------
+ auto reserve_memory_space = [&] {
+ return BackingStore::ReserveAddressSpace(reservation_size);
+ };
+
+ if (!gc_retry(reserve_memory_space)) {
+ // Crash on out-of-memory if the correctness fuzzer is running.
+ if (FLAG_correctness_fuzzer_suppressions) {
+ FATAL("could not allocate wasm memory backing store");
+ }
+ RecordStatus(isolate, AllocationStatus::kAddressSpaceLimitReachedFailure);
+ TRACE_BS("BSw:try failed to reserve address space\n");
+ return {};
+ }
+
+ //--------------------------------------------------------------------------
+ // 2. Allocate pages (inaccessible by default).
+ //--------------------------------------------------------------------------
+ void* allocation_base = nullptr;
+ auto allocate_pages = [&] {
+ allocation_base =
+ AllocatePages(GetPlatformPageAllocator(), nullptr, reservation_size,
+ wasm::kWasmPageSize, PageAllocator::kNoAccess);
+ return allocation_base != nullptr;
+ };
+ if (!gc_retry(allocate_pages)) {
+ // Page allocator could not reserve enough pages.
+ BackingStore::ReleaseReservation(reservation_size);
+ RecordStatus(isolate, AllocationStatus::kOtherFailure);
+ TRACE_BS("BSw:try failed to allocate pages\n");
+ return {};
+ }
+
+ // Get a pointer to the start of the buffer, skipping negative guard region
+ // if necessary.
+ byte* buffer_start = reinterpret_cast<byte*>(allocation_base) +
+ (guards ? kNegativeGuardSize : 0);
+
+ //--------------------------------------------------------------------------
+ // 3. Commit the initial pages (allow read/write).
+ //--------------------------------------------------------------------------
+ size_t byte_length = initial_pages * wasm::kWasmPageSize;
+ auto commit_memory = [&] {
+ return byte_length == 0 ||
+ SetPermissions(GetPlatformPageAllocator(), buffer_start, byte_length,
+ PageAllocator::kReadWrite);
+ };
+ if (!gc_retry(commit_memory)) {
+ // SetPermissions put us over the process memory limit.
+ V8::FatalProcessOutOfMemory(nullptr, "BackingStore::AllocateWasmMemory()");
+ TRACE_BS("BSw:try failed to set permissions\n");
+ }
+
+ DebugCheckZero(buffer_start, byte_length); // touch the bytes.
+
+ RecordStatus(isolate, did_retry ? AllocationStatus::kSuccessAfterRetry
+ : AllocationStatus::kSuccess);
+
+ auto result = new BackingStore(buffer_start, // start
+ byte_length, // length
+ byte_capacity, // capacity
+ shared, // shared
+ true, // is_wasm_memory
+ true, // free_on_destruct
+ guards); // has_guard_regions
+
+ TRACE_BS("BSw:alloc bs=%p mem=%p (length=%zu, capacity=%zu)\n", result,
+ result->buffer_start(), byte_length, byte_capacity);
+
+ // Shared Wasm memories need an anchor for the memory object list.
+ if (shared == SharedFlag::kShared) {
+ result->type_specific_data_.shared_wasm_memory_data =
+ new SharedWasmMemoryData();
+ }
+
+ return std::unique_ptr<BackingStore>(result);
+}
+
+// Allocate a backing store for a Wasm memory. Always use the page allocator
+// and add guard regions.
+std::unique_ptr<BackingStore> BackingStore::AllocateWasmMemory(
+ Isolate* isolate, size_t initial_pages, size_t maximum_pages,
+ SharedFlag shared) {
+ // Wasm pages must be a multiple of the allocation page size.
+ DCHECK_EQ(0, wasm::kWasmPageSize % AllocatePageSize());
+
+ // Enforce engine limitation on the maximum number of pages.
+ if (initial_pages > wasm::max_mem_pages()) return nullptr;
+
+ auto backing_store =
+ TryAllocateWasmMemory(isolate, initial_pages, maximum_pages, shared);
+ if (!backing_store && maximum_pages > initial_pages) {
+ // If reserving {maximum_pages} failed, try with maximum = initial.
+ backing_store =
+ TryAllocateWasmMemory(isolate, initial_pages, initial_pages, shared);
+ }
+ return backing_store;
+}
+
+std::unique_ptr<BackingStore> BackingStore::CopyWasmMemory(Isolate* isolate,
+ size_t new_pages) {
+ DCHECK_GE(new_pages * wasm::kWasmPageSize, byte_length_);
+ // Note that we could allocate uninitialized to save initialization cost here,
+ // but since Wasm memories are allocated by the page allocator, the zeroing
+ // cost is already built-in.
+ // TODO(titzer): should we use a suitable maximum here?
+ auto new_backing_store = BackingStore::AllocateWasmMemory(
+ isolate, new_pages, new_pages,
+ is_shared() ? SharedFlag::kShared : SharedFlag::kNotShared);
+
+ if (!new_backing_store ||
+ new_backing_store->has_guard_regions() != has_guard_regions_) {
+ return {};
+ }
+
+ if (byte_length_ > 0) {
+ memcpy(new_backing_store->buffer_start(), buffer_start_, byte_length_);
+ }
+
+ return new_backing_store;
+}
+
+// Try to grow the size of a wasm memory in place, without realloc + copy.
+bool BackingStore::GrowWasmMemoryInPlace(Isolate* isolate, size_t delta_pages,
+ size_t max_pages) {
+ DCHECK(is_wasm_memory_);
+ max_pages = std::min(max_pages, byte_capacity_ / wasm::kWasmPageSize);
+
+ if (delta_pages == 0) return true; // degenerate grow.
+ if (delta_pages > max_pages) return false; // would never work.
+
+ // Do a compare-exchange loop, because we also need to adjust page
+ // permissions. Note that multiple racing grows both try to set page
+ // permissions for the entire range (to be RW), so the operating system
+ // should deal with that raciness. We know we succeeded when we can
+ // compare/swap the old length with the new length.
+ size_t old_length = 0;
+ size_t new_length = 0;
+ while (true) {
+ old_length = byte_length_.load(std::memory_order_acquire);
+ size_t current_pages = old_length / wasm::kWasmPageSize;
+
+    // Check if we have exceeded the supplied maximum.
+ if (current_pages > (max_pages - delta_pages)) return false;
+
+ new_length = (current_pages + delta_pages) * wasm::kWasmPageSize;
+
+ // Try to adjust the permissions on the memory.
+ if (!i::SetPermissions(GetPlatformPageAllocator(), buffer_start_,
+ new_length, PageAllocator::kReadWrite)) {
+ return false;
+ }
+ if (byte_length_.compare_exchange_weak(old_length, new_length,
+ std::memory_order_acq_rel)) {
+ // Successfully updated both the length and permissions.
+ break;
+ }
+ }
+
+ if (!is_shared_) {
+ // Only do per-isolate accounting for non-shared backing stores.
+ reinterpret_cast<v8::Isolate*>(isolate)
+ ->AdjustAmountOfExternalAllocatedMemory(new_length - old_length);
+ }
+ return true;
+}
+
+void BackingStore::AttachSharedWasmMemoryObject(
+ Isolate* isolate, Handle<WasmMemoryObject> memory_object) {
+ DCHECK(is_wasm_memory_);
+ DCHECK(is_shared_);
+ // We need to take the global registry lock for this operation.
+ GlobalBackingStoreRegistry::AddSharedWasmMemoryObject(isolate, this,
+ memory_object);
+}
+
+void BackingStore::BroadcastSharedWasmMemoryGrow(
+ Isolate* isolate, std::shared_ptr<BackingStore> backing_store,
+ size_t new_pages) {
+ GlobalBackingStoreRegistry::BroadcastSharedWasmMemoryGrow(
+ isolate, backing_store, new_pages);
+}
+
+void BackingStore::RemoveSharedWasmMemoryObjects(Isolate* isolate) {
+ GlobalBackingStoreRegistry::Purge(isolate);
+}
+
+void BackingStore::UpdateSharedWasmMemoryObjects(Isolate* isolate) {
+ GlobalBackingStoreRegistry::UpdateSharedWasmMemoryObjects(isolate);
+}
+
+std::unique_ptr<BackingStore> BackingStore::WrapAllocation(
+ Isolate* isolate, void* allocation_base, size_t allocation_length,
+ SharedFlag shared, bool free_on_destruct) {
+ auto result =
+ new BackingStore(allocation_base, allocation_length, allocation_length,
+ shared, false, free_on_destruct, false);
+ result->type_specific_data_.v8_api_array_buffer_allocator =
+ isolate->array_buffer_allocator();
+ TRACE_BS("BS:wrap bs=%p mem=%p (length=%zu)\n", result,
+ result->buffer_start(), result->byte_length());
+ return std::unique_ptr<BackingStore>(result);
+}
+
+std::unique_ptr<BackingStore> BackingStore::EmptyBackingStore(
+ SharedFlag shared) {
+ auto result = new BackingStore(nullptr, // start
+ 0, // length
+ 0, // capacity
+ shared, // shared
+ false, // is_wasm_memory
+ false, // free_on_destruct
+ false); // has_guard_regions
+
+ return std::unique_ptr<BackingStore>(result);
+}
+
+void* BackingStore::get_v8_api_array_buffer_allocator() {
+ CHECK(!is_wasm_memory_);
+ auto array_buffer_allocator =
+ type_specific_data_.v8_api_array_buffer_allocator;
+ CHECK_NOT_NULL(array_buffer_allocator);
+ return array_buffer_allocator;
+}
+
+SharedWasmMemoryData* BackingStore::get_shared_wasm_memory_data() {
+ CHECK(is_wasm_memory_ && is_shared_);
+ auto shared_wasm_memory_data = type_specific_data_.shared_wasm_memory_data;
+ CHECK(shared_wasm_memory_data);
+ return shared_wasm_memory_data;
+}
+
+namespace {
+// Implementation details of GlobalBackingStoreRegistry.
+struct GlobalBackingStoreRegistryImpl {
+ GlobalBackingStoreRegistryImpl() {}
+ base::Mutex mutex_;
+ std::unordered_map<const void*, std::weak_ptr<BackingStore>> map_;
+};
+base::LazyInstance<GlobalBackingStoreRegistryImpl>::type global_registry_impl_ =
+ LAZY_INSTANCE_INITIALIZER;
+inline GlobalBackingStoreRegistryImpl* impl() {
+ return global_registry_impl_.Pointer();
+}
+} // namespace
+
+void GlobalBackingStoreRegistry::Register(
+ std::shared_ptr<BackingStore> backing_store) {
+ if (!backing_store || !backing_store->buffer_start()) return;
+
+ if (!backing_store->free_on_destruct()) {
+ // If the backing store buffer is managed by the embedder,
+    // then we don't have to guarantee that there is a single unique
+    // BackingStore per buffer_start() because the destructor
+    // of the BackingStore will be a no-op in that case.
+ return;
+ }
+
+ base::MutexGuard scope_lock(&impl()->mutex_);
+ if (backing_store->globally_registered_) return;
+ TRACE_BS("BS:reg bs=%p mem=%p (length=%zu, capacity=%zu)\n",
+ backing_store.get(), backing_store->buffer_start(),
+ backing_store->byte_length(), backing_store->byte_capacity());
+ std::weak_ptr<BackingStore> weak = backing_store;
+ auto result = impl()->map_.insert({backing_store->buffer_start(), weak});
+ CHECK(result.second);
+ backing_store->globally_registered_ = true;
+}
+
+void GlobalBackingStoreRegistry::Unregister(BackingStore* backing_store) {
+ if (!backing_store->globally_registered_) return;
+
+ DCHECK_NOT_NULL(backing_store->buffer_start());
+
+ base::MutexGuard scope_lock(&impl()->mutex_);
+ const auto& result = impl()->map_.find(backing_store->buffer_start());
+ if (result != impl()->map_.end()) {
+ DCHECK(!result->second.lock());
+ impl()->map_.erase(result);
+ }
+ backing_store->globally_registered_ = false;
+}
+
+std::shared_ptr<BackingStore> GlobalBackingStoreRegistry::Lookup(
+ void* buffer_start, size_t length) {
+ base::MutexGuard scope_lock(&impl()->mutex_);
+ TRACE_BS("BS:lookup mem=%p (%zu bytes)\n", buffer_start, length);
+ const auto& result = impl()->map_.find(buffer_start);
+ if (result == impl()->map_.end()) {
+ return std::shared_ptr<BackingStore>();
+ }
+ auto backing_store = result->second.lock();
+ DCHECK_EQ(buffer_start, backing_store->buffer_start());
+ DCHECK_EQ(length, backing_store->byte_length());
+ return backing_store;
+}
+
+void GlobalBackingStoreRegistry::Purge(Isolate* isolate) {
+ // We need to keep a reference to all backing stores that are inspected
+ // in the purging loop below. Otherwise, we might get a deadlock
+ // if the temporary backing store reference created in the loop is
+ // the last reference. In that case the destructor of the backing store
+ // may try to take the &impl()->mutex_ in order to unregister itself.
+ std::vector<std::shared_ptr<BackingStore>> prevent_destruction_under_lock;
+ base::MutexGuard scope_lock(&impl()->mutex_);
+ // Purge all entries in the map that refer to the given isolate.
+ for (auto& entry : impl()->map_) {
+ auto backing_store = entry.second.lock();
+ prevent_destruction_under_lock.emplace_back(backing_store);
+ if (!backing_store) continue; // skip entries where weak ptr is null
+ if (!backing_store->is_wasm_memory()) continue; // skip non-wasm memory
+ if (!backing_store->is_shared()) continue; // skip non-shared memory
+ SharedWasmMemoryData* shared_data =
+ backing_store->get_shared_wasm_memory_data();
+ // Remove this isolate from the isolates list.
+ auto& isolates = shared_data->isolates_;
+ for (size_t i = 0; i < isolates.size(); i++) {
+ if (isolates[i] == isolate) isolates[i] = nullptr;
+ }
+ }
+}
+
+void GlobalBackingStoreRegistry::AddSharedWasmMemoryObject(
+ Isolate* isolate, BackingStore* backing_store,
+ Handle<WasmMemoryObject> memory_object) {
+ // Add to the weak array list of shared memory objects in the isolate.
+ isolate->AddSharedWasmMemory(memory_object);
+
+ // Add the isolate to the list of isolates sharing this backing store.
+ base::MutexGuard scope_lock(&impl()->mutex_);
+ SharedWasmMemoryData* shared_data =
+ backing_store->get_shared_wasm_memory_data();
+ auto& isolates = shared_data->isolates_;
+ int free_entry = -1;
+ for (size_t i = 0; i < isolates.size(); i++) {
+ if (isolates[i] == isolate) return;
+ if (isolates[i] == nullptr) free_entry = static_cast<int>(i);
+ }
+ if (free_entry >= 0)
+ isolates[free_entry] = isolate;
+ else
+ isolates.push_back(isolate);
+}
+
+void GlobalBackingStoreRegistry::BroadcastSharedWasmMemoryGrow(
+ Isolate* isolate, std::shared_ptr<BackingStore> backing_store,
+ size_t new_pages) {
+ {
+ // The global lock protects the list of isolates per backing store.
+ base::MutexGuard scope_lock(&impl()->mutex_);
+ SharedWasmMemoryData* shared_data =
+ backing_store->get_shared_wasm_memory_data();
+ for (Isolate* other : shared_data->isolates_) {
+ if (other && other != isolate) {
+ other->stack_guard()->RequestGrowSharedMemory();
+ }
+ }
+ }
+ // Update memory objects in this isolate.
+ UpdateSharedWasmMemoryObjects(isolate);
+}
+
+void GlobalBackingStoreRegistry::UpdateSharedWasmMemoryObjects(
+ Isolate* isolate) {
+ HandleScope scope(isolate);
+ Handle<WeakArrayList> shared_wasm_memories =
+ isolate->factory()->shared_wasm_memories();
+
+ for (int i = 0; i < shared_wasm_memories->length(); i++) {
+ HeapObject obj;
+ if (!shared_wasm_memories->Get(i).GetHeapObject(&obj)) continue;
+
+ Handle<WasmMemoryObject> memory_object(WasmMemoryObject::cast(obj),
+ isolate);
+ Handle<JSArrayBuffer> old_buffer(memory_object->array_buffer(), isolate);
+ std::shared_ptr<BackingStore> backing_store = old_buffer->GetBackingStore();
+
+ if (old_buffer->byte_length() != backing_store->byte_length()) {
+ Handle<JSArrayBuffer> new_buffer =
+ isolate->factory()->NewJSSharedArrayBuffer(std::move(backing_store));
+ memory_object->update_instances(isolate, new_buffer);
+ }
+ }
+}
+
+} // namespace internal
+} // namespace v8
+
+#undef TRACE_BS
diff --git a/deps/v8/src/objects/backing-store.h b/deps/v8/src/objects/backing-store.h
new file mode 100644
index 0000000000..2c6ffb28da
--- /dev/null
+++ b/deps/v8/src/objects/backing-store.h
@@ -0,0 +1,206 @@
+// Copyright 2019 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_OBJECTS_BACKING_STORE_H_
+#define V8_OBJECTS_BACKING_STORE_H_
+
+#include <memory>
+
+#include "include/v8-internal.h"
+#include "src/handles/handles.h"
+
+namespace v8 {
+namespace internal {
+
+class Isolate;
+class WasmMemoryObject;
+
+// Whether the backing store is shared or not.
+enum class SharedFlag : uint8_t { kNotShared, kShared };
+
+// Whether the backing store memory is initialized to zero or not.
+enum class InitializedFlag : uint8_t { kUninitialized, kZeroInitialized };
+
+// Internal information for shared wasm memories. E.g. contains
+// a list of all memory objects (across all isolates) that share this
+// backing store.
+struct SharedWasmMemoryData;
+
+// The {BackingStore} data structure stores all the low-level details about the
+// backing store of an array buffer or Wasm memory, including its base address
+// and length, whether it is shared, provided by the embedder, has guard
+// regions, etc. Instances of this class *own* the underlying memory
+// when they are created through one of the {Allocate()} methods below,
+// and the destructor frees the memory (and page allocation if necessary).
+// Backing stores can also *wrap* embedder-allocated memory. In this case,
+// they do not own the memory, and upon destruction, they do not deallocate it.
+class V8_EXPORT_PRIVATE BackingStore : public BackingStoreBase {
+ public:
+ ~BackingStore();
+
+ // Allocate an array buffer backing store using the default method,
+ // which currently is the embedder-provided array buffer allocator.
+ static std::unique_ptr<BackingStore> Allocate(Isolate* isolate,
+ size_t byte_length,
+ SharedFlag shared,
+ InitializedFlag initialized);
+
+ // Allocate the backing store for a Wasm memory.
+ static std::unique_ptr<BackingStore> AllocateWasmMemory(Isolate* isolate,
+ size_t initial_pages,
+ size_t maximum_pages,
+ SharedFlag shared);
+
+ // Create a backing store that wraps existing allocated memory.
+ // If {free_on_destruct} is {true}, the memory will be freed using the
+ // ArrayBufferAllocator::Free() callback when this backing store is
+ // destructed. Otherwise destructing the backing store will do nothing
+ // to the allocated memory.
+ static std::unique_ptr<BackingStore> WrapAllocation(Isolate* isolate,
+ void* allocation_base,
+ size_t allocation_length,
+ SharedFlag shared,
+ bool free_on_destruct);
+
+ // Create an empty backing store.
+ static std::unique_ptr<BackingStore> EmptyBackingStore(SharedFlag shared);
+
+ // Accessors.
+ void* buffer_start() const { return buffer_start_; }
+ size_t byte_length() const {
+ return byte_length_.load(std::memory_order_relaxed);
+ }
+ size_t byte_capacity() const { return byte_capacity_; }
+ bool is_shared() const { return is_shared_; }
+ bool is_wasm_memory() const { return is_wasm_memory_; }
+ bool has_guard_regions() const { return has_guard_regions_; }
+ bool free_on_destruct() const { return free_on_destruct_; }
+
+ // Attempt to grow this backing store in place.
+ bool GrowWasmMemoryInPlace(Isolate* isolate, size_t delta_pages,
+ size_t max_pages);
+
+ // Allocate a new, larger, backing store for this Wasm memory and copy the
+ // contents of this backing store into it.
+ std::unique_ptr<BackingStore> CopyWasmMemory(Isolate* isolate,
+ size_t new_pages);
+
+ // Attach the given memory object to this backing store. The memory object
+ // will be updated if this backing store is grown.
+ void AttachSharedWasmMemoryObject(Isolate* isolate,
+ Handle<WasmMemoryObject> memory_object);
+
+ // Send asynchronous updates to attached memory objects in other isolates
+ // after the backing store has been grown. Memory objects in this
+ // isolate are updated synchronously.
+ static void BroadcastSharedWasmMemoryGrow(Isolate* isolate,
+ std::shared_ptr<BackingStore>,
+ size_t new_pages);
+
+ // TODO(wasm): address space limitations should be enforced in page alloc.
+ // These methods enforce a limit on the total amount of address space,
+ // which is used for both backing stores and wasm memory.
+ static bool ReserveAddressSpace(uint64_t num_bytes);
+ static void ReleaseReservation(uint64_t num_bytes);
+
+ // Remove all memory objects in the given isolate that refer to this
+ // backing store.
+ static void RemoveSharedWasmMemoryObjects(Isolate* isolate);
+
+ // Update all shared memory objects in this isolate (after a grow operation).
+ static void UpdateSharedWasmMemoryObjects(Isolate* isolate);
+
+ private:
+ friend class GlobalBackingStoreRegistry;
+
+ BackingStore(void* buffer_start, size_t byte_length, size_t byte_capacity,
+ SharedFlag shared, bool is_wasm_memory, bool free_on_destruct,
+ bool has_guard_regions)
+ : buffer_start_(buffer_start),
+ byte_length_(byte_length),
+ byte_capacity_(byte_capacity),
+ is_shared_(shared == SharedFlag::kShared),
+ is_wasm_memory_(is_wasm_memory),
+ free_on_destruct_(free_on_destruct),
+ has_guard_regions_(has_guard_regions),
+ globally_registered_(false) {
+ type_specific_data_.v8_api_array_buffer_allocator = nullptr;
+ }
+
+ void* buffer_start_ = nullptr;
+ std::atomic<size_t> byte_length_{0};
+ size_t byte_capacity_ = 0;
+ bool is_shared_ : 1;
+ bool is_wasm_memory_ : 1;
+ bool free_on_destruct_ : 1;
+ bool has_guard_regions_ : 1;
+ bool globally_registered_ : 1;
+
+ union {
+ // If this backing store was allocated through the ArrayBufferAllocator API,
+ // this is a direct pointer to the API object for freeing the backing
+ // store.
+ // Note: we use {void*} here because we cannot forward-declare an inner
+ // class from the API.
+ void* v8_api_array_buffer_allocator;
+
+ // For shared Wasm memories, this is a list of all the attached memory
+ // objects, which is needed to grow shared backing stores.
+ SharedWasmMemoryData* shared_wasm_memory_data;
+ } type_specific_data_;
+
+ // Accessors for type-specific data.
+ void* get_v8_api_array_buffer_allocator();
+ SharedWasmMemoryData* get_shared_wasm_memory_data();
+
+ void Clear(); // Internally clears fields after deallocation.
+ static std::unique_ptr<BackingStore> TryAllocateWasmMemory(
+ Isolate* isolate, size_t initial_pages, size_t maximum_pages,
+ SharedFlag shared);
+
+ DISALLOW_COPY_AND_ASSIGN(BackingStore);
+};
+
+// A global, per-process mapping from buffer addresses to backing stores.
+// This is generally only used for dealing with an embedder that has not
+// migrated to the new API which should use proper pointers to manage
+// backing stores.
+class GlobalBackingStoreRegistry {
+ public:
+ // Register a backing store in the global registry. A mapping from the
+ // {buffer_start} to the backing store object will be added. The backing
+ // store will automatically unregister itself upon destruction.
+ static void Register(std::shared_ptr<BackingStore> backing_store);
+
+ // Look up a backing store based on the {buffer_start} pointer.
+ static std::shared_ptr<BackingStore> Lookup(void* buffer_start,
+ size_t length);
+
+ private:
+ friend class BackingStore;
+ // Unregister a backing store in the global registry.
+ static void Unregister(BackingStore* backing_store);
+
+ // Adds the given memory object to the backing store's weak list
+ // of memory objects (under the registry lock).
+ static void AddSharedWasmMemoryObject(Isolate* isolate,
+ BackingStore* backing_store,
+ Handle<WasmMemoryObject> memory_object);
+
+ // Purge any shared wasm memory lists that refer to this isolate.
+ static void Purge(Isolate* isolate);
+
+ // Broadcast updates to all attached memory objects.
+ static void BroadcastSharedWasmMemoryGrow(
+ Isolate* isolate, std::shared_ptr<BackingStore> backing_store,
+ size_t new_pages);
+
+ // Update all shared memory objects in the given isolate.
+ static void UpdateSharedWasmMemoryObjects(Isolate* isolate);
+};
+
+} // namespace internal
+} // namespace v8
+
+#endif // V8_OBJECTS_BACKING_STORE_H_
diff --git a/deps/v8/src/objects/bigint.cc b/deps/v8/src/objects/bigint.cc
index 2905bb44c6..6cc43a78e7 100644
--- a/deps/v8/src/objects/bigint.cc
+++ b/deps/v8/src/objects/bigint.cc
@@ -1981,14 +1981,13 @@ void BigInt::SerializeDigits(uint8_t* storage) {
// The serialization format MUST NOT CHANGE without updating the format
// version in value-serializer.cc!
MaybeHandle<BigInt> BigInt::FromSerializedDigits(
- Isolate* isolate, uint32_t bitfield, Vector<const uint8_t> digits_storage,
- AllocationType allocation) {
+ Isolate* isolate, uint32_t bitfield, Vector<const uint8_t> digits_storage) {
int bytelength = LengthBits::decode(bitfield);
DCHECK(digits_storage.length() == bytelength);
bool sign = SignBits::decode(bitfield);
int length = (bytelength + kDigitSize - 1) / kDigitSize; // Round up.
Handle<MutableBigInt> result =
- MutableBigInt::Cast(isolate->factory()->NewBigInt(length, allocation));
+ MutableBigInt::Cast(isolate->factory()->NewBigInt(length));
result->initialize_bitfield(sign, length);
void* digits =
reinterpret_cast<void*>(result->ptr() + kDigitsOffset - kHeapObjectTag);
diff --git a/deps/v8/src/objects/bigint.h b/deps/v8/src/objects/bigint.h
index ca80547230..f50e3bcf04 100644
--- a/deps/v8/src/objects/bigint.h
+++ b/deps/v8/src/objects/bigint.h
@@ -6,8 +6,8 @@
#define V8_OBJECTS_BIGINT_H_
#include "src/common/globals.h"
-#include "src/objects/heap-object.h"
#include "src/objects/objects.h"
+#include "src/objects/primitive-heap-object.h"
#include "src/utils/utils.h"
// Has to be the last include (doesn't have include guards):
@@ -28,7 +28,7 @@ class ValueSerializer;
// BigIntBase is just the raw data object underlying a BigInt. Use with care!
// Most code should be using BigInts instead.
-class BigIntBase : public HeapObject {
+class BigIntBase : public PrimitiveHeapObject {
public:
inline int length() const {
int32_t bitfield = RELAXED_READ_INT32_FIELD(*this, kBitfieldOffset);
@@ -69,7 +69,7 @@ class BigIntBase : public HeapObject {
V(kHeaderSize, 0) \
V(kDigitsOffset, 0)
- DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, BIGINT_FIELDS)
+ DEFINE_FIELD_OFFSET_CONSTANTS(PrimitiveHeapObject::kHeaderSize, BIGINT_FIELDS)
#undef BIGINT_FIELDS
static constexpr bool HasOptionalPadding() {
@@ -105,7 +105,7 @@ class BigIntBase : public HeapObject {
// Only serves to make macros happy; other code should use IsBigInt.
bool IsBigIntBase() const { return true; }
- OBJECT_CONSTRUCTORS(BigIntBase, HeapObject);
+ OBJECT_CONSTRUCTORS(BigIntBase, PrimitiveHeapObject);
};
class FreshlyAllocatedBigInt : public BigIntBase {
@@ -263,8 +263,8 @@ class BigInt : public BigIntBase {
// {DigitsByteLengthForBitfield(GetBitfieldForSerialization())}.
void SerializeDigits(uint8_t* storage);
V8_WARN_UNUSED_RESULT static MaybeHandle<BigInt> FromSerializedDigits(
- Isolate* isolate, uint32_t bitfield, Vector<const uint8_t> digits_storage,
- AllocationType allocation);
+ Isolate* isolate, uint32_t bitfield,
+ Vector<const uint8_t> digits_storage);
OBJECT_CONSTRUCTORS(BigInt, BigIntBase);
};
diff --git a/deps/v8/src/objects/code.cc b/deps/v8/src/objects/code.cc
index b416df8878..a477a7da26 100644
--- a/deps/v8/src/objects/code.cc
+++ b/deps/v8/src/objects/code.cc
@@ -101,7 +101,6 @@ void Code::CopyFromNoFlush(Heap* heap, const CodeDesc& desc) {
// Unbox handles and relocate.
Assembler* origin = desc.origin;
- AllowDeferredHandleDereference embedding_raw_address;
const int mode_mask = RelocInfo::PostCodegenRelocationMask();
for (RelocIterator it(*this, mode_mask); !it.done(); it.next()) {
RelocInfo::Mode mode = it.rinfo()->rmode();
@@ -670,8 +669,8 @@ inline void DisassembleCodeRange(Isolate* isolate, std::ostream& os, Code code,
} // namespace
-void Code::Disassemble(const char* name, std::ostream& os, Address current_pc) {
- Isolate* isolate = GetIsolate();
+void Code::Disassemble(const char* name, std::ostream& os, Isolate* isolate,
+ Address current_pc) {
os << "kind = " << Kind2String(kind()) << "\n";
if (name == nullptr) {
name = GetName(isolate);
@@ -683,7 +682,7 @@ void Code::Disassemble(const char* name, std::ostream& os, Address current_pc) {
os << "stack_slots = " << stack_slots() << "\n";
}
os << "compiler = " << (is_turbofanned() ? "turbofan" : "unknown") << "\n";
- os << "address = " << static_cast<const void*>(this) << "\n\n";
+ os << "address = " << reinterpret_cast<void*>(ptr()) << "\n\n";
if (is_off_heap_trampoline()) {
int trampoline_size = raw_instruction_size();
@@ -991,8 +990,7 @@ Handle<DependentCode> DependentCode::EnsureSpace(
int capacity = kCodesStartIndex + DependentCode::Grow(entries->count());
int grow_by = capacity - entries->length();
return Handle<DependentCode>::cast(
- isolate->factory()->CopyWeakFixedArrayAndGrow(entries, grow_by,
- AllocationType::kOld));
+ isolate->factory()->CopyWeakFixedArrayAndGrow(entries, grow_by));
}
bool DependentCode::Compact() {
diff --git a/deps/v8/src/objects/code.h b/deps/v8/src/objects/code.h
index 6a5ac9f31a..6f8c378093 100644
--- a/deps/v8/src/objects/code.h
+++ b/deps/v8/src/objects/code.h
@@ -61,6 +61,7 @@ class Code : public HeapObject {
#ifdef ENABLE_DISASSEMBLER
const char* GetName(Isolate* isolate) const;
V8_EXPORT_PRIVATE void Disassemble(const char* name, std::ostream& os,
+ Isolate* isolate,
Address current_pc = kNullAddress);
#endif
diff --git a/deps/v8/src/objects/contexts-inl.h b/deps/v8/src/objects/contexts-inl.h
index 0c566dd081..669e98591f 100644
--- a/deps/v8/src/objects/contexts-inl.h
+++ b/deps/v8/src/objects/contexts-inl.h
@@ -13,6 +13,7 @@
#include "src/objects/js-objects-inl.h"
#include "src/objects/map-inl.h"
#include "src/objects/objects-inl.h"
+#include "src/objects/osr-optimized-code-cache-inl.h"
#include "src/objects/regexp-match-info.h"
#include "src/objects/scope-info.h"
#include "src/objects/shared-function-info.h"
@@ -47,10 +48,29 @@ Context ScriptContextTable::get_context(int i) const {
OBJECT_CONSTRUCTORS_IMPL(Context, HeapObject)
NEVER_READ_ONLY_SPACE_IMPL(Context)
CAST_ACCESSOR(Context)
-SMI_ACCESSORS(Context, length, kLengthOffset)
+
+SMI_ACCESSORS(Context, length_and_extension_flag, kLengthOffset)
+SYNCHRONIZED_SMI_ACCESSORS(Context, length_and_extension_flag, kLengthOffset)
CAST_ACCESSOR(NativeContext)
+int Context::length() const {
+ return LengthField::decode(length_and_extension_flag());
+}
+
+int Context::synchronized_length() const {
+ return LengthField::decode(synchronized_length_and_extension_flag());
+}
+
+void Context::initialize_length_and_extension_bit(int len,
+ Context::HasExtension flag) {
+ DCHECK(LengthField::is_valid(len));
+ int value = 0;
+ value = LengthField::update(value, len);
+ value = HasExtensionField::update(value, flag == Context::HasExtension::kYes);
+ set_length_and_extension_flag(value);
+}
+
Object Context::get(int index) const {
Isolate* isolate = GetIsolateForPtrCompr(*this);
return get(isolate, index);
@@ -94,11 +114,20 @@ void Context::set_previous(Context context) { set(PREVIOUS_INDEX, context); }
Object Context::next_context_link() { return get(Context::NEXT_CONTEXT_LINK); }
-bool Context::has_extension() { return !extension().IsTheHole(); }
+bool Context::has_extension() {
+ return static_cast<bool>(
+ HasExtensionField::decode(length_and_extension_flag())) &&
+ !extension().IsTheHole();
+}
+
HeapObject Context::extension() {
return HeapObject::cast(get(EXTENSION_INDEX));
}
-void Context::set_extension(HeapObject object) { set(EXTENSION_INDEX, object); }
+void Context::set_extension(HeapObject object) {
+ set(EXTENSION_INDEX, object);
+ synchronized_set_length_and_extension_flag(
+ HasExtensionField::update(length_and_extension_flag(), true));
+}
NativeContext Context::native_context() const {
Object result = get(NATIVE_CONTEXT_INDEX);
@@ -197,7 +226,7 @@ int Context::FunctionMapIndex(LanguageMode language_mode, FunctionKind kind,
base = IsAsyncFunction(kind) ? ASYNC_GENERATOR_FUNCTION_MAP_INDEX
: GENERATOR_FUNCTION_MAP_INDEX;
- } else if (IsAsyncFunction(kind)) {
+ } else if (IsAsyncFunction(kind) || IsAsyncModule(kind)) {
CHECK_FOLLOWS4(ASYNC_FUNCTION_MAP_INDEX, ASYNC_FUNCTION_WITH_NAME_MAP_INDEX,
ASYNC_FUNCTION_WITH_HOME_OBJECT_MAP_INDEX,
ASYNC_FUNCTION_WITH_NAME_AND_HOME_OBJECT_MAP_INDEX);
@@ -252,6 +281,10 @@ void NativeContext::set_microtask_queue(MicrotaskQueue* microtask_queue) {
reinterpret_cast<Address>(microtask_queue));
}
+OSROptimizedCodeCache NativeContext::GetOSROptimizedCodeCache() {
+ return OSROptimizedCodeCache::cast(osr_code_cache());
+}
+
OBJECT_CONSTRUCTORS_IMPL(NativeContext, Context)
} // namespace internal
diff --git a/deps/v8/src/objects/contexts.cc b/deps/v8/src/objects/contexts.cc
index 74fb4477b1..9dbba06a4d 100644
--- a/deps/v8/src/objects/contexts.cc
+++ b/deps/v8/src/objects/contexts.cc
@@ -39,12 +39,14 @@ Handle<ScriptContextTable> ScriptContextTable::Extend(
bool ScriptContextTable::Lookup(Isolate* isolate, ScriptContextTable table,
String name, LookupResult* result) {
DisallowHeapAllocation no_gc;
+ // Static variables cannot be in script contexts.
+ IsStaticFlag is_static_flag;
for (int i = 0; i < table.used(); i++) {
Context context = table.get_context(i);
DCHECK(context.IsScriptContext());
int slot_index = ScopeInfo::ContextSlotIndex(
context.scope_info(), name, &result->mode, &result->init_flag,
- &result->maybe_assigned_flag);
+ &result->maybe_assigned_flag, &is_static_flag);
if (slot_index >= 0) {
result->context_index = i;
@@ -129,10 +131,6 @@ JSGlobalProxy Context::global_proxy() {
return native_context().global_proxy_object();
}
-void Context::set_global_proxy(JSGlobalProxy object) {
- native_context().set_global_proxy_object(object);
-}
-
/**
* Lookups a property in an object environment, taking the unscopables into
* account. This is used For HasBinding spec algorithms for ObjectEnvironment.
@@ -175,7 +173,6 @@ Handle<Object> Context::Lookup(Handle<Context> context, Handle<String> name,
Isolate* isolate = context->GetIsolate();
bool follow_context_chain = (flags & FOLLOW_CONTEXT_CHAIN) != 0;
- bool failed_whitelist = false;
*index = kNotFound;
*attributes = ABSENT;
*init_flag = kCreatedInitialized;
@@ -287,8 +284,10 @@ Handle<Object> Context::Lookup(Handle<Context> context, Handle<String> name,
VariableMode mode;
InitializationFlag flag;
MaybeAssignedFlag maybe_assigned_flag;
- int slot_index = ScopeInfo::ContextSlotIndex(scope_info, *name, &mode,
- &flag, &maybe_assigned_flag);
+ IsStaticFlag is_static_flag;
+ int slot_index =
+ ScopeInfo::ContextSlotIndex(scope_info, *name, &mode, &flag,
+ &maybe_assigned_flag, &is_static_flag);
DCHECK(slot_index < 0 || slot_index >= MIN_CONTEXT_SLOTS);
if (slot_index >= 0) {
if (FLAG_trace_contexts) {
@@ -357,6 +356,17 @@ Handle<Object> Context::Lookup(Handle<Context> context, Handle<String> name,
return extension;
}
}
+
+ // Check blacklist. Names that are listed cannot be resolved further.
+ Object blacklist = context->get(BLACK_LIST_INDEX);
+ if (blacklist.IsStringSet() &&
+ StringSet::cast(blacklist).Has(isolate, name)) {
+ if (FLAG_trace_contexts) {
+ PrintF(" - name is blacklisted. Aborting.\n");
+ }
+ break;
+ }
+
// Check the original context, but do not follow its context chain.
Object obj = context->get(WRAPPED_CONTEXT_INDEX);
if (obj.IsContext()) {
@@ -366,26 +376,12 @@ Handle<Object> Context::Lookup(Handle<Context> context, Handle<String> name,
attributes, init_flag, variable_mode);
if (!result.is_null()) return result;
}
- // Check whitelist. Names that do not pass whitelist shall only resolve
- // to with, script or native contexts up the context chain.
- obj = context->get(WHITE_LIST_INDEX);
- if (obj.IsStringSet()) {
- failed_whitelist =
- failed_whitelist || !StringSet::cast(obj).Has(isolate, name);
- }
}
// 3. Prepare to continue with the previous (next outermost) context.
if (context->IsNativeContext()) break;
- do {
- context = Handle<Context>(context->previous(), isolate);
- // If we come across a whitelist context, and the name is not
- // whitelisted, then only consider with, script, module or native
- // contexts.
- } while (failed_whitelist && !context->IsScriptContext() &&
- !context->IsNativeContext() && !context->IsWithContext() &&
- !context->IsModuleContext());
+ context = Handle<Context>(context->previous(), isolate);
} while (follow_context_chain);
if (FLAG_trace_contexts) {
diff --git a/deps/v8/src/objects/contexts.h b/deps/v8/src/objects/contexts.h
index a7b60ff7b9..7fa988be07 100644
--- a/deps/v8/src/objects/contexts.h
+++ b/deps/v8/src/objects/contexts.h
@@ -7,6 +7,7 @@
#include "src/objects/fixed-array.h"
#include "src/objects/function-kind.h"
+#include "src/objects/osr-optimized-code-cache.h"
#include "torque-generated/field-offsets-tq.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
@@ -37,21 +38,23 @@ enum ContextLookupFlags {
// must always be allocated via Heap::AllocateContext() or
// Factory::NewContext.
-#define NATIVE_CONTEXT_INTRINSIC_FUNCTIONS(V) \
- V(GENERATOR_NEXT_INTERNAL, JSFunction, generator_next_internal) \
- V(MAKE_ERROR_INDEX, JSFunction, make_error) \
- V(MAKE_RANGE_ERROR_INDEX, JSFunction, make_range_error) \
- V(MAKE_SYNTAX_ERROR_INDEX, JSFunction, make_syntax_error) \
- V(MAKE_TYPE_ERROR_INDEX, JSFunction, make_type_error) \
- V(MAKE_URI_ERROR_INDEX, JSFunction, make_uri_error) \
- V(OBJECT_CREATE, JSFunction, object_create) \
- V(REFLECT_APPLY_INDEX, JSFunction, reflect_apply) \
- V(REFLECT_CONSTRUCT_INDEX, JSFunction, reflect_construct) \
- V(MATH_FLOOR_INDEX, JSFunction, math_floor) \
- V(MATH_POW_INDEX, JSFunction, math_pow) \
- V(PROMISE_INTERNAL_CONSTRUCTOR_INDEX, JSFunction, \
- promise_internal_constructor) \
- V(IS_PROMISE_INDEX, JSFunction, is_promise) \
+#define NATIVE_CONTEXT_INTRINSIC_FUNCTIONS(V) \
+ V(GENERATOR_NEXT_INTERNAL, JSFunction, generator_next_internal) \
+ V(ASYNC_MODULE_EVALUATE_INTERNAL, JSFunction, \
+ async_module_evaluate_internal) \
+ V(MAKE_ERROR_INDEX, JSFunction, make_error) \
+ V(MAKE_RANGE_ERROR_INDEX, JSFunction, make_range_error) \
+ V(MAKE_SYNTAX_ERROR_INDEX, JSFunction, make_syntax_error) \
+ V(MAKE_TYPE_ERROR_INDEX, JSFunction, make_type_error) \
+ V(MAKE_URI_ERROR_INDEX, JSFunction, make_uri_error) \
+ V(OBJECT_CREATE, JSFunction, object_create) \
+ V(REFLECT_APPLY_INDEX, JSFunction, reflect_apply) \
+ V(REFLECT_CONSTRUCT_INDEX, JSFunction, reflect_construct) \
+ V(MATH_FLOOR_INDEX, JSFunction, math_floor) \
+ V(MATH_POW_INDEX, JSFunction, math_pow) \
+ V(PROMISE_INTERNAL_CONSTRUCTOR_INDEX, JSFunction, \
+ promise_internal_constructor) \
+ V(IS_PROMISE_INDEX, JSFunction, is_promise) \
V(PROMISE_THEN_INDEX, JSFunction, promise_then)
#define NATIVE_CONTEXT_FIELDS(V) \
@@ -104,6 +107,8 @@ enum ContextLookupFlags {
V(CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, JSFunction, \
call_as_constructor_delegate) \
V(CALL_AS_FUNCTION_DELEGATE_INDEX, JSFunction, call_as_function_delegate) \
+ V(CALL_ASYNC_MODULE_FULFILLED, JSFunction, call_async_module_fulfilled) \
+ V(CALL_ASYNC_MODULE_REJECTED, JSFunction, call_async_module_rejected) \
V(CALLSITE_FUNCTION_INDEX, JSFunction, callsite_function) \
V(CONTEXT_EXTENSION_FUNCTION_INDEX, JSFunction, context_extension_function) \
V(DATA_PROPERTY_DESCRIPTOR_MAP_INDEX, Map, data_property_descriptor_map) \
@@ -159,6 +164,11 @@ enum ContextLookupFlags {
V(INTL_NUMBER_FORMAT_FUNCTION_INDEX, JSFunction, \
intl_number_format_function) \
V(INTL_LOCALE_FUNCTION_INDEX, JSFunction, intl_locale_function) \
+ V(INTL_LIST_FORMAT_FUNCTION_INDEX, JSFunction, intl_list_format_function) \
+ V(INTL_PLURAL_RULES_FUNCTION_INDEX, JSFunction, intl_plural_rules_function) \
+ V(INTL_RELATIVE_TIME_FORMAT_FUNCTION_INDEX, JSFunction, \
+ intl_relative_time_format_function) \
+ V(INTL_SEGMENTER_FUNCTION_INDEX, JSFunction, intl_segmenter_function) \
V(INTL_SEGMENT_ITERATOR_MAP_INDEX, Map, intl_segment_iterator_map) \
V(ITERATOR_RESULT_MAP_INDEX, Map, iterator_result_map) \
V(JS_ARRAY_PACKED_SMI_ELEMENTS_MAP_INDEX, Map, \
@@ -233,6 +243,7 @@ enum ContextLookupFlags {
V(REGEXP_PROTOTYPE_MAP_INDEX, Map, regexp_prototype_map) \
V(REGEXP_REPLACE_FUNCTION_INDEX, JSFunction, regexp_replace_function) \
V(REGEXP_RESULT_MAP_INDEX, Map, regexp_result_map) \
+ V(REGEXP_RESULT_INDICES_MAP_INDEX, Map, regexp_result_indices_map) \
V(REGEXP_SEARCH_FUNCTION_INDEX, JSFunction, regexp_search_function) \
V(REGEXP_SPLIT_FUNCTION_INDEX, JSFunction, regexp_split_function) \
V(INITIAL_REGEXP_STRING_ITERATOR_PROTOTYPE_MAP_INDEX, Map, \
@@ -345,6 +356,7 @@ enum ContextLookupFlags {
V(WEAKMAP_SET_INDEX, JSFunction, weakmap_set) \
V(WEAKMAP_GET_INDEX, JSFunction, weakmap_get) \
V(WEAKSET_ADD_INDEX, JSFunction, weakset_add) \
+ V(OSR_CODE_CACHE_INDEX, WeakFixedArray, osr_code_cache) \
NATIVE_CONTEXT_INTRINSIC_FUNCTIONS(V)
// A table of all script contexts. Every loaded top-level script with top-level
@@ -443,9 +455,19 @@ class Context : public HeapObject {
DECL_CAST(Context)
+ enum class HasExtension { kYes, kNo };
+
// [length]: length of the context.
V8_INLINE int length() const;
- V8_INLINE void set_length(int value);
+ V8_INLINE int synchronized_length() const;
+ V8_INLINE void initialize_length_and_extension_bit(
+ int len, HasExtension flag = HasExtension::kNo);
+
+ // We use the 30th bit. Otherwise if we set the 31st bit,
+ // the number would be potentially bigger than an SMI.
+ // Any DCHECK(Smi::IsValue(...)) would fail.
+ using LengthField = BitField<int, 0, kSmiValueSize - 2>;
+ using HasExtensionField = BitField<int, kSmiValueSize - 2, 1>;
// Setter and getter for elements.
V8_INLINE Object get(int index) const;
@@ -458,18 +480,18 @@ class Context : public HeapObject {
TORQUE_GENERATED_CONTEXT_FIELDS)
// TODO(v8:8989): [torque] Support marker constants.
/* TODO(ishell): remove this fixedArray-like header size. */
- static const int kHeaderSize = kScopeInfoOffset;
+ static const int kFixedArrayLikeHeaderSize = kScopeInfoOffset;
static const int kStartOfTaggedFieldsOffset = kScopeInfoOffset;
/* Header size. */ \
/* TODO(ishell): use this as header size once MIN_CONTEXT_SLOTS */ \
/* is removed in favour of offset-based access to common fields. */ \
- static const int kTodoHeaderSize = kSize;
+ static const int kTodoHeaderSize = kHeaderSize;
// Garbage collection support.
V8_INLINE static constexpr int SizeFor(int length) {
// TODO(ishell): switch to kTodoHeaderSize based approach once we no longer
// reference common Context fields via index
- return kHeaderSize + length * kTaggedSize;
+ return kFixedArrayLikeHeaderSize + length * kTaggedSize;
}
// Code Generation support.
@@ -517,7 +539,7 @@ class Context : public HeapObject {
// These slots hold values in debug evaluate contexts.
WRAPPED_CONTEXT_INDEX = MIN_CONTEXT_SLOTS,
- WHITE_LIST_INDEX = MIN_CONTEXT_SLOTS + 1
+ BLACK_LIST_INDEX = MIN_CONTEXT_SLOTS + 1
};
// A region of native context entries containing maps for functions created
@@ -558,7 +580,6 @@ class Context : public HeapObject {
// Returns a JSGlobalProxy object or null.
V8_EXPORT_PRIVATE JSGlobalProxy global_proxy();
- void set_global_proxy(JSGlobalProxy global);
// Get the JSGlobalObject object.
V8_EXPORT_PRIVATE JSGlobalObject global_object();
@@ -652,6 +673,8 @@ class Context : public HeapObject {
#endif
OBJECT_CONSTRUCTORS(Context, HeapObject);
+ DECL_INT_ACCESSORS(length_and_extension_flag)
+ DECL_SYNCHRONIZED_INT_ACCESSORS(length_and_extension_flag)
};
class NativeContext : public Context {
@@ -696,6 +719,8 @@ class NativeContext : public Context {
void SetDeoptimizedCodeListHead(Object head);
Object DeoptimizedCodeListHead();
+ inline OSROptimizedCodeCache GetOSROptimizedCodeCache();
+
void ResetErrorsThrown();
void IncrementErrorsThrown();
int GetErrorsThrown();
diff --git a/deps/v8/src/objects/data-handler.h b/deps/v8/src/objects/data-handler.h
index 667b19b3d4..c9c0cf4cbc 100644
--- a/deps/v8/src/objects/data-handler.h
+++ b/deps/v8/src/objects/data-handler.h
@@ -41,7 +41,7 @@ class DataHandler : public Struct {
static const int kSizeWithData0 = kData1Offset;
static const int kSizeWithData1 = kData2Offset;
static const int kSizeWithData2 = kData3Offset;
- static const int kSizeWithData3 = kSize;
+ static const int kSizeWithData3 = kHeaderSize;
DECL_CAST(DataHandler)
diff --git a/deps/v8/src/objects/debug-objects-inl.h b/deps/v8/src/objects/debug-objects-inl.h
index 273f710c3b..8189481394 100644
--- a/deps/v8/src/objects/debug-objects-inl.h
+++ b/deps/v8/src/objects/debug-objects-inl.h
@@ -21,24 +21,16 @@ namespace internal {
OBJECT_CONSTRUCTORS_IMPL(BreakPoint, Tuple2)
OBJECT_CONSTRUCTORS_IMPL(BreakPointInfo, Tuple2)
OBJECT_CONSTRUCTORS_IMPL(CoverageInfo, FixedArray)
-OBJECT_CONSTRUCTORS_IMPL(DebugInfo, Struct)
+TQ_OBJECT_CONSTRUCTORS_IMPL(DebugInfo)
NEVER_READ_ONLY_SPACE_IMPL(DebugInfo)
CAST_ACCESSOR(BreakPointInfo)
-CAST_ACCESSOR(DebugInfo)
CAST_ACCESSOR(CoverageInfo)
CAST_ACCESSOR(BreakPoint)
-SMI_ACCESSORS(DebugInfo, flags, kFlagsOffset)
-ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
-SMI_ACCESSORS(DebugInfo, debugger_hints, kDebuggerHintsOffset)
-ACCESSORS(DebugInfo, script, Object, kScriptOffset)
-ACCESSORS(DebugInfo, original_bytecode_array, Object,
- kOriginalBytecodeArrayOffset)
-ACCESSORS(DebugInfo, debug_bytecode_array, Object, kDebugBytecodeArrayOffset)
-ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsOffset)
-ACCESSORS(DebugInfo, coverage_info, Object, kCoverageInfoOffset)
+TQ_SMI_ACCESSORS(DebugInfo, flags)
+TQ_SMI_ACCESSORS(DebugInfo, debugger_hints)
BIT_FIELD_ACCESSORS(DebugInfo, debugger_hints, side_effect_state,
DebugInfo::SideEffectStateBits)
diff --git a/deps/v8/src/objects/debug-objects.h b/deps/v8/src/objects/debug-objects.h
index 243caaa526..39f42c1168 100644
--- a/deps/v8/src/objects/debug-objects.h
+++ b/deps/v8/src/objects/debug-objects.h
@@ -5,6 +5,8 @@
#ifndef V8_OBJECTS_DEBUG_OBJECTS_H_
#define V8_OBJECTS_DEBUG_OBJECTS_H_
+#include <memory>
+
#include "src/objects/fixed-array.h"
#include "src/objects/objects.h"
#include "src/objects/struct.h"
@@ -20,7 +22,7 @@ class BytecodeArray;
// The DebugInfo class holds additional information for a function being
// debugged.
-class DebugInfo : public Struct {
+class DebugInfo : public TorqueGeneratedDebugInfo<DebugInfo, Struct> {
public:
NEVER_READ_ONLY_SPACE
enum Flag {
@@ -38,15 +40,9 @@ class DebugInfo : public Struct {
// A bitfield that lists uses of the current instance.
DECL_INT_ACCESSORS(flags)
- // The shared function info for the source being debugged.
- DECL_ACCESSORS(shared, SharedFunctionInfo)
-
// Bit field containing various information collected for debugging.
DECL_INT_ACCESSORS(debugger_hints)
- // Script field from shared function info.
- DECL_ACCESSORS(script, Object)
-
// DebugInfo can be detached from the SharedFunctionInfo iff it is empty.
bool IsEmpty() const;
@@ -83,17 +79,6 @@ class DebugInfo : public Struct {
void ClearBreakAtEntry();
bool BreakAtEntry() const;
- // The original uninstrumented bytecode array for functions with break
- // points - the instrumented bytecode is held in the shared function info.
- DECL_ACCESSORS(original_bytecode_array, Object)
-
- // The debug instrumented bytecode array for functions with break points
- // - also pointed to by the shared function info.
- DECL_ACCESSORS(debug_bytecode_array, Object)
-
- // Fixed array holding status information for each active break point.
- DECL_ACCESSORS(break_points, FixedArray)
-
// Check if there is a break point at a source position.
bool HasBreakPoint(Isolate* isolate, int source_position);
// Attempt to clear a break point. Return true if successful.
@@ -160,17 +145,9 @@ class DebugInfo : public Struct {
// Clears all fields related to block coverage.
void ClearCoverageInfo(Isolate* isolate);
- DECL_ACCESSORS(coverage_info, Object)
-
- DECL_CAST(DebugInfo)
// Dispatched behavior.
DECL_PRINTER(DebugInfo)
- DECL_VERIFIER(DebugInfo)
-
- // Layout description.
- DEFINE_FIELD_OFFSET_CONSTANTS(Struct::kHeaderSize,
- TORQUE_GENERATED_DEBUG_INFO_FIELDS)
static const int kEstimatedNofBreakPointsInFunction = 4;
@@ -178,7 +155,7 @@ class DebugInfo : public Struct {
// Get the break point info object for a source position.
Object GetBreakPointInfo(Isolate* isolate, int source_position);
- OBJECT_CONSTRUCTORS(DebugInfo, Struct);
+ TQ_OBJECT_CONSTRUCTORS(DebugInfo)
};
// The BreakPointInfo class holds information for break points set in a
diff --git a/deps/v8/src/objects/descriptor-array-inl.h b/deps/v8/src/objects/descriptor-array-inl.h
index e2805d795a..5ea14c1e60 100644
--- a/deps/v8/src/objects/descriptor-array-inl.h
+++ b/deps/v8/src/objects/descriptor-array-inl.h
@@ -58,33 +58,35 @@ void DescriptorArray::CopyEnumCacheFrom(DescriptorArray array) {
set_enum_cache(array.enum_cache());
}
-int DescriptorArray::Search(Name name, int valid_descriptors) {
+InternalIndex DescriptorArray::Search(Name name, int valid_descriptors) {
DCHECK(name.IsUniqueName());
- return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors,
- nullptr);
+ return InternalIndex(
+ internal::Search<VALID_ENTRIES>(this, name, valid_descriptors, nullptr));
}
-int DescriptorArray::Search(Name name, Map map) {
+InternalIndex DescriptorArray::Search(Name name, Map map) {
DCHECK(name.IsUniqueName());
int number_of_own_descriptors = map.NumberOfOwnDescriptors();
- if (number_of_own_descriptors == 0) return kNotFound;
+ if (number_of_own_descriptors == 0) return InternalIndex::NotFound();
return Search(name, number_of_own_descriptors);
}
-int DescriptorArray::SearchWithCache(Isolate* isolate, Name name, Map map) {
+InternalIndex DescriptorArray::SearchWithCache(Isolate* isolate, Name name,
+ Map map) {
DCHECK(name.IsUniqueName());
int number_of_own_descriptors = map.NumberOfOwnDescriptors();
- if (number_of_own_descriptors == 0) return kNotFound;
+ if (number_of_own_descriptors == 0) return InternalIndex::NotFound();
DescriptorLookupCache* cache = isolate->descriptor_lookup_cache();
int number = cache->Lookup(map, name);
if (number == DescriptorLookupCache::kAbsent) {
- number = Search(name, number_of_own_descriptors);
+ InternalIndex result = Search(name, number_of_own_descriptors);
+ number = result.is_found() ? result.as_int() : DescriptorArray::kNotFound;
cache->Update(map, name, number);
}
-
- return number;
+ if (number == DescriptorArray::kNotFound) return InternalIndex::NotFound();
+ return InternalIndex(number);
}
ObjectSlot DescriptorArray::GetFirstPointerSlot() {
@@ -102,26 +104,27 @@ ObjectSlot DescriptorArray::GetDescriptorSlot(int descriptor) {
return RawField(OffsetOfDescriptorAt(descriptor));
}
-Name DescriptorArray::GetKey(int descriptor_number) const {
+Name DescriptorArray::GetKey(InternalIndex descriptor_number) const {
Isolate* isolate = GetIsolateForPtrCompr(*this);
return GetKey(isolate, descriptor_number);
}
-Name DescriptorArray::GetKey(Isolate* isolate, int descriptor_number) const {
- DCHECK_LT(descriptor_number, number_of_descriptors());
- int entry_offset = OffsetOfDescriptorAt(descriptor_number);
+Name DescriptorArray::GetKey(Isolate* isolate,
+ InternalIndex descriptor_number) const {
+ DCHECK_LT(descriptor_number.as_int(), number_of_descriptors());
+ int entry_offset = OffsetOfDescriptorAt(descriptor_number.as_int());
return Name::cast(EntryKeyField::Relaxed_Load(isolate, *this, entry_offset));
}
-void DescriptorArray::SetKey(int descriptor_number, Name key) {
- DCHECK_LT(descriptor_number, number_of_descriptors());
- int entry_offset = OffsetOfDescriptorAt(descriptor_number);
+void DescriptorArray::SetKey(InternalIndex descriptor_number, Name key) {
+ DCHECK_LT(descriptor_number.as_int(), number_of_descriptors());
+ int entry_offset = OffsetOfDescriptorAt(descriptor_number.as_int());
EntryKeyField::Relaxed_Store(*this, entry_offset, key);
WRITE_BARRIER(*this, entry_offset + kEntryKeyOffset, key);
}
int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
- return GetDetails(descriptor_number).pointer();
+ return GetDetails(InternalIndex(descriptor_number)).pointer();
}
Name DescriptorArray::GetSortedKey(int descriptor_number) {
@@ -130,81 +133,83 @@ Name DescriptorArray::GetSortedKey(int descriptor_number) {
}
Name DescriptorArray::GetSortedKey(Isolate* isolate, int descriptor_number) {
- return GetKey(isolate, GetSortedKeyIndex(descriptor_number));
+ return GetKey(isolate, InternalIndex(GetSortedKeyIndex(descriptor_number)));
}
void DescriptorArray::SetSortedKey(int descriptor_number, int pointer) {
- PropertyDetails details = GetDetails(descriptor_number);
- SetDetails(descriptor_number, details.set_pointer(pointer));
+ PropertyDetails details = GetDetails(InternalIndex(descriptor_number));
+ SetDetails(InternalIndex(descriptor_number), details.set_pointer(pointer));
}
-Object DescriptorArray::GetStrongValue(int descriptor_number) {
+Object DescriptorArray::GetStrongValue(InternalIndex descriptor_number) {
Isolate* isolate = GetIsolateForPtrCompr(*this);
return GetStrongValue(isolate, descriptor_number);
}
Object DescriptorArray::GetStrongValue(Isolate* isolate,
- int descriptor_number) {
+ InternalIndex descriptor_number) {
return GetValue(isolate, descriptor_number).cast<Object>();
}
-void DescriptorArray::SetValue(int descriptor_number, MaybeObject value) {
- DCHECK_LT(descriptor_number, number_of_descriptors());
- int entry_offset = OffsetOfDescriptorAt(descriptor_number);
+void DescriptorArray::SetValue(InternalIndex descriptor_number,
+ MaybeObject value) {
+ DCHECK_LT(descriptor_number.as_int(), number_of_descriptors());
+ int entry_offset = OffsetOfDescriptorAt(descriptor_number.as_int());
EntryValueField::Relaxed_Store(*this, entry_offset, value);
WEAK_WRITE_BARRIER(*this, entry_offset + kEntryValueOffset, value);
}
-MaybeObject DescriptorArray::GetValue(int descriptor_number) {
+MaybeObject DescriptorArray::GetValue(InternalIndex descriptor_number) {
Isolate* isolate = GetIsolateForPtrCompr(*this);
return GetValue(isolate, descriptor_number);
}
-MaybeObject DescriptorArray::GetValue(Isolate* isolate, int descriptor_number) {
- DCHECK_LT(descriptor_number, number_of_descriptors());
- int entry_offset = OffsetOfDescriptorAt(descriptor_number);
+MaybeObject DescriptorArray::GetValue(Isolate* isolate,
+ InternalIndex descriptor_number) {
+ DCHECK_LT(descriptor_number.as_int(), number_of_descriptors());
+ int entry_offset = OffsetOfDescriptorAt(descriptor_number.as_int());
return EntryValueField::Relaxed_Load(isolate, *this, entry_offset);
}
-PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
- DCHECK_LT(descriptor_number, number_of_descriptors());
- int entry_offset = OffsetOfDescriptorAt(descriptor_number);
+PropertyDetails DescriptorArray::GetDetails(InternalIndex descriptor_number) {
+ DCHECK_LT(descriptor_number.as_int(), number_of_descriptors());
+ int entry_offset = OffsetOfDescriptorAt(descriptor_number.as_int());
Smi details = EntryDetailsField::Relaxed_Load(*this, entry_offset);
return PropertyDetails(details);
}
-void DescriptorArray::SetDetails(int descriptor_number,
+void DescriptorArray::SetDetails(InternalIndex descriptor_number,
PropertyDetails details) {
- DCHECK_LT(descriptor_number, number_of_descriptors());
- int entry_offset = OffsetOfDescriptorAt(descriptor_number);
+ DCHECK_LT(descriptor_number.as_int(), number_of_descriptors());
+ int entry_offset = OffsetOfDescriptorAt(descriptor_number.as_int());
EntryDetailsField::Relaxed_Store(*this, entry_offset, details.AsSmi());
}
-int DescriptorArray::GetFieldIndex(int descriptor_number) {
+int DescriptorArray::GetFieldIndex(InternalIndex descriptor_number) {
DCHECK_EQ(GetDetails(descriptor_number).location(), kField);
return GetDetails(descriptor_number).field_index();
}
-FieldType DescriptorArray::GetFieldType(int descriptor_number) {
+FieldType DescriptorArray::GetFieldType(InternalIndex descriptor_number) {
Isolate* isolate = GetIsolateForPtrCompr(*this);
return GetFieldType(isolate, descriptor_number);
}
FieldType DescriptorArray::GetFieldType(Isolate* isolate,
- int descriptor_number) {
+ InternalIndex descriptor_number) {
DCHECK_EQ(GetDetails(descriptor_number).location(), kField);
MaybeObject wrapped_type = GetValue(isolate, descriptor_number);
return Map::UnwrapFieldType(wrapped_type);
}
-void DescriptorArray::Set(int descriptor_number, Name key, MaybeObject value,
- PropertyDetails details) {
+void DescriptorArray::Set(InternalIndex descriptor_number, Name key,
+ MaybeObject value, PropertyDetails details) {
SetKey(descriptor_number, key);
SetDetails(descriptor_number, details);
SetValue(descriptor_number, value);
}
-void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
+void DescriptorArray::Set(InternalIndex descriptor_number, Descriptor* desc) {
Name key = *desc->GetKey();
MaybeObject value = *desc->GetValue();
Set(descriptor_number, key, value, desc->GetDetails());
@@ -215,7 +220,7 @@ void DescriptorArray::Append(Descriptor* desc) {
int descriptor_number = number_of_descriptors();
DCHECK_LE(descriptor_number + 1, number_of_all_descriptors());
set_number_of_descriptors(descriptor_number + 1);
- Set(descriptor_number, desc);
+ Set(InternalIndex(descriptor_number), desc);
uint32_t hash = desc->GetKey()->Hash();
diff --git a/deps/v8/src/objects/descriptor-array.h b/deps/v8/src/objects/descriptor-array.h
index 0f17cd22ea..73b94b7cfa 100644
--- a/deps/v8/src/objects/descriptor-array.h
+++ b/deps/v8/src/objects/descriptor-array.h
@@ -6,6 +6,8 @@
#define V8_OBJECTS_DESCRIPTOR_ARRAY_H_
#include "src/objects/fixed-array.h"
+// TODO(jkummerow): Consider forward-declaring instead.
+#include "src/objects/internal-index.h"
#include "src/objects/objects.h"
#include "src/objects/struct.h"
#include "src/utils/utils.h"
@@ -62,27 +64,29 @@ class DescriptorArray : public HeapObject {
Handle<FixedArray> indices);
// Accessors for fetching instance descriptor at descriptor number.
- inline Name GetKey(int descriptor_number) const;
- inline Name GetKey(Isolate* isolate, int descriptor_number) const;
- inline Object GetStrongValue(int descriptor_number);
- inline Object GetStrongValue(Isolate* isolate, int descriptor_number);
- inline MaybeObject GetValue(int descriptor_number);
- inline MaybeObject GetValue(Isolate* isolate, int descriptor_number);
- inline PropertyDetails GetDetails(int descriptor_number);
- inline int GetFieldIndex(int descriptor_number);
- inline FieldType GetFieldType(int descriptor_number);
- inline FieldType GetFieldType(Isolate* isolate, int descriptor_number);
+ inline Name GetKey(InternalIndex descriptor_number) const;
+ inline Name GetKey(Isolate* isolate, InternalIndex descriptor_number) const;
+ inline Object GetStrongValue(InternalIndex descriptor_number);
+ inline Object GetStrongValue(Isolate* isolate,
+ InternalIndex descriptor_number);
+ inline MaybeObject GetValue(InternalIndex descriptor_number);
+ inline MaybeObject GetValue(Isolate* isolate,
+ InternalIndex descriptor_number);
+ inline PropertyDetails GetDetails(InternalIndex descriptor_number);
+ inline int GetFieldIndex(InternalIndex descriptor_number);
+ inline FieldType GetFieldType(InternalIndex descriptor_number);
+ inline FieldType GetFieldType(Isolate* isolate,
+ InternalIndex descriptor_number);
inline Name GetSortedKey(int descriptor_number);
inline Name GetSortedKey(Isolate* isolate, int descriptor_number);
inline int GetSortedKeyIndex(int descriptor_number);
- inline void SetSortedKey(int pointer, int descriptor_number);
// Accessor for complete descriptor.
- inline void Set(int descriptor_number, Descriptor* desc);
- inline void Set(int descriptor_number, Name key, MaybeObject value,
+ inline void Set(InternalIndex descriptor_number, Descriptor* desc);
+ inline void Set(InternalIndex descriptor_number, Name key, MaybeObject value,
PropertyDetails details);
- void Replace(int descriptor_number, Descriptor* descriptor);
+ void Replace(InternalIndex descriptor_number, Descriptor* descriptor);
// Generalizes constness, representation and field type of all field
// descriptors.
@@ -109,20 +113,20 @@ class DescriptorArray : public HeapObject {
void Sort();
// Search the instance descriptors for given name.
- V8_INLINE int Search(Name name, int number_of_own_descriptors);
- V8_INLINE int Search(Name name, Map map);
+ V8_INLINE InternalIndex Search(Name name, int number_of_own_descriptors);
+ V8_INLINE InternalIndex Search(Name name, Map map);
// As the above, but uses DescriptorLookupCache and updates it when
// necessary.
- V8_INLINE int SearchWithCache(Isolate* isolate, Name name, Map map);
+ V8_INLINE InternalIndex SearchWithCache(Isolate* isolate, Name name, Map map);
bool IsEqualUpTo(DescriptorArray desc, int nof_descriptors);
// Allocates a DescriptorArray, but returns the singleton
// empty descriptor array object if number_of_descriptors is 0.
- V8_EXPORT_PRIVATE static Handle<DescriptorArray> Allocate(
- Isolate* isolate, int nof_descriptors, int slack,
- AllocationType allocation = AllocationType::kYoung);
+ V8_EXPORT_PRIVATE static Handle<DescriptorArray> Allocate(Isolate* isolate,
+ int nof_descriptors,
+ int slack);
void Initialize(EnumCache enum_cache, HeapObject undefined_value,
int nof_descriptors, int slack);
@@ -176,7 +180,7 @@ class DescriptorArray : public HeapObject {
// Print all the descriptors.
void PrintDescriptors(std::ostream& os);
- void PrintDescriptorDetails(std::ostream& os, int descriptor,
+ void PrintDescriptorDetails(std::ostream& os, InternalIndex descriptor,
PropertyDetails::PrintMode mode);
DECL_PRINTER(DescriptorArray)
@@ -210,13 +214,16 @@ class DescriptorArray : public HeapObject {
private:
DECL_INT16_ACCESSORS(filler16bits)
- inline void SetKey(int descriptor_number, Name key);
- inline void SetValue(int descriptor_number, MaybeObject value);
- inline void SetDetails(int descriptor_number, PropertyDetails details);
+ inline void SetKey(InternalIndex descriptor_number, Name key);
+ inline void SetValue(InternalIndex descriptor_number, MaybeObject value);
+ inline void SetDetails(InternalIndex descriptor_number,
+ PropertyDetails details);
// Transfer a complete descriptor from the src descriptor array to this
// descriptor array.
- void CopyFrom(int index, DescriptorArray src);
+ void CopyFrom(InternalIndex index, DescriptorArray src);
+
+ inline void SetSortedKey(int pointer, int descriptor_number);
// Swap first and second descriptor.
inline void SwapSortedKeys(int first, int second);
diff --git a/deps/v8/src/objects/elements.cc b/deps/v8/src/objects/elements.cc
index 6e5648d2f4..686f1a9b1a 100644
--- a/deps/v8/src/objects/elements.cc
+++ b/deps/v8/src/objects/elements.cc
@@ -8,6 +8,7 @@
#include "src/execution/arguments.h"
#include "src/execution/frames.h"
#include "src/execution/isolate-inl.h"
+#include "src/execution/protectors-inl.h"
#include "src/heap/factory.h"
#include "src/heap/heap-inl.h" // For MaxNumberToStringCacheSize.
#include "src/heap/heap-write-barrier-inl.h"
@@ -509,11 +510,11 @@ Maybe<int64_t> IndexOfValueSlowPath(Isolate* isolate, Handle<JSObject> receiver,
// that take an entry (instead of an index) as an argument.
class InternalElementsAccessor : public ElementsAccessor {
public:
- uint32_t GetEntryForIndex(Isolate* isolate, JSObject holder,
- FixedArrayBase backing_store,
- uint32_t index) override = 0;
+ InternalIndex GetEntryForIndex(Isolate* isolate, JSObject holder,
+ FixedArrayBase backing_store,
+ uint32_t index) override = 0;
- PropertyDetails GetDetails(JSObject holder, uint32_t entry) override = 0;
+ PropertyDetails GetDetails(JSObject holder, InternalIndex entry) override = 0;
};
// Base class for element handler implementations. Contains the
@@ -594,16 +595,17 @@ class ElementsAccessorBase : public InternalElementsAccessor {
FixedArrayBase backing_store,
PropertyFilter filter = ALL_PROPERTIES) {
return Subclass::GetEntryForIndexImpl(isolate, holder, backing_store, index,
- filter) != kMaxUInt32;
+ filter)
+ .is_found();
}
- bool HasEntry(JSObject holder, uint32_t entry) final {
+ bool HasEntry(JSObject holder, InternalIndex entry) final {
return Subclass::HasEntryImpl(holder.GetIsolate(), holder.elements(),
entry);
}
static bool HasEntryImpl(Isolate* isolate, FixedArrayBase backing_store,
- uint32_t entry) {
+ InternalIndex entry) {
UNIMPLEMENTED();
}
@@ -615,33 +617,33 @@ class ElementsAccessorBase : public InternalElementsAccessor {
return false;
}
- Handle<Object> Get(Handle<JSObject> holder, uint32_t entry) final {
+ Handle<Object> Get(Handle<JSObject> holder, InternalIndex entry) final {
return Subclass::GetInternalImpl(holder, entry);
}
static Handle<Object> GetInternalImpl(Handle<JSObject> holder,
- uint32_t entry) {
+ InternalIndex entry) {
return Subclass::GetImpl(holder->GetIsolate(), holder->elements(), entry);
}
static Handle<Object> GetImpl(Isolate* isolate, FixedArrayBase backing_store,
- uint32_t entry) {
+ InternalIndex entry) {
uint32_t index = GetIndexForEntryImpl(backing_store, entry);
return handle(BackingStore::cast(backing_store).get(index), isolate);
}
- void Set(Handle<JSObject> holder, uint32_t entry, Object value) final {
+ void Set(Handle<JSObject> holder, InternalIndex entry, Object value) final {
Subclass::SetImpl(holder, entry, value);
}
void Reconfigure(Handle<JSObject> object, Handle<FixedArrayBase> store,
- uint32_t entry, Handle<Object> value,
+ InternalIndex entry, Handle<Object> value,
PropertyAttributes attributes) final {
Subclass::ReconfigureImpl(object, store, entry, value, attributes);
}
static void ReconfigureImpl(Handle<JSObject> object,
- Handle<FixedArrayBase> store, uint32_t entry,
+ Handle<FixedArrayBase> store, InternalIndex entry,
Handle<Object> value,
PropertyAttributes attributes) {
UNREACHABLE();
@@ -917,7 +919,7 @@ class ElementsAccessorBase : public InternalElementsAccessor {
return true;
}
- void Delete(Handle<JSObject> obj, uint32_t entry) final {
+ void Delete(Handle<JSObject> obj, InternalIndex entry) final {
Subclass::DeleteImpl(obj, entry);
}
@@ -1024,9 +1026,9 @@ class ElementsAccessorBase : public InternalElementsAccessor {
if (!key->ToUint32(&index)) continue;
DCHECK_EQ(object->GetElementsKind(), original_elements_kind);
- uint32_t entry = Subclass::GetEntryForIndexImpl(
+ InternalIndex entry = Subclass::GetEntryForIndexImpl(
isolate, *object, object->elements(), index, filter);
- if (entry == kMaxUInt32) continue;
+ if (entry.is_not_found()) continue;
PropertyDetails details = Subclass::GetDetailsImpl(*object, entry);
Handle<Object> value;
@@ -1053,9 +1055,9 @@ class ElementsAccessorBase : public InternalElementsAccessor {
InternalElementsAccessor* accessor =
reinterpret_cast<InternalElementsAccessor*>(
object->GetElementsAccessor());
- uint32_t entry = accessor->GetEntryForIndex(isolate, *object,
- object->elements(), index);
- if (entry == kMaxUInt32) continue;
+ InternalIndex entry = accessor->GetEntryForIndex(
+ isolate, *object, object->elements(), index);
+ if (entry.is_not_found()) continue;
PropertyDetails details = accessor->GetDetails(*object, entry);
if (!details.IsEnumerable()) continue;
}
@@ -1280,43 +1282,44 @@ class ElementsAccessorBase : public InternalElementsAccessor {
void Reverse(JSObject receiver) final { Subclass::ReverseImpl(receiver); }
static uint32_t GetIndexForEntryImpl(FixedArrayBase backing_store,
- uint32_t entry) {
- return entry;
+ InternalIndex entry) {
+ return entry.as_uint32();
}
- static uint32_t GetEntryForIndexImpl(Isolate* isolate, JSObject holder,
- FixedArrayBase backing_store,
- uint32_t index, PropertyFilter filter) {
+ static InternalIndex GetEntryForIndexImpl(Isolate* isolate, JSObject holder,
+ FixedArrayBase backing_store,
+ uint32_t index,
+ PropertyFilter filter) {
DCHECK(IsFastElementsKind(kind()) ||
IsAnyNonextensibleElementsKind(kind()));
uint32_t length = Subclass::GetMaxIndex(holder, backing_store);
if (IsHoleyElementsKindForRead(kind())) {
return index < length && !BackingStore::cast(backing_store)
.is_the_hole(isolate, index)
- ? index
- : kMaxUInt32;
+ ? InternalIndex(index)
+ : InternalIndex::NotFound();
} else {
- return index < length ? index : kMaxUInt32;
+ return index < length ? InternalIndex(index) : InternalIndex::NotFound();
}
}
- uint32_t GetEntryForIndex(Isolate* isolate, JSObject holder,
- FixedArrayBase backing_store,
- uint32_t index) final {
+ InternalIndex GetEntryForIndex(Isolate* isolate, JSObject holder,
+ FixedArrayBase backing_store,
+ uint32_t index) final {
return Subclass::GetEntryForIndexImpl(isolate, holder, backing_store, index,
ALL_PROPERTIES);
}
static PropertyDetails GetDetailsImpl(FixedArrayBase backing_store,
- uint32_t entry) {
+ InternalIndex entry) {
return PropertyDetails(kData, NONE, PropertyCellType::kNoCell);
}
- static PropertyDetails GetDetailsImpl(JSObject holder, uint32_t entry) {
+ static PropertyDetails GetDetailsImpl(JSObject holder, InternalIndex entry) {
return PropertyDetails(kData, NONE, PropertyCellType::kNoCell);
}
- PropertyDetails GetDetails(JSObject holder, uint32_t entry) final {
+ PropertyDetails GetDetails(JSObject holder, InternalIndex entry) final {
return Subclass::GetDetailsImpl(holder, entry);
}
@@ -1419,10 +1422,11 @@ class DictionaryElementsAccessor
UNREACHABLE();
}
- static void DeleteImpl(Handle<JSObject> obj, uint32_t entry) {
+ static void DeleteImpl(Handle<JSObject> obj, InternalIndex entry) {
Handle<NumberDictionary> dict(NumberDictionary::cast(obj->elements()),
obj->GetIsolate());
- dict = NumberDictionary::DeleteEntry(obj->GetIsolate(), dict, entry);
+ dict =
+ NumberDictionary::DeleteEntry(obj->GetIsolate(), dict, entry.as_int());
obj->set_elements(*dict);
}
@@ -1441,38 +1445,38 @@ class DictionaryElementsAccessor
return false;
}
- static Object GetRaw(FixedArrayBase store, uint32_t entry) {
+ static Object GetRaw(FixedArrayBase store, InternalIndex entry) {
NumberDictionary backing_store = NumberDictionary::cast(store);
- return backing_store.ValueAt(entry);
+ return backing_store.ValueAt(entry.as_int());
}
static Handle<Object> GetImpl(Isolate* isolate, FixedArrayBase backing_store,
- uint32_t entry) {
+ InternalIndex entry) {
return handle(GetRaw(backing_store, entry), isolate);
}
- static inline void SetImpl(Handle<JSObject> holder, uint32_t entry,
+ static inline void SetImpl(Handle<JSObject> holder, InternalIndex entry,
Object value) {
SetImpl(holder->elements(), entry, value);
}
- static inline void SetImpl(FixedArrayBase backing_store, uint32_t entry,
+ static inline void SetImpl(FixedArrayBase backing_store, InternalIndex entry,
Object value) {
- NumberDictionary::cast(backing_store).ValueAtPut(entry, value);
+ NumberDictionary::cast(backing_store).ValueAtPut(entry.as_int(), value);
}
static void ReconfigureImpl(Handle<JSObject> object,
- Handle<FixedArrayBase> store, uint32_t entry,
+ Handle<FixedArrayBase> store, InternalIndex entry,
Handle<Object> value,
PropertyAttributes attributes) {
NumberDictionary dictionary = NumberDictionary::cast(*store);
if (attributes != NONE) object->RequireSlowElements(dictionary);
- dictionary.ValueAtPut(entry, *value);
- PropertyDetails details = dictionary.DetailsAt(entry);
+ dictionary.ValueAtPut(entry.as_int(), *value);
+ PropertyDetails details = dictionary.DetailsAt(entry.as_int());
details = PropertyDetails(kData, attributes, PropertyCellType::kNoCell,
details.dictionary_index());
- dictionary.DetailsAtPut(object->GetIsolate(), entry, details);
+ dictionary.DetailsAtPut(object->GetIsolate(), entry.as_int(), details);
}
static void AddImpl(Handle<JSObject> object, uint32_t index,
@@ -1493,43 +1497,47 @@ class DictionaryElementsAccessor
}
static bool HasEntryImpl(Isolate* isolate, FixedArrayBase store,
- uint32_t entry) {
+ InternalIndex entry) {
DisallowHeapAllocation no_gc;
NumberDictionary dict = NumberDictionary::cast(store);
- Object index = dict.KeyAt(entry);
+ Object index = dict.KeyAt(entry.as_int());
return !index.IsTheHole(isolate);
}
- static uint32_t GetIndexForEntryImpl(FixedArrayBase store, uint32_t entry) {
+ static uint32_t GetIndexForEntryImpl(FixedArrayBase store,
+ InternalIndex entry) {
DisallowHeapAllocation no_gc;
NumberDictionary dict = NumberDictionary::cast(store);
uint32_t result = 0;
- CHECK(dict.KeyAt(entry).ToArrayIndex(&result));
+ CHECK(dict.KeyAt(entry.as_int()).ToArrayIndex(&result));
return result;
}
- static uint32_t GetEntryForIndexImpl(Isolate* isolate, JSObject holder,
- FixedArrayBase store, uint32_t index,
- PropertyFilter filter) {
+ static InternalIndex GetEntryForIndexImpl(Isolate* isolate, JSObject holder,
+ FixedArrayBase store,
+ uint32_t index,
+ PropertyFilter filter) {
DisallowHeapAllocation no_gc;
NumberDictionary dictionary = NumberDictionary::cast(store);
int entry = dictionary.FindEntry(isolate, index);
- if (entry == NumberDictionary::kNotFound) return kMaxUInt32;
+ if (entry == NumberDictionary::kNotFound) {
+ return InternalIndex::NotFound();
+ }
if (filter != ALL_PROPERTIES) {
PropertyDetails details = dictionary.DetailsAt(entry);
PropertyAttributes attr = details.attributes();
- if ((attr & filter) != 0) return kMaxUInt32;
+ if ((attr & filter) != 0) return InternalIndex::NotFound();
}
- return static_cast<uint32_t>(entry);
+ return InternalIndex(entry);
}
- static PropertyDetails GetDetailsImpl(JSObject holder, uint32_t entry) {
+ static PropertyDetails GetDetailsImpl(JSObject holder, InternalIndex entry) {
return GetDetailsImpl(holder.elements(), entry);
}
static PropertyDetails GetDetailsImpl(FixedArrayBase backing_store,
- uint32_t entry) {
- return NumberDictionary::cast(backing_store).DetailsAt(entry);
+ InternalIndex entry) {
+ return NumberDictionary::cast(backing_store).DetailsAt(entry.as_int());
}
static uint32_t FilterKey(Handle<NumberDictionary> dictionary, int entry,
@@ -1688,7 +1696,8 @@ class DictionaryElementsAccessor
continue;
}
- PropertyDetails details = GetDetailsImpl(*dictionary, entry);
+ PropertyDetails details =
+ GetDetailsImpl(*dictionary, InternalIndex(entry));
switch (details.kind()) {
case kData: {
Object element_k = dictionary->ValueAt(entry);
@@ -1757,7 +1766,8 @@ class DictionaryElementsAccessor
int entry = dictionary->FindEntry(isolate, k);
if (entry == NumberDictionary::kNotFound) continue;
- PropertyDetails details = GetDetailsImpl(*dictionary, entry);
+ PropertyDetails details =
+ GetDetailsImpl(*dictionary, InternalIndex(entry));
switch (details.kind()) {
case kData: {
Object element_k = dictionary->ValueAt(entry);
@@ -1863,7 +1873,8 @@ class FastElementsAccessor : public ElementsAccessorBase<Subclass, KindTraits> {
if (BackingStore::cast(*store).is_the_hole(isolate, i)) continue;
}
max_number_key = i;
- Handle<Object> value = Subclass::GetImpl(isolate, *store, i);
+ Handle<Object> value =
+ Subclass::GetImpl(isolate, *store, InternalIndex(i));
dictionary =
NumberDictionary::Add(isolate, dictionary, i, value, details);
j++;
@@ -1971,11 +1982,12 @@ class FastElementsAccessor : public ElementsAccessorBase<Subclass, KindTraits> {
}
static void ReconfigureImpl(Handle<JSObject> object,
- Handle<FixedArrayBase> store, uint32_t entry,
+ Handle<FixedArrayBase> store, InternalIndex entry,
Handle<Object> value,
PropertyAttributes attributes) {
Handle<NumberDictionary> dictionary = JSObject::NormalizeElements(object);
- entry = dictionary->FindEntry(object->GetIsolate(), entry);
+ entry = InternalIndex(
+ dictionary->FindEntry(object->GetIsolate(), entry.as_uint32()));
DictionaryElementsAccessor::ReconfigureImpl(object, dictionary, entry,
value, attributes);
}
@@ -2000,10 +2012,10 @@ class FastElementsAccessor : public ElementsAccessorBase<Subclass, KindTraits> {
JSObject::EnsureWritableFastElements(object);
}
}
- Subclass::SetImpl(object, index, *value);
+ Subclass::SetImpl(object, InternalIndex(index), *value);
}
- static void DeleteImpl(Handle<JSObject> obj, uint32_t entry) {
+ static void DeleteImpl(Handle<JSObject> obj, InternalIndex entry) {
ElementsKind kind = KindTraits::Kind;
if (IsFastPackedElementsKind(kind) ||
kind == PACKED_NONEXTENSIBLE_ELEMENTS) {
@@ -2013,12 +2025,14 @@ class FastElementsAccessor : public ElementsAccessorBase<Subclass, KindTraits> {
IsNonextensibleElementsKind(kind)) {
JSObject::EnsureWritableFastElements(obj);
}
- DeleteCommon(obj, entry, handle(obj->elements(), obj->GetIsolate()));
+ DeleteCommon(obj, entry.as_uint32(),
+ handle(obj->elements(), obj->GetIsolate()));
}
static bool HasEntryImpl(Isolate* isolate, FixedArrayBase backing_store,
- uint32_t entry) {
- return !BackingStore::cast(backing_store).is_the_hole(isolate, entry);
+ InternalIndex entry) {
+ return !BackingStore::cast(backing_store)
+ .is_the_hole(isolate, entry.as_int());
}
static uint32_t NumberOfElementsImpl(JSObject receiver,
@@ -2028,7 +2042,9 @@ class FastElementsAccessor : public ElementsAccessorBase<Subclass, KindTraits> {
Isolate* isolate = receiver.GetIsolate();
uint32_t count = 0;
for (uint32_t i = 0; i < max_index; i++) {
- if (Subclass::HasEntryImpl(isolate, backing_store, i)) count++;
+ if (Subclass::HasEntryImpl(isolate, backing_store, InternalIndex(i))) {
+ count++;
+ }
}
return count;
}
@@ -2041,9 +2057,9 @@ class FastElementsAccessor : public ElementsAccessorBase<Subclass, KindTraits> {
uint32_t length = Subclass::GetMaxNumberOfEntries(*receiver, *elements);
for (uint32_t i = 0; i < length; i++) {
if (IsFastPackedElementsKind(KindTraits::Kind) ||
- HasEntryImpl(isolate, *elements, i)) {
+ HasEntryImpl(isolate, *elements, InternalIndex(i))) {
RETURN_FAILURE_IF_NOT_SUCCESSFUL(accumulator->AddKey(
- Subclass::GetImpl(isolate, *elements, i), convert));
+ Subclass::GetImpl(isolate, *elements, InternalIndex(i)), convert));
}
}
return ExceptionStatus::kSuccess;
@@ -2157,7 +2173,7 @@ class FastElementsAccessor : public ElementsAccessorBase<Subclass, KindTraits> {
DCHECK_LE(end, Subclass::GetCapacityImpl(*receiver, receiver->elements()));
for (uint32_t index = start; index < end; ++index) {
- Subclass::SetImpl(receiver, index, *obj_value);
+ Subclass::SetImpl(receiver, InternalIndex(index), *obj_value);
}
return *receiver;
}
@@ -2311,9 +2327,10 @@ class FastElementsAccessor : public ElementsAccessorBase<Subclass, KindTraits> {
Handle<FixedArray> result = isolate->factory()->NewFixedArray(length);
Handle<FixedArrayBase> elements(object->elements(), isolate);
for (uint32_t i = 0; i < length; i++) {
- if (!Subclass::HasElementImpl(isolate, *object, i, *elements)) continue;
+ InternalIndex entry(i);
+ if (!Subclass::HasEntryImpl(isolate, *elements, entry)) continue;
Handle<Object> value;
- value = Subclass::GetImpl(isolate, *elements, i);
+ value = Subclass::GetImpl(isolate, *elements, entry);
if (value->IsName()) {
value = isolate->factory()->InternalizeName(Handle<Name>::cast(value));
}
@@ -2336,7 +2353,7 @@ class FastElementsAccessor : public ElementsAccessorBase<Subclass, KindTraits> {
int new_length = length - 1;
int remove_index = remove_position == AT_START ? 0 : new_length;
Handle<Object> result =
- Subclass::GetImpl(isolate, *backing_store, remove_index);
+ Subclass::GetImpl(isolate, *backing_store, InternalIndex(remove_index));
if (remove_position == AT_START) {
Subclass::MoveElements(isolate, receiver, backing_store, 0, 1, new_length,
0, 0);
@@ -2396,7 +2413,8 @@ class FastElementsAccessor : public ElementsAccessorBase<Subclass, KindTraits> {
for (uint32_t i = 0; i < copy_size; i++) {
Object argument = (*args)[src_index + i];
DCHECK(!argument.IsTheHole());
- Subclass::SetImpl(raw_backing_store, dst_index + i, argument, mode);
+ Subclass::SetImpl(raw_backing_store, InternalIndex(dst_index + i),
+ argument, mode);
}
}
};
@@ -2405,22 +2423,22 @@ template <typename Subclass, typename KindTraits>
class FastSmiOrObjectElementsAccessor
: public FastElementsAccessor<Subclass, KindTraits> {
public:
- static inline void SetImpl(Handle<JSObject> holder, uint32_t entry,
+ static inline void SetImpl(Handle<JSObject> holder, InternalIndex entry,
Object value) {
SetImpl(holder->elements(), entry, value);
}
- static inline void SetImpl(FixedArrayBase backing_store, uint32_t entry,
+ static inline void SetImpl(FixedArrayBase backing_store, InternalIndex entry,
Object value) {
- FixedArray::cast(backing_store).set(entry, value);
+ FixedArray::cast(backing_store).set(entry.as_int(), value);
}
- static inline void SetImpl(FixedArrayBase backing_store, uint32_t entry,
+ static inline void SetImpl(FixedArrayBase backing_store, InternalIndex entry,
Object value, WriteBarrierMode mode) {
- FixedArray::cast(backing_store).set(entry, value, mode);
+ FixedArray::cast(backing_store).set(entry.as_int(), value, mode);
}
- static Object GetRaw(FixedArray backing_store, uint32_t entry) {
+ static Object GetRaw(FixedArray backing_store, InternalIndex entry) {
uint32_t index = Subclass::GetIndexForEntryImpl(backing_store, entry);
return backing_store.get(index);
}
@@ -2488,8 +2506,9 @@ class FastSmiOrObjectElementsAccessor
isolate);
uint32_t length = elements->length();
for (uint32_t index = 0; index < length; ++index) {
- if (!Subclass::HasEntryImpl(isolate, *elements, index)) continue;
- Handle<Object> value = Subclass::GetImpl(isolate, *elements, index);
+ InternalIndex entry(index);
+ if (!Subclass::HasEntryImpl(isolate, *elements, entry)) continue;
+ Handle<Object> value = Subclass::GetImpl(isolate, *elements, entry);
value = MakeEntryPair(isolate, index, value);
values_or_entries->set(count++, *value);
}
@@ -2499,8 +2518,9 @@ class FastSmiOrObjectElementsAccessor
FixedArray elements = FixedArray::cast(object->elements());
uint32_t length = elements.length();
for (uint32_t index = 0; index < length; ++index) {
- if (!Subclass::HasEntryImpl(isolate, elements, index)) continue;
- Object value = GetRaw(elements, index);
+ InternalIndex entry(index);
+ if (!Subclass::HasEntryImpl(isolate, elements, entry)) continue;
+ Object value = GetRaw(elements, entry);
values_or_entries->set(count++, value);
}
}
@@ -2641,7 +2661,7 @@ class FastSealedObjectElementsAccessor
UNREACHABLE();
}
- static void DeleteImpl(Handle<JSObject> obj, uint32_t entry) {
+ static void DeleteImpl(Handle<JSObject> obj, InternalIndex entry) {
UNREACHABLE();
}
@@ -2733,17 +2753,17 @@ class FastFrozenObjectElementsAccessor
public:
using BackingStore = typename KindTraits::BackingStore;
- static inline void SetImpl(Handle<JSObject> holder, uint32_t entry,
+ static inline void SetImpl(Handle<JSObject> holder, InternalIndex entry,
Object value) {
UNREACHABLE();
}
- static inline void SetImpl(FixedArrayBase backing_store, uint32_t entry,
+ static inline void SetImpl(FixedArrayBase backing_store, InternalIndex entry,
Object value) {
UNREACHABLE();
}
- static inline void SetImpl(FixedArrayBase backing_store, uint32_t entry,
+ static inline void SetImpl(FixedArrayBase backing_store, InternalIndex entry,
Object value, WriteBarrierMode mode) {
UNREACHABLE();
}
@@ -2753,7 +2773,7 @@ class FastFrozenObjectElementsAccessor
UNREACHABLE();
}
- static void DeleteImpl(Handle<JSObject> obj, uint32_t entry) {
+ static void DeleteImpl(Handle<JSObject> obj, InternalIndex entry) {
UNREACHABLE();
}
@@ -2787,7 +2807,7 @@ class FastFrozenObjectElementsAccessor
}
static void ReconfigureImpl(Handle<JSObject> object,
- Handle<FixedArrayBase> store, uint32_t entry,
+ Handle<FixedArrayBase> store, InternalIndex entry,
Handle<Object> value,
PropertyAttributes attributes) {
UNREACHABLE();
@@ -2816,24 +2836,24 @@ class FastDoubleElementsAccessor
: public FastElementsAccessor<Subclass, KindTraits> {
public:
static Handle<Object> GetImpl(Isolate* isolate, FixedArrayBase backing_store,
- uint32_t entry) {
- return FixedDoubleArray::get(FixedDoubleArray::cast(backing_store), entry,
- isolate);
+ InternalIndex entry) {
+ return FixedDoubleArray::get(FixedDoubleArray::cast(backing_store),
+ entry.as_int(), isolate);
}
- static inline void SetImpl(Handle<JSObject> holder, uint32_t entry,
+ static inline void SetImpl(Handle<JSObject> holder, InternalIndex entry,
Object value) {
SetImpl(holder->elements(), entry, value);
}
- static inline void SetImpl(FixedArrayBase backing_store, uint32_t entry,
+ static inline void SetImpl(FixedArrayBase backing_store, InternalIndex entry,
Object value) {
- FixedDoubleArray::cast(backing_store).set(entry, value.Number());
+ FixedDoubleArray::cast(backing_store).set(entry.as_int(), value.Number());
}
- static inline void SetImpl(FixedArrayBase backing_store, uint32_t entry,
+ static inline void SetImpl(FixedArrayBase backing_store, InternalIndex entry,
Object value, WriteBarrierMode mode) {
- FixedDoubleArray::cast(backing_store).set(entry, value.Number());
+ FixedDoubleArray::cast(backing_store).set(entry.as_int(), value.Number());
}
static void CopyElementsImpl(Isolate* isolate, FixedArrayBase from,
@@ -2890,8 +2910,9 @@ class FastDoubleElementsAccessor
int count = 0;
uint32_t length = elements->length();
for (uint32_t index = 0; index < length; ++index) {
- if (!Subclass::HasEntryImpl(isolate, *elements, index)) continue;
- Handle<Object> value = Subclass::GetImpl(isolate, *elements, index);
+ InternalIndex entry(index);
+ if (!Subclass::HasEntryImpl(isolate, *elements, entry)) continue;
+ Handle<Object> value = Subclass::GetImpl(isolate, *elements, entry);
if (get_entries) {
value = MakeEntryPair(isolate, index, value);
}
@@ -2988,11 +3009,12 @@ class TypedElementsAccessor
// Conversion of scalar value to handlified object.
static Handle<Object> ToHandle(Isolate* isolate, ElementType value);
- static void SetImpl(Handle<JSObject> holder, uint32_t entry, Object value) {
+ static void SetImpl(Handle<JSObject> holder, InternalIndex entry,
+ Object value) {
Handle<JSTypedArray> typed_array = Handle<JSTypedArray>::cast(holder);
- DCHECK_LE(entry, typed_array->length());
- SetImpl(static_cast<ElementType*>(typed_array->DataPtr()), entry,
- FromObject(value));
+ DCHECK_LE(entry.raw_value(), typed_array->length());
+ SetImpl(static_cast<ElementType*>(typed_array->DataPtr()),
+ entry.raw_value(), FromObject(value));
}
static void SetImpl(ElementType* data_ptr, size_t entry, ElementType value) {
@@ -3019,18 +3041,18 @@ class TypedElementsAccessor
}
static Handle<Object> GetInternalImpl(Handle<JSObject> holder,
- uint32_t entry) {
+ InternalIndex entry) {
Handle<JSTypedArray> typed_array = Handle<JSTypedArray>::cast(holder);
Isolate* isolate = typed_array->GetIsolate();
- DCHECK_LE(entry, typed_array->length());
+ DCHECK_LE(entry.raw_value(), typed_array->length());
DCHECK(!typed_array->WasDetached());
- ElementType elem =
- GetImpl(static_cast<ElementType*>(typed_array->DataPtr()), entry);
+ ElementType elem = GetImpl(
+ static_cast<ElementType*>(typed_array->DataPtr()), entry.raw_value());
return ToHandle(isolate, elem);
}
static Handle<Object> GetImpl(Isolate* isolate, FixedArrayBase backing_store,
- uint32_t entry) {
+ InternalIndex entry) {
UNREACHABLE();
}
@@ -3059,12 +3081,12 @@ class TypedElementsAccessor
return result;
}
- static PropertyDetails GetDetailsImpl(JSObject holder, uint32_t entry) {
+ static PropertyDetails GetDetailsImpl(JSObject holder, InternalIndex entry) {
return PropertyDetails(kData, DONT_DELETE, PropertyCellType::kNoCell);
}
static PropertyDetails GetDetailsImpl(FixedArrayBase backing_store,
- uint32_t entry) {
+ InternalIndex entry) {
return PropertyDetails(kData, DONT_DELETE, PropertyCellType::kNoCell);
}
@@ -3085,21 +3107,22 @@ class TypedElementsAccessor
UNREACHABLE();
}
- static void DeleteImpl(Handle<JSObject> obj, uint32_t entry) {
+ static void DeleteImpl(Handle<JSObject> obj, InternalIndex entry) {
UNREACHABLE();
}
static uint32_t GetIndexForEntryImpl(FixedArrayBase backing_store,
- uint32_t entry) {
- return entry;
+ InternalIndex entry) {
+ return entry.as_uint32();
}
- static uint32_t GetEntryForIndexImpl(Isolate* isolate, JSObject holder,
- FixedArrayBase backing_store,
- uint32_t index, PropertyFilter filter) {
+ static InternalIndex GetEntryForIndexImpl(Isolate* isolate, JSObject holder,
+ FixedArrayBase backing_store,
+ uint32_t index,
+ PropertyFilter filter) {
return index < AccessorClass::GetCapacityImpl(holder, backing_store)
- ? index
- : kMaxUInt32;
+ ? InternalIndex(index)
+ : InternalIndex::NotFound();
}
static uint32_t GetCapacityImpl(JSObject holder,
@@ -3122,7 +3145,8 @@ class TypedElementsAccessor
Handle<FixedArrayBase> elements(receiver->elements(), isolate);
uint32_t length = AccessorClass::GetCapacityImpl(*receiver, *elements);
for (uint32_t i = 0; i < length; i++) {
- Handle<Object> value = AccessorClass::GetInternalImpl(receiver, i);
+ Handle<Object> value =
+ AccessorClass::GetInternalImpl(receiver, InternalIndex(i));
RETURN_FAILURE_IF_NOT_SUCCESSFUL(accumulator->AddKey(value, convert));
}
return ExceptionStatus::kSuccess;
@@ -3137,7 +3161,8 @@ class TypedElementsAccessor
Handle<FixedArrayBase> elements(object->elements(), isolate);
uint32_t length = AccessorClass::GetCapacityImpl(*object, *elements);
for (uint32_t index = 0; index < length; ++index) {
- Handle<Object> value = AccessorClass::GetInternalImpl(object, index);
+ Handle<Object> value =
+ AccessorClass::GetInternalImpl(object, InternalIndex(index));
if (get_entries) {
value = MakeEntryPair(isolate, index, value);
}
@@ -3361,7 +3386,8 @@ class TypedElementsAccessor
Handle<JSTypedArray> typed_array = Handle<JSTypedArray>::cast(object);
Handle<FixedArray> result = isolate->factory()->NewFixedArray(length);
for (uint32_t i = 0; i < length; i++) {
- Handle<Object> value = AccessorClass::GetInternalImpl(typed_array, i);
+ Handle<Object> value =
+ AccessorClass::GetInternalImpl(typed_array, InternalIndex(i));
result->set(i, *value);
}
return result;
@@ -3499,7 +3525,7 @@ class TypedElementsAccessor
return true;
}
- return !isolate->IsNoElementsProtectorIntact(context);
+ return !Protectors::IsNoElementsIntact(isolate);
}
static bool TryCopyElementsFastNumber(Context context, JSArray source,
@@ -3539,18 +3565,18 @@ class TypedElementsAccessor
if (kind == PACKED_SMI_ELEMENTS) {
FixedArray source_store = FixedArray::cast(source.elements());
- for (uint32_t i = 0; i < length; i++) {
- Object elem = source_store.get(i);
+ for (size_t i = 0; i < length; i++) {
+ Object elem = source_store.get(static_cast<int>(i));
SetImpl(dest_data, i, FromScalar(Smi::ToInt(elem)));
}
return true;
} else if (kind == HOLEY_SMI_ELEMENTS) {
FixedArray source_store = FixedArray::cast(source.elements());
- for (uint32_t i = 0; i < length; i++) {
- if (source_store.is_the_hole(isolate, i)) {
+ for (size_t i = 0; i < length; i++) {
+ if (source_store.is_the_hole(isolate, static_cast<int>(i))) {
SetImpl(dest_data, i, FromObject(undefined));
} else {
- Object elem = source_store.get(i);
+ Object elem = source_store.get(static_cast<int>(i));
SetImpl(dest_data, i, FromScalar(Smi::ToInt(elem)));
}
}
@@ -3560,20 +3586,20 @@ class TypedElementsAccessor
// unboxing the double here by using get_scalar.
FixedDoubleArray source_store = FixedDoubleArray::cast(source.elements());
- for (uint32_t i = 0; i < length; i++) {
+ for (size_t i = 0; i < length; i++) {
// Use the from_double conversion for this specific TypedArray type,
// rather than relying on C++ to convert elem.
- double elem = source_store.get_scalar(i);
+ double elem = source_store.get_scalar(static_cast<int>(i));
SetImpl(dest_data, i, FromScalar(elem));
}
return true;
} else if (kind == HOLEY_DOUBLE_ELEMENTS) {
FixedDoubleArray source_store = FixedDoubleArray::cast(source.elements());
- for (uint32_t i = 0; i < length; i++) {
- if (source_store.is_the_hole(i)) {
+ for (size_t i = 0; i < length; i++) {
+ if (source_store.is_the_hole(static_cast<int>(i))) {
SetImpl(dest_data, i, FromObject(undefined));
} else {
- double elem = source_store.get_scalar(i);
+ double elem = source_store.get_scalar(static_cast<int>(i));
SetImpl(dest_data, i, FromScalar(elem));
}
}
@@ -3588,7 +3614,8 @@ class TypedElementsAccessor
Isolate* isolate = destination->GetIsolate();
for (size_t i = 0; i < length; i++) {
Handle<Object> elem;
- if (i <= kMaxUInt32) {
+ // TODO(4153): This if-branch will subsume its else-branch.
+ if (i <= JSArray::kMaxArrayIndex) {
LookupIterator it(isolate, source, static_cast<uint32_t>(i));
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, elem,
Object::GetProperty(&it));
@@ -3619,8 +3646,7 @@ class TypedElementsAccessor
}
// The spec says we store the length, then get each element, so we don't
// need to check changes to length.
- // TODO(bmeurer, v8:4153): Remove this static_cast.
- SetImpl(destination, static_cast<uint32_t>(offset + i), *elem);
+ SetImpl(destination, InternalIndex(offset + i), *elem);
}
return *isolate->factory()->undefined_value();
}
@@ -3893,14 +3919,14 @@ class SloppyArgumentsElementsAccessor
}
static Handle<Object> GetImpl(Isolate* isolate, FixedArrayBase parameters,
- uint32_t entry) {
+ InternalIndex entry) {
Handle<SloppyArgumentsElements> elements(
SloppyArgumentsElements::cast(parameters), isolate);
uint32_t length = elements->parameter_map_length();
- if (entry < length) {
+ if (entry.as_uint32() < length) {
// Read context mapped entry.
DisallowHeapAllocation no_gc;
- Object probe = elements->get_mapped_entry(entry);
+ Object probe = elements->get_mapped_entry(entry.as_uint32());
DCHECK(!probe.IsTheHole(isolate));
Context context = elements->context();
int context_entry = Smi::ToInt(probe);
@@ -3909,7 +3935,7 @@ class SloppyArgumentsElementsAccessor
} else {
// Entry is not context mapped, defer to the arguments.
Handle<Object> result = ArgumentsAccessor::GetImpl(
- isolate, elements->arguments(), entry - length);
+ isolate, elements->arguments(), entry.adjust_down(length));
return Subclass::ConvertArgumentsStoreResult(isolate, elements, result);
}
}
@@ -3924,19 +3950,19 @@ class SloppyArgumentsElementsAccessor
UNREACHABLE();
}
- static inline void SetImpl(Handle<JSObject> holder, uint32_t entry,
+ static inline void SetImpl(Handle<JSObject> holder, InternalIndex entry,
Object value) {
SetImpl(holder->elements(), entry, value);
}
- static inline void SetImpl(FixedArrayBase store, uint32_t entry,
+ static inline void SetImpl(FixedArrayBase store, InternalIndex entry,
Object value) {
SloppyArgumentsElements elements = SloppyArgumentsElements::cast(store);
uint32_t length = elements.parameter_map_length();
- if (entry < length) {
+ if (entry.as_uint32() < length) {
// Store context mapped entry.
DisallowHeapAllocation no_gc;
- Object probe = elements.get_mapped_entry(entry);
+ Object probe = elements.get_mapped_entry(entry.as_uint32());
DCHECK(!probe.IsTheHole());
Context context = elements.context();
int context_entry = Smi::ToInt(probe);
@@ -3945,7 +3971,8 @@ class SloppyArgumentsElementsAccessor
} else {
// Entry is not context mapped defer to arguments.
FixedArray arguments = elements.arguments();
- Object current = ArgumentsAccessor::GetRaw(arguments, entry - length);
+ Object current =
+ ArgumentsAccessor::GetRaw(arguments, entry.adjust_down(length));
if (current.IsAliasedArgumentsEntry()) {
AliasedArgumentsEntry alias = AliasedArgumentsEntry::cast(current);
Context context = elements.context();
@@ -3953,7 +3980,7 @@ class SloppyArgumentsElementsAccessor
DCHECK(!context.get(context_entry).IsTheHole());
context.set(context_entry, value);
} else {
- ArgumentsAccessor::SetImpl(arguments, entry - length, value);
+ ArgumentsAccessor::SetImpl(arguments, entry.adjust_down(length), value);
}
}
}
@@ -3989,8 +4016,8 @@ class SloppyArgumentsElementsAccessor
FixedArrayBase arguments = elements.arguments();
uint32_t nof_elements = 0;
uint32_t length = elements.parameter_map_length();
- for (uint32_t entry = 0; entry < length; entry++) {
- if (HasParameterMapArg(isolate, elements, entry)) nof_elements++;
+ for (uint32_t index = 0; index < length; index++) {
+ if (HasParameterMapArg(isolate, elements, index)) nof_elements++;
}
return nof_elements +
ArgumentsAccessor::NumberOfElementsImpl(receiver, arguments);
@@ -4002,7 +4029,8 @@ class SloppyArgumentsElementsAccessor
Isolate* isolate = accumulator->isolate();
Handle<FixedArrayBase> elements(receiver->elements(), isolate);
uint32_t length = GetCapacityImpl(*receiver, *elements);
- for (uint32_t entry = 0; entry < length; entry++) {
+ for (uint32_t index = 0; index < length; index++) {
+ InternalIndex entry(index);
if (!HasEntryImpl(isolate, *elements, entry)) continue;
Handle<Object> value = GetImpl(isolate, *elements, entry);
RETURN_FAILURE_IF_NOT_SUCCESSFUL(accumulator->AddKey(value, convert));
@@ -4011,15 +4039,16 @@ class SloppyArgumentsElementsAccessor
}
static bool HasEntryImpl(Isolate* isolate, FixedArrayBase parameters,
- uint32_t entry) {
+ InternalIndex entry) {
SloppyArgumentsElements elements =
SloppyArgumentsElements::cast(parameters);
uint32_t length = elements.parameter_map_length();
- if (entry < length) {
- return HasParameterMapArg(isolate, elements, entry);
+ if (entry.as_uint32() < length) {
+ return HasParameterMapArg(isolate, elements, entry.as_uint32());
}
FixedArrayBase arguments = elements.arguments();
- return ArgumentsAccessor::HasEntryImpl(isolate, arguments, entry - length);
+ return ArgumentsAccessor::HasEntryImpl(isolate, arguments,
+ entry.adjust_down(length));
}
static bool HasAccessorsImpl(JSObject holder, FixedArrayBase backing_store) {
@@ -4030,39 +4059,45 @@ class SloppyArgumentsElementsAccessor
}
static uint32_t GetIndexForEntryImpl(FixedArrayBase parameters,
- uint32_t entry) {
+ InternalIndex entry) {
SloppyArgumentsElements elements =
SloppyArgumentsElements::cast(parameters);
uint32_t length = elements.parameter_map_length();
- if (entry < length) return entry;
+ uint32_t index = entry.as_uint32();
+ if (index < length) return index;
FixedArray arguments = elements.arguments();
- return ArgumentsAccessor::GetIndexForEntryImpl(arguments, entry - length);
+ return ArgumentsAccessor::GetIndexForEntryImpl(arguments,
+ entry.adjust_down(length));
}
- static uint32_t GetEntryForIndexImpl(Isolate* isolate, JSObject holder,
- FixedArrayBase parameters,
- uint32_t index, PropertyFilter filter) {
+ static InternalIndex GetEntryForIndexImpl(Isolate* isolate, JSObject holder,
+ FixedArrayBase parameters,
+ uint32_t index,
+ PropertyFilter filter) {
SloppyArgumentsElements elements =
SloppyArgumentsElements::cast(parameters);
- if (HasParameterMapArg(isolate, elements, index)) return index;
+ if (HasParameterMapArg(isolate, elements, index)) {
+ return InternalIndex(index);
+ }
FixedArray arguments = elements.arguments();
- uint32_t entry = ArgumentsAccessor::GetEntryForIndexImpl(
+ InternalIndex entry = ArgumentsAccessor::GetEntryForIndexImpl(
isolate, holder, arguments, index, filter);
- if (entry == kMaxUInt32) return kMaxUInt32;
+ if (entry.is_not_found()) return entry;
// Arguments entries could overlap with the dictionary entries, hence offset
// them by the number of context mapped entries.
- return elements.parameter_map_length() + entry;
+ return entry.adjust_up(elements.parameter_map_length());
}
- static PropertyDetails GetDetailsImpl(JSObject holder, uint32_t entry) {
+ static PropertyDetails GetDetailsImpl(JSObject holder, InternalIndex entry) {
SloppyArgumentsElements elements =
SloppyArgumentsElements::cast(holder.elements());
uint32_t length = elements.parameter_map_length();
- if (entry < length) {
+ if (entry.as_uint32() < length) {
return PropertyDetails(kData, NONE, PropertyCellType::kNoCell);
}
FixedArray arguments = elements.arguments();
- return ArgumentsAccessor::GetDetailsImpl(arguments, entry - length);
+ return ArgumentsAccessor::GetDetailsImpl(arguments,
+ entry.adjust_down(length));
}
static bool HasParameterMapArg(Isolate* isolate,
@@ -4073,26 +4108,26 @@ class SloppyArgumentsElementsAccessor
return !elements.get_mapped_entry(index).IsTheHole(isolate);
}
- static void DeleteImpl(Handle<JSObject> obj, uint32_t entry) {
+ static void DeleteImpl(Handle<JSObject> obj, InternalIndex entry) {
Handle<SloppyArgumentsElements> elements(
SloppyArgumentsElements::cast(obj->elements()), obj->GetIsolate());
uint32_t length = elements->parameter_map_length();
- uint32_t delete_or_entry = entry;
- if (entry < length) {
- delete_or_entry = kMaxUInt32;
+ InternalIndex delete_or_entry = entry;
+ if (entry.as_uint32() < length) {
+ delete_or_entry = InternalIndex::NotFound();
}
Subclass::SloppyDeleteImpl(obj, elements, delete_or_entry);
// SloppyDeleteImpl allocates a new dictionary elements store. For making
// heap verification happy we postpone clearing out the mapped entry.
- if (entry < length) {
- elements->set_mapped_entry(entry,
+ if (entry.as_uint32() < length) {
+ elements->set_mapped_entry(entry.as_uint32(),
obj->GetReadOnlyRoots().the_hole_value());
}
}
static void SloppyDeleteImpl(Handle<JSObject> obj,
Handle<SloppyArgumentsElements> elements,
- uint32_t entry) {
+ InternalIndex entry) {
// Implemented in subclasses.
UNREACHABLE();
}
@@ -4152,9 +4187,9 @@ class SloppyArgumentsElementsAccessor
for (uint32_t k = start_from; k < length; ++k) {
DCHECK_EQ(object->map(), *original_map);
- uint32_t entry =
+ InternalIndex entry =
GetEntryForIndexImpl(isolate, *object, *elements, k, ALL_PROPERTIES);
- if (entry == kMaxUInt32) {
+ if (entry.is_not_found()) {
if (search_for_hole) return Just(true);
continue;
}
@@ -4193,9 +4228,9 @@ class SloppyArgumentsElementsAccessor
for (uint32_t k = start_from; k < length; ++k) {
DCHECK_EQ(object->map(), *original_map);
- uint32_t entry =
+ InternalIndex entry =
GetEntryForIndexImpl(isolate, *object, *elements, k, ALL_PROPERTIES);
- if (entry == kMaxUInt32) {
+ if (entry.is_not_found()) {
continue;
}
@@ -4246,14 +4281,15 @@ class SlowSloppyArgumentsElementsAccessor
}
static void SloppyDeleteImpl(Handle<JSObject> obj,
Handle<SloppyArgumentsElements> elements,
- uint32_t entry) {
+ InternalIndex entry) {
// No need to delete a context mapped entry from the arguments elements.
- if (entry == kMaxUInt32) return;
+ if (entry.is_not_found()) return;
Isolate* isolate = obj->GetIsolate();
Handle<NumberDictionary> dict(NumberDictionary::cast(elements->arguments()),
isolate);
- int length = elements->parameter_map_length();
- dict = NumberDictionary::DeleteEntry(isolate, dict, entry - length);
+ uint32_t length = elements->parameter_map_length();
+ dict = NumberDictionary::DeleteEntry(isolate, dict,
+ entry.as_uint32() - length);
elements->set_arguments(*dict);
}
static void AddImpl(Handle<JSObject> object, uint32_t index,
@@ -4278,15 +4314,15 @@ class SlowSloppyArgumentsElementsAccessor
}
static void ReconfigureImpl(Handle<JSObject> object,
- Handle<FixedArrayBase> store, uint32_t entry,
+ Handle<FixedArrayBase> store, InternalIndex entry,
Handle<Object> value,
PropertyAttributes attributes) {
Isolate* isolate = object->GetIsolate();
Handle<SloppyArgumentsElements> elements =
Handle<SloppyArgumentsElements>::cast(store);
uint32_t length = elements->parameter_map_length();
- if (entry < length) {
- Object probe = elements->get_mapped_entry(entry);
+ if (entry.as_uint32() < length) {
+ Object probe = elements->get_mapped_entry(entry.as_uint32());
DCHECK(!probe.IsTheHole(isolate));
Context context = elements->context();
int context_entry = Smi::ToInt(probe);
@@ -4294,7 +4330,7 @@ class SlowSloppyArgumentsElementsAccessor
context.set(context_entry, *value);
// Redefining attributes of an aliased element destroys fast aliasing.
- elements->set_mapped_entry(entry,
+ elements->set_mapped_entry(entry.as_uint32(),
ReadOnlyRoots(isolate).the_hole_value());
// For elements that are still writable we re-establish slow aliasing.
if ((attributes & READ_ONLY) == 0) {
@@ -4304,8 +4340,8 @@ class SlowSloppyArgumentsElementsAccessor
PropertyDetails details(kData, attributes, PropertyCellType::kNoCell);
Handle<NumberDictionary> arguments(
NumberDictionary::cast(elements->arguments()), isolate);
- arguments =
- NumberDictionary::Add(isolate, arguments, entry, value, details);
+ arguments = NumberDictionary::Add(isolate, arguments, entry.as_uint32(),
+ value, details);
// If the attributes were NONE, we would have called set rather than
// reconfigure.
DCHECK_NE(NONE, attributes);
@@ -4314,7 +4350,7 @@ class SlowSloppyArgumentsElementsAccessor
} else {
Handle<FixedArrayBase> arguments(elements->arguments(), isolate);
DictionaryElementsAccessor::ReconfigureImpl(
- object, arguments, entry - length, value, attributes);
+ object, arguments, entry.adjust_down(length), value, attributes);
}
}
};
@@ -4346,23 +4382,25 @@ class FastSloppyArgumentsElementsAccessor
static Handle<NumberDictionary> NormalizeArgumentsElements(
Handle<JSObject> object, Handle<SloppyArgumentsElements> elements,
- uint32_t* entry) {
+ InternalIndex* entry) {
Handle<NumberDictionary> dictionary = JSObject::NormalizeElements(object);
elements->set_arguments(*dictionary);
// kMaxUInt32 indicates that a context mapped element got deleted. In this
// case we only normalize the elements (aka. migrate to SLOW_SLOPPY).
- if (*entry == kMaxUInt32) return dictionary;
+ if (entry->is_not_found()) return dictionary;
uint32_t length = elements->parameter_map_length();
- if (*entry >= length) {
+ if (entry->as_uint32() >= length) {
*entry =
- dictionary->FindEntry(object->GetIsolate(), *entry - length) + length;
+ InternalIndex(dictionary->FindEntry(object->GetIsolate(),
+ entry->as_uint32() - length) +
+ length);
}
return dictionary;
}
static void SloppyDeleteImpl(Handle<JSObject> obj,
Handle<SloppyArgumentsElements> elements,
- uint32_t entry) {
+ InternalIndex entry) {
// Always normalize element on deleting an entry.
NormalizeArgumentsElements(obj, elements, &entry);
SlowSloppyArgumentsElementsAccessor::SloppyDeleteImpl(obj, elements, entry);
@@ -4386,11 +4424,12 @@ class FastSloppyArgumentsElementsAccessor
// index to entry explicitly since the slot still contains the hole, so the
// current EntryForIndex would indicate that it is "absent" by returning
// kMaxUInt32.
- FastHoleyObjectElementsAccessor::SetImpl(arguments, index, *value);
+ FastHoleyObjectElementsAccessor::SetImpl(arguments, InternalIndex(index),
+ *value);
}
static void ReconfigureImpl(Handle<JSObject> object,
- Handle<FixedArrayBase> store, uint32_t entry,
+ Handle<FixedArrayBase> store, InternalIndex entry,
Handle<Object> value,
PropertyAttributes attributes) {
DCHECK_EQ(object->elements(), *store);
@@ -4443,63 +4482,67 @@ class StringWrapperElementsAccessor
: public ElementsAccessorBase<Subclass, KindTraits> {
public:
static Handle<Object> GetInternalImpl(Handle<JSObject> holder,
- uint32_t entry) {
+ InternalIndex entry) {
return GetImpl(holder, entry);
}
- static Handle<Object> GetImpl(Handle<JSObject> holder, uint32_t entry) {
+ static Handle<Object> GetImpl(Handle<JSObject> holder, InternalIndex entry) {
Isolate* isolate = holder->GetIsolate();
Handle<String> string(GetString(*holder), isolate);
uint32_t length = static_cast<uint32_t>(string->length());
- if (entry < length) {
+ if (entry.as_uint32() < length) {
return isolate->factory()->LookupSingleCharacterStringFromCode(
- String::Flatten(isolate, string)->Get(entry));
+ String::Flatten(isolate, string)->Get(entry.as_int()));
}
return BackingStoreAccessor::GetImpl(isolate, holder->elements(),
- entry - length);
+ entry.adjust_down(length));
}
static Handle<Object> GetImpl(Isolate* isolate, FixedArrayBase elements,
- uint32_t entry) {
+ InternalIndex entry) {
UNREACHABLE();
}
- static PropertyDetails GetDetailsImpl(JSObject holder, uint32_t entry) {
+ static PropertyDetails GetDetailsImpl(JSObject holder, InternalIndex entry) {
uint32_t length = static_cast<uint32_t>(GetString(holder).length());
- if (entry < length) {
+ if (entry.as_uint32() < length) {
PropertyAttributes attributes =
static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE);
return PropertyDetails(kData, attributes, PropertyCellType::kNoCell);
}
- return BackingStoreAccessor::GetDetailsImpl(holder, entry - length);
+ return BackingStoreAccessor::GetDetailsImpl(holder,
+ entry.adjust_down(length));
}
- static uint32_t GetEntryForIndexImpl(Isolate* isolate, JSObject holder,
- FixedArrayBase backing_store,
- uint32_t index, PropertyFilter filter) {
+ static InternalIndex GetEntryForIndexImpl(Isolate* isolate, JSObject holder,
+ FixedArrayBase backing_store,
+ uint32_t index,
+ PropertyFilter filter) {
uint32_t length = static_cast<uint32_t>(GetString(holder).length());
- if (index < length) return index;
- uint32_t backing_store_entry = BackingStoreAccessor::GetEntryForIndexImpl(
- isolate, holder, backing_store, index, filter);
- if (backing_store_entry == kMaxUInt32) return kMaxUInt32;
- DCHECK(backing_store_entry < kMaxUInt32 - length);
- return backing_store_entry + length;
+ if (index < length) return InternalIndex(index);
+ InternalIndex backing_store_entry =
+ BackingStoreAccessor::GetEntryForIndexImpl(
+ isolate, holder, backing_store, index, filter);
+ if (backing_store_entry.is_not_found()) return backing_store_entry;
+ return backing_store_entry.adjust_up(length);
}
- static void DeleteImpl(Handle<JSObject> holder, uint32_t entry) {
+ static void DeleteImpl(Handle<JSObject> holder, InternalIndex entry) {
uint32_t length = static_cast<uint32_t>(GetString(*holder).length());
- if (entry < length) {
+ if (entry.as_uint32() < length) {
return; // String contents can't be deleted.
}
- BackingStoreAccessor::DeleteImpl(holder, entry - length);
+ BackingStoreAccessor::DeleteImpl(holder, entry.adjust_down(length));
}
- static void SetImpl(Handle<JSObject> holder, uint32_t entry, Object value) {
+ static void SetImpl(Handle<JSObject> holder, InternalIndex entry,
+ Object value) {
uint32_t length = static_cast<uint32_t>(GetString(*holder).length());
- if (entry < length) {
+ if (entry.as_uint32() < length) {
return; // String contents are read-only.
}
- BackingStoreAccessor::SetImpl(holder->elements(), entry - length, value);
+ BackingStoreAccessor::SetImpl(holder->elements(), entry.adjust_down(length),
+ value);
}
static void AddImpl(Handle<JSObject> object, uint32_t index,
@@ -4519,15 +4562,15 @@ class StringWrapperElementsAccessor
}
static void ReconfigureImpl(Handle<JSObject> object,
- Handle<FixedArrayBase> store, uint32_t entry,
+ Handle<FixedArrayBase> store, InternalIndex entry,
Handle<Object> value,
PropertyAttributes attributes) {
uint32_t length = static_cast<uint32_t>(GetString(*object).length());
- if (entry < length) {
+ if (entry.as_uint32() < length) {
return; // String contents can't be reconfigured.
}
- BackingStoreAccessor::ReconfigureImpl(object, store, entry - length, value,
- attributes);
+ BackingStoreAccessor::ReconfigureImpl(
+ object, store, entry.adjust_down(length), value, attributes);
}
V8_WARN_UNUSED_RESULT static ExceptionStatus AddElementsToKeyAccumulatorImpl(
diff --git a/deps/v8/src/objects/elements.h b/deps/v8/src/objects/elements.h
index b7fcd907a3..219a9ad73a 100644
--- a/deps/v8/src/objects/elements.h
+++ b/deps/v8/src/objects/elements.h
@@ -6,6 +6,7 @@
#define V8_OBJECTS_ELEMENTS_H_
#include "src/objects/elements-kind.h"
+#include "src/objects/internal-index.h"
#include "src/objects/keys.h"
#include "src/objects/objects.h"
@@ -50,11 +51,9 @@ class ElementsAccessor {
// Note: this is currently not implemented for string wrapper and
// typed array elements.
- virtual bool HasEntry(JSObject holder, uint32_t entry) = 0;
+ virtual bool HasEntry(JSObject holder, InternalIndex entry) = 0;
- // TODO(cbruni): HasEntry and Get should not be exposed publicly with the
- // entry parameter.
- virtual Handle<Object> Get(Handle<JSObject> holder, uint32_t entry) = 0;
+ virtual Handle<Object> Get(Handle<JSObject> holder, InternalIndex entry) = 0;
virtual bool HasAccessors(JSObject holder) = 0;
virtual uint32_t NumberOfElements(JSObject holder) = 0;
@@ -105,7 +104,8 @@ class ElementsAccessor {
static void InitializeOncePerProcess();
static void TearDown();
- virtual void Set(Handle<JSObject> holder, uint32_t entry, Object value) = 0;
+ virtual void Set(Handle<JSObject> holder, InternalIndex entry,
+ Object value) = 0;
virtual void Add(Handle<JSObject> object, uint32_t index,
Handle<Object> value, PropertyAttributes attributes,
@@ -178,18 +178,18 @@ class ElementsAccessor {
// indices are equivalent to entries. In the NumberDictionary
// ElementsAccessor, entries are mapped to an index using the KeyAt method on
// the NumberDictionary.
- virtual uint32_t GetEntryForIndex(Isolate* isolate, JSObject holder,
- FixedArrayBase backing_store,
- uint32_t index) = 0;
+ virtual InternalIndex GetEntryForIndex(Isolate* isolate, JSObject holder,
+ FixedArrayBase backing_store,
+ uint32_t index) = 0;
- virtual PropertyDetails GetDetails(JSObject holder, uint32_t entry) = 0;
+ virtual PropertyDetails GetDetails(JSObject holder, InternalIndex entry) = 0;
virtual void Reconfigure(Handle<JSObject> object,
- Handle<FixedArrayBase> backing_store, uint32_t entry,
- Handle<Object> value,
+ Handle<FixedArrayBase> backing_store,
+ InternalIndex entry, Handle<Object> value,
PropertyAttributes attributes) = 0;
// Deletes an element in an object.
- virtual void Delete(Handle<JSObject> holder, uint32_t entry) = 0;
+ virtual void Delete(Handle<JSObject> holder, InternalIndex entry) = 0;
// NOTE: this method violates the handlified function signature convention:
// raw pointer parameter |source_holder| in the function that allocates.
diff --git a/deps/v8/src/objects/feedback-cell-inl.h b/deps/v8/src/objects/feedback-cell-inl.h
index e06cfce7de..188666d462 100644
--- a/deps/v8/src/objects/feedback-cell-inl.h
+++ b/deps/v8/src/objects/feedback-cell-inl.h
@@ -17,12 +17,7 @@
namespace v8 {
namespace internal {
-OBJECT_CONSTRUCTORS_IMPL(FeedbackCell, Struct)
-
-CAST_ACCESSOR(FeedbackCell)
-
-ACCESSORS(FeedbackCell, value, HeapObject, kValueOffset)
-INT32_ACCESSORS(FeedbackCell, interrupt_budget, kInterruptBudgetOffset)
+TQ_OBJECT_CONSTRUCTORS_IMPL(FeedbackCell)
void FeedbackCell::clear_padding() {
if (FeedbackCell::kAlignedSize == FeedbackCell::kUnalignedSize) return;
diff --git a/deps/v8/src/objects/feedback-cell.h b/deps/v8/src/objects/feedback-cell.h
index 3c085f72d9..669efaeaec 100644
--- a/deps/v8/src/objects/feedback-cell.h
+++ b/deps/v8/src/objects/feedback-cell.h
@@ -18,7 +18,7 @@ namespace internal {
// number of closures created for a certain function per native
// context. There's at most one FeedbackCell for each function in
// a native context.
-class FeedbackCell : public Struct {
+class FeedbackCell : public TorqueGeneratedFeedbackCell<FeedbackCell, Struct> {
public:
static int GetInitialInterruptBudget() {
if (FLAG_lazy_feedback_allocation) {
@@ -27,19 +27,8 @@ class FeedbackCell : public Struct {
return FLAG_interrupt_budget;
}
- // [value]: value of the cell.
- DECL_ACCESSORS(value, HeapObject)
- DECL_INT32_ACCESSORS(interrupt_budget)
-
- DECL_CAST(FeedbackCell)
-
// Dispatched behavior.
DECL_PRINTER(FeedbackCell)
- DECL_VERIFIER(FeedbackCell)
-
- // Layout description.
- DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize,
- TORQUE_GENERATED_FEEDBACK_CELL_FIELDS)
static const int kUnalignedSize = kSize;
static const int kAlignedSize = RoundUp<kObjectAlignment>(int{kSize});
@@ -50,7 +39,7 @@ class FeedbackCell : public Struct {
using BodyDescriptor =
FixedBodyDescriptor<kValueOffset, kInterruptBudgetOffset, kAlignedSize>;
- OBJECT_CONSTRUCTORS(FeedbackCell, Struct);
+ TQ_OBJECT_CONSTRUCTORS(FeedbackCell)
};
} // namespace internal
diff --git a/deps/v8/src/objects/feedback-vector-inl.h b/deps/v8/src/objects/feedback-vector-inl.h
index 9cdc03b5c2..024b92165d 100644
--- a/deps/v8/src/objects/feedback-vector-inl.h
+++ b/deps/v8/src/objects/feedback-vector-inl.h
@@ -286,10 +286,6 @@ Handle<Symbol> FeedbackVector::MegamorphicSentinel(Isolate* isolate) {
return isolate->factory()->megamorphic_symbol();
}
-Handle<Symbol> FeedbackVector::PremonomorphicSentinel(Isolate* isolate) {
- return isolate->factory()->premonomorphic_symbol();
-}
-
Symbol FeedbackVector::RawUninitializedSentinel(Isolate* isolate) {
return ReadOnlyRoots(isolate).uninitialized_symbol();
}
diff --git a/deps/v8/src/objects/feedback-vector.cc b/deps/v8/src/objects/feedback-vector.cc
index 2fbc48a95e..4fe75ab325 100644
--- a/deps/v8/src/objects/feedback-vector.cc
+++ b/deps/v8/src/objects/feedback-vector.cc
@@ -52,7 +52,6 @@ static bool IsPropertyNameFeedback(MaybeObject feedback) {
Symbol symbol = Symbol::cast(heap_object);
ReadOnlyRoots roots = symbol.GetReadOnlyRoots();
return symbol != roots.uninitialized_symbol() &&
- symbol != roots.premonomorphic_symbol() &&
symbol != roots.megamorphic_symbol();
}
@@ -233,8 +232,8 @@ Handle<FeedbackVector> FeedbackVector::New(
const int slot_count = shared->feedback_metadata().slot_count();
- Handle<FeedbackVector> vector = factory->NewFeedbackVector(
- shared, closure_feedback_cell_array, AllocationType::kOld);
+ Handle<FeedbackVector> vector =
+ factory->NewFeedbackVector(shared, closure_feedback_cell_array);
DCHECK_EQ(vector->length(), slot_count);
@@ -524,12 +523,6 @@ bool FeedbackNexus::Clear() {
return feedback_updated;
}
-void FeedbackNexus::ConfigurePremonomorphic(Handle<Map> receiver_map) {
- SetFeedback(*FeedbackVector::PremonomorphicSentinel(GetIsolate()),
- SKIP_WRITE_BARRIER);
- SetFeedbackExtra(HeapObjectReference::Weak(*receiver_map));
-}
-
bool FeedbackNexus::ConfigureMegamorphic() {
DisallowHeapAllocation no_gc;
Isolate* isolate = GetIsolate();
@@ -585,13 +578,6 @@ InlineCacheState FeedbackNexus::ic_state() const {
case FeedbackSlotKind::kLoadGlobalInsideTypeof: {
if (feedback->IsSmi()) return MONOMORPHIC;
- if (feedback == MaybeObject::FromObject(
- *FeedbackVector::PremonomorphicSentinel(isolate))) {
- DCHECK(kind() == FeedbackSlotKind::kStoreGlobalSloppy ||
- kind() == FeedbackSlotKind::kStoreGlobalStrict);
- return PREMONOMORPHIC;
- }
-
DCHECK(feedback->IsWeakOrCleared());
MaybeObject extra = GetFeedbackExtra();
if (!feedback->IsCleared() ||
@@ -619,10 +605,6 @@ InlineCacheState FeedbackNexus::ic_state() const {
*FeedbackVector::MegamorphicSentinel(isolate))) {
return MEGAMORPHIC;
}
- if (feedback == MaybeObject::FromObject(
- *FeedbackVector::PremonomorphicSentinel(isolate))) {
- return PREMONOMORPHIC;
- }
if (feedback->IsWeakOrCleared()) {
// Don't check if the map is cleared.
return MONOMORPHIC;
@@ -974,14 +956,6 @@ int FeedbackNexus::ExtractMaps(MapHandles* maps) const {
Map map = Map::cast(heap_object);
maps->push_back(handle(map, isolate));
return 1;
- } else if (feedback->GetHeapObjectIfStrong(&heap_object) &&
- heap_object ==
- heap_object.GetReadOnlyRoots().premonomorphic_symbol()) {
- if (GetFeedbackExtra()->GetHeapObjectIfWeak(&heap_object)) {
- Map map = Map::cast(heap_object);
- maps->push_back(handle(map, isolate));
- return 1;
- }
}
return 0;
@@ -1203,9 +1177,11 @@ KeyedAccessStoreMode FeedbackNexus::GetKeyedAccessStoreMode() const {
handler = handle(Code::cast(data_handler->smi_handler()),
vector().GetIsolate());
} else if (maybe_code_handler.object()->IsSmi()) {
- // Skip proxy handlers.
- DCHECK_EQ(*(maybe_code_handler.object()),
- *StoreHandler::StoreProxy(GetIsolate()));
+ // Skip proxy handlers and the slow handler.
+ DCHECK(*(maybe_code_handler.object()) ==
+ *StoreHandler::StoreProxy(GetIsolate()) ||
+ *(maybe_code_handler.object()) ==
+ *StoreHandler::StoreSlow(GetIsolate()));
continue;
} else {
// Element store without prototype chain check.
diff --git a/deps/v8/src/objects/feedback-vector.h b/deps/v8/src/objects/feedback-vector.h
index af03bb4130..1c34266dc8 100644
--- a/deps/v8/src/objects/feedback-vector.h
+++ b/deps/v8/src/objects/feedback-vector.h
@@ -305,9 +305,6 @@ class FeedbackVector : public HeapObject {
// The object that indicates a megamorphic state.
static inline Handle<Symbol> MegamorphicSentinel(Isolate* isolate);
- // The object that indicates a premonomorphic state.
- static inline Handle<Symbol> PremonomorphicSentinel(Isolate* isolate);
-
// A raw version of the uninitialized sentinel that's safe to read during
// garbage collection (e.g., for patching the cache).
static inline Symbol RawUninitializedSentinel(Isolate* isolate);
@@ -567,7 +564,7 @@ class FeedbackMetadata : public HeapObject {
// possibly be confused with a pointer.
// NOLINTNEXTLINE(runtime/references) (false positive)
STATIC_ASSERT((Name::kEmptyHashField & kHeapObjectTag) == kHeapObjectTag);
-STATIC_ASSERT(Name::kEmptyHashField == 0x3);
+STATIC_ASSERT(Name::kEmptyHashField == 0x7);
// Verify that a set hash field will not look like a tagged object.
STATIC_ASSERT(Name::kHashNotComputedMask == kHeapObjectTag);
@@ -657,13 +654,12 @@ class V8_EXPORT_PRIVATE FeedbackNexus final {
bool IsCleared() const {
InlineCacheState state = ic_state();
- return !FLAG_use_ic || state == UNINITIALIZED || state == PREMONOMORPHIC;
+ return !FLAG_use_ic || state == UNINITIALIZED;
}
// Clear() returns true if the state of the underlying vector was changed.
bool Clear();
void ConfigureUninitialized();
- void ConfigurePremonomorphic(Handle<Map> receiver_map);
// ConfigureMegamorphic() returns true if the state of the underlying vector
// was changed. Extra feedback is cleared if the 0 parameter version is used.
bool ConfigureMegamorphic();
diff --git a/deps/v8/src/objects/field-index-inl.h b/deps/v8/src/objects/field-index-inl.h
index 997cd68c32..93ffc59c72 100644
--- a/deps/v8/src/objects/field-index-inl.h
+++ b/deps/v8/src/objects/field-index-inl.h
@@ -60,13 +60,13 @@ int FieldIndex::GetLoadByFieldIndex() const {
return is_double() ? (result | 1) : result;
}
-FieldIndex FieldIndex::ForDescriptor(Map map, int descriptor_index) {
+FieldIndex FieldIndex::ForDescriptor(Map map, InternalIndex descriptor_index) {
Isolate* isolate = GetIsolateForPtrCompr(map);
return ForDescriptor(isolate, map, descriptor_index);
}
FieldIndex FieldIndex::ForDescriptor(Isolate* isolate, Map map,
- int descriptor_index) {
+ InternalIndex descriptor_index) {
PropertyDetails details =
map.instance_descriptors(isolate).GetDetails(descriptor_index);
int field_index = details.field_index();
diff --git a/deps/v8/src/objects/field-index.h b/deps/v8/src/objects/field-index.h
index 4fae87774d..fbde0bc609 100644
--- a/deps/v8/src/objects/field-index.h
+++ b/deps/v8/src/objects/field-index.h
@@ -5,6 +5,8 @@
#ifndef V8_OBJECTS_FIELD_INDEX_H_
#define V8_OBJECTS_FIELD_INDEX_H_
+// TODO(jkummerow): Consider forward-declaring instead.
+#include "src/objects/internal-index.h"
#include "src/objects/property-details.h"
#include "src/utils/utils.h"
@@ -27,9 +29,10 @@ class FieldIndex final {
Map map, int index,
Representation representation = Representation::Tagged());
static inline FieldIndex ForInObjectOffset(int offset, Encoding encoding);
- static inline FieldIndex ForDescriptor(Map map, int descriptor_index);
+ static inline FieldIndex ForDescriptor(Map map,
+ InternalIndex descriptor_index);
static inline FieldIndex ForDescriptor(Isolate* isolate, Map map,
- int descriptor_index);
+ InternalIndex descriptor_index);
inline int GetLoadByFieldIndex() const;
diff --git a/deps/v8/src/objects/fixed-array-inl.h b/deps/v8/src/objects/fixed-array-inl.h
index 79c29a6eeb..9701f8ef09 100644
--- a/deps/v8/src/objects/fixed-array-inl.h
+++ b/deps/v8/src/objects/fixed-array-inl.h
@@ -240,7 +240,7 @@ int BinarySearch(T* array, Name name, int valid_entries,
for (; low <= limit; ++low) {
int sort_index = array->GetSortedKeyIndex(low);
- Name entry = array->GetKey(sort_index);
+ Name entry = array->GetKey(InternalIndex(sort_index));
uint32_t current_hash = entry.hash_field();
if (current_hash != hash) {
if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
@@ -272,7 +272,7 @@ int LinearSearch(T* array, Name name, int valid_entries,
int len = array->number_of_entries();
for (int number = 0; number < len; number++) {
int sorted_index = array->GetSortedKeyIndex(number);
- Name entry = array->GetKey(sorted_index);
+ Name entry = array->GetKey(InternalIndex(sorted_index));
uint32_t current_hash = entry.hash_field();
if (current_hash > hash) {
*out_insertion_index = sorted_index;
@@ -286,7 +286,7 @@ int LinearSearch(T* array, Name name, int valid_entries,
DCHECK_LE(valid_entries, array->number_of_entries());
DCHECK_NULL(out_insertion_index); // Not supported here.
for (int number = 0; number < valid_entries; number++) {
- if (array->GetKey(number) == name) return number;
+ if (array->GetKey(InternalIndex(number)) == name) return number;
}
return T::kNotFound;
}
diff --git a/deps/v8/src/objects/fixed-array.h b/deps/v8/src/objects/fixed-array.h
index 40290797f7..1963eef728 100644
--- a/deps/v8/src/objects/fixed-array.h
+++ b/deps/v8/src/objects/fixed-array.h
@@ -86,14 +86,14 @@ class FixedArrayBase : public HeapObject {
V8_EXPORT_PRIVATE bool IsCowArray() const;
-// Maximal allowed size, in bytes, of a single FixedArrayBase.
-// Prevents overflowing size computations, as well as extreme memory
-// consumption.
-#ifdef V8_HOST_ARCH_32_BIT
- static const int kMaxSize = 512 * MB;
-#else
- static const int kMaxSize = 1024 * MB;
-#endif // V8_HOST_ARCH_32_BIT
+ // Maximal allowed size, in bytes, of a single FixedArrayBase.
+ // Prevents overflowing size computations, as well as extreme memory
+ // consumption. It's either (512Mb - kTaggedSize) or (1024Mb - kTaggedSize).
+ // -kTaggedSize is here to ensure that this max size always fits into Smi
+ // which is necessary for being able to create a free space filler for the
+ // whole array of kMaxSize.
+ static const int kMaxSize = 128 * kTaggedSize * MB - kTaggedSize;
+ STATIC_ASSERT(Smi::IsValid(kMaxSize));
// Layout description.
DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize,
@@ -120,7 +120,7 @@ class FixedArray : public FixedArrayBase {
// Return a grown copy if the index is bigger than the array's length.
V8_EXPORT_PRIVATE static Handle<FixedArray> SetAndGrow(
Isolate* isolate, Handle<FixedArray> array, int index,
- Handle<Object> value, AllocationType allocation = AllocationType::kYoung);
+ Handle<Object> value);
// Setter that uses write barrier.
inline void set(int index, Object value);
@@ -303,7 +303,6 @@ class WeakFixedArray : public HeapObject {
DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize,
TORQUE_GENERATED_WEAK_FIXED_ARRAY_FIELDS)
- static constexpr int kHeaderSize = kSize;
static const int kMaxLength =
(FixedArray::kMaxSize - kHeaderSize) / kTaggedSize;
diff --git a/deps/v8/src/objects/function-kind.h b/deps/v8/src/objects/function-kind.h
index 8e9c68e426..9b0de76126 100644
--- a/deps/v8/src/objects/function-kind.h
+++ b/deps/v8/src/objects/function-kind.h
@@ -14,6 +14,7 @@ enum FunctionKind : uint8_t {
// BEGIN constructable functions
kNormalFunction,
kModule,
+ kAsyncModule,
// BEGIN class constructors
// BEGIN base constructors
kBaseConstructor,
@@ -61,7 +62,11 @@ inline bool IsArrowFunction(FunctionKind kind) {
}
inline bool IsModule(FunctionKind kind) {
- return kind == FunctionKind::kModule;
+ return IsInRange(kind, FunctionKind::kModule, FunctionKind::kAsyncModule);
+}
+
+inline bool IsAsyncModule(FunctionKind kind) {
+ return kind == FunctionKind::kAsyncModule;
}
inline bool IsAsyncGeneratorFunction(FunctionKind kind) {
@@ -163,6 +168,8 @@ inline const char* FunctionKind2String(FunctionKind kind) {
return "AsyncFunction";
case FunctionKind::kModule:
return "Module";
+ case FunctionKind::kAsyncModule:
+ return "AsyncModule";
case FunctionKind::kClassMembersInitializerFunction:
return "ClassMembersInitializerFunction";
case FunctionKind::kDefaultBaseConstructor:
diff --git a/deps/v8/src/objects/heap-number-inl.h b/deps/v8/src/objects/heap-number-inl.h
index 78e65ca231..546b16e93d 100644
--- a/deps/v8/src/objects/heap-number-inl.h
+++ b/deps/v8/src/objects/heap-number-inl.h
@@ -7,8 +7,8 @@
#include "src/objects/heap-number.h"
-#include "src/objects/heap-object-inl.h"
#include "src/objects/objects-inl.h"
+#include "src/objects/primitive-heap-object-inl.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
@@ -16,7 +16,7 @@
namespace v8 {
namespace internal {
-OBJECT_CONSTRUCTORS_IMPL(HeapNumber, HeapObject)
+OBJECT_CONSTRUCTORS_IMPL(HeapNumber, PrimitiveHeapObject)
CAST_ACCESSOR(HeapNumber)
diff --git a/deps/v8/src/objects/heap-number.h b/deps/v8/src/objects/heap-number.h
index 9063f3d22c..0982cc232e 100644
--- a/deps/v8/src/objects/heap-number.h
+++ b/deps/v8/src/objects/heap-number.h
@@ -5,7 +5,7 @@
#ifndef V8_OBJECTS_HEAP_NUMBER_H_
#define V8_OBJECTS_HEAP_NUMBER_H_
-#include "src/objects/heap-object.h"
+#include "src/objects/primitive-heap-object.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
@@ -15,7 +15,7 @@ namespace internal {
// The HeapNumber class describes heap allocated numbers that cannot be
// represented in a Smi (small integer).
-class HeapNumber : public HeapObject {
+class HeapNumber : public PrimitiveHeapObject {
public:
// [value]: number value.
inline double value() const;
@@ -28,7 +28,7 @@ class HeapNumber : public HeapObject {
inline int get_sign();
// Layout description.
- static const int kValueOffset = HeapObject::kHeaderSize;
+ static const int kValueOffset = PrimitiveHeapObject::kHeaderSize;
// IEEE doubles are two 32 bit words. The first is just mantissa, the second
// is a mixture of sign, exponent and mantissa. The offsets of two 32 bit
// words within double numbers are endian dependent and they are set
@@ -59,7 +59,7 @@ class HeapNumber : public HeapObject {
DECL_CAST(HeapNumber)
V8_EXPORT_PRIVATE void HeapNumberPrint(std::ostream& os);
- OBJECT_CONSTRUCTORS(HeapNumber, HeapObject);
+ OBJECT_CONSTRUCTORS(HeapNumber, PrimitiveHeapObject);
};
} // namespace internal
diff --git a/deps/v8/src/objects/instance-type.h b/deps/v8/src/objects/instance-type.h
index 9a855de95b..f993197204 100644
--- a/deps/v8/src/objects/instance-type.h
+++ b/deps/v8/src/objects/instance-type.h
@@ -80,8 +80,8 @@ static inline bool IsShortcutCandidate(int type) {
enum InstanceType : uint16_t {
// String types.
- INTERNALIZED_STRING_TYPE = kTwoByteStringTag | kSeqStringTag |
- kInternalizedTag, // FIRST_PRIMITIVE_TYPE
+ INTERNALIZED_STRING_TYPE =
+ kTwoByteStringTag | kSeqStringTag | kInternalizedTag,
ONE_BYTE_INTERNALIZED_STRING_TYPE =
kOneByteStringTag | kSeqStringTag | kInternalizedTag,
EXTERNAL_INTERNALIZED_STRING_TYPE =
@@ -116,262 +116,41 @@ enum InstanceType : uint16_t {
THIN_ONE_BYTE_STRING_TYPE =
kOneByteStringTag | kThinStringTag | kNotInternalizedTag,
- // Non-string names
- SYMBOL_TYPE =
- 1 + (kIsNotInternalizedMask | kUncachedExternalStringMask |
- kStringEncodingMask |
- kStringRepresentationMask), // FIRST_NONSTRING_TYPE, LAST_NAME_TYPE
-
- // Other primitives (cannot contain non-map-word pointers to heap objects).
- HEAP_NUMBER_TYPE,
- BIGINT_TYPE,
- ODDBALL_TYPE, // LAST_PRIMITIVE_TYPE
-
- // Objects allocated in their own spaces (never in new space).
- MAP_TYPE,
- CODE_TYPE,
-
- // "Data", objects that cannot contain non-map-word pointers to heap
- // objects.
- FOREIGN_TYPE,
- BYTE_ARRAY_TYPE,
- BYTECODE_ARRAY_TYPE,
- FREE_SPACE_TYPE,
- FIXED_DOUBLE_ARRAY_TYPE,
- FEEDBACK_METADATA_TYPE,
- FILLER_TYPE, // LAST_DATA_TYPE
-
- // Structs.
- ACCESS_CHECK_INFO_TYPE,
- ACCESSOR_INFO_TYPE,
- ACCESSOR_PAIR_TYPE,
- ALIASED_ARGUMENTS_ENTRY_TYPE,
- ALLOCATION_MEMENTO_TYPE,
- ARRAY_BOILERPLATE_DESCRIPTION_TYPE,
- ASM_WASM_DATA_TYPE,
- ASYNC_GENERATOR_REQUEST_TYPE,
- CLASS_POSITIONS_TYPE,
- DEBUG_INFO_TYPE,
- ENUM_CACHE_TYPE,
- FUNCTION_TEMPLATE_INFO_TYPE,
- FUNCTION_TEMPLATE_RARE_DATA_TYPE,
- INTERCEPTOR_INFO_TYPE,
- INTERPRETER_DATA_TYPE,
- OBJECT_TEMPLATE_INFO_TYPE,
- PROMISE_CAPABILITY_TYPE,
- PROMISE_REACTION_TYPE,
- PROTOTYPE_INFO_TYPE,
- SCRIPT_TYPE,
- SOURCE_POSITION_TABLE_WITH_FRAME_CACHE_TYPE,
- SOURCE_TEXT_MODULE_INFO_ENTRY_TYPE,
- STACK_FRAME_INFO_TYPE,
- STACK_TRACE_FRAME_TYPE,
- TEMPLATE_OBJECT_DESCRIPTION_TYPE,
- TUPLE2_TYPE,
- TUPLE3_TYPE,
- WASM_CAPI_FUNCTION_DATA_TYPE,
- WASM_DEBUG_INFO_TYPE,
- WASM_EXCEPTION_TAG_TYPE,
- WASM_EXPORTED_FUNCTION_DATA_TYPE,
- WASM_INDIRECT_FUNCTION_TABLE_TYPE,
- WASM_JS_FUNCTION_DATA_TYPE,
-
- CALLABLE_TASK_TYPE, // FIRST_MICROTASK_TYPE
- CALLBACK_TASK_TYPE,
- PROMISE_FULFILL_REACTION_JOB_TASK_TYPE,
- PROMISE_REJECT_REACTION_JOB_TASK_TYPE,
- PROMISE_RESOLVE_THENABLE_JOB_TASK_TYPE, // LAST_MICROTASK_TYPE
-
-#define MAKE_TORQUE_INSTANCE_TYPE(V) V,
- TORQUE_DEFINED_INSTANCE_TYPES(MAKE_TORQUE_INSTANCE_TYPE)
+// Most instance types are defined in Torque, with the exception of the string
+// types above. They are ordered by inheritance hierarchy so that we can easily
+// use range checks to determine whether an object is an instance of a subclass
+// of any type. There are a few more constraints specified in the Torque type
+// definitions:
+// - Some instance types are exposed in v8.h, so they are locked to specific
+// values to not unnecessarily change the ABI.
+// - JSSpecialObject and JSCustomElementsObject are aligned with the beginning
+// of the JSObject range, so that we can use a larger range check from
+// FIRST_JS_RECEIVER_TYPE to the end of those ranges and include JSProxy too.
+// - JSFunction is last, meaning we can use a single inequality check to
+// determine whether an instance type is within the range for any class in the
+// inheritance hierarchy of JSFunction. This includes commonly-checked classes
+// JSObject and JSReceiver.
+#define MAKE_TORQUE_INSTANCE_TYPE(TYPE, value) TYPE = value,
+ TORQUE_ASSIGNED_INSTANCE_TYPES(MAKE_TORQUE_INSTANCE_TYPE)
#undef MAKE_TORQUE_INSTANCE_TYPE
- // Modules
- SOURCE_TEXT_MODULE_TYPE, // FIRST_MODULE_TYPE
- SYNTHETIC_MODULE_TYPE, // LAST_MODULE_TYPE
-
- ALLOCATION_SITE_TYPE,
- EMBEDDER_DATA_ARRAY_TYPE,
- // FixedArrays.
- FIXED_ARRAY_TYPE, // FIRST_FIXED_ARRAY_TYPE
- OBJECT_BOILERPLATE_DESCRIPTION_TYPE,
- CLOSURE_FEEDBACK_CELL_ARRAY_TYPE,
- HASH_TABLE_TYPE, // FIRST_HASH_TABLE_TYPE
- ORDERED_HASH_MAP_TYPE,
- ORDERED_HASH_SET_TYPE,
- ORDERED_NAME_DICTIONARY_TYPE,
- NAME_DICTIONARY_TYPE,
- GLOBAL_DICTIONARY_TYPE,
- NUMBER_DICTIONARY_TYPE,
- SIMPLE_NUMBER_DICTIONARY_TYPE,
- STRING_TABLE_TYPE,
- EPHEMERON_HASH_TABLE_TYPE, // LAST_HASH_TABLE_TYPE
- SCOPE_INFO_TYPE,
- SCRIPT_CONTEXT_TABLE_TYPE, // LAST_FIXED_ARRAY_TYPE,
-
- // Contexts.
- AWAIT_CONTEXT_TYPE, // FIRST_CONTEXT_TYPE
- BLOCK_CONTEXT_TYPE,
- CATCH_CONTEXT_TYPE,
- DEBUG_EVALUATE_CONTEXT_TYPE,
- EVAL_CONTEXT_TYPE,
- FUNCTION_CONTEXT_TYPE,
- MODULE_CONTEXT_TYPE,
- NATIVE_CONTEXT_TYPE,
- SCRIPT_CONTEXT_TYPE,
- WITH_CONTEXT_TYPE, // LAST_CONTEXT_TYPE
-
- WEAK_FIXED_ARRAY_TYPE, // FIRST_WEAK_FIXED_ARRAY_TYPE
- TRANSITION_ARRAY_TYPE, // LAST_WEAK_FIXED_ARRAY_TYPE
-
- // Misc.
- CALL_HANDLER_INFO_TYPE,
- CELL_TYPE,
- CODE_DATA_CONTAINER_TYPE,
- DESCRIPTOR_ARRAY_TYPE,
- FEEDBACK_CELL_TYPE,
- FEEDBACK_VECTOR_TYPE,
- LOAD_HANDLER_TYPE,
- PREPARSE_DATA_TYPE,
- PROPERTY_ARRAY_TYPE,
- PROPERTY_CELL_TYPE,
- SHARED_FUNCTION_INFO_TYPE,
- SMALL_ORDERED_HASH_MAP_TYPE,
- SMALL_ORDERED_HASH_SET_TYPE,
- SMALL_ORDERED_NAME_DICTIONARY_TYPE,
- STORE_HANDLER_TYPE,
- UNCOMPILED_DATA_WITHOUT_PREPARSE_DATA_TYPE,
- UNCOMPILED_DATA_WITH_PREPARSE_DATA_TYPE,
- WEAK_ARRAY_LIST_TYPE,
- WEAK_CELL_TYPE,
-
- // All the following types are subtypes of JSReceiver, which corresponds to
- // objects in the JS sense. The first and the last type in this range are
- // the two forms of function. This organization enables using the same
- // compares for checking the JS_RECEIVER and the NONCALLABLE_JS_OBJECT range.
- // Some of the following instance types are exposed in v8.h, so to not
- // unnecessarily change the ABI when we introduce new instance types in the
- // future, we leave some space between instance types.
- JS_PROXY_TYPE = 0x0400, // FIRST_JS_RECEIVER_TYPE
- JS_GLOBAL_OBJECT_TYPE, // FIRST_JS_OBJECT_TYPE
- JS_GLOBAL_PROXY_TYPE,
- JS_MODULE_NAMESPACE_TYPE,
- // Like JS_API_OBJECT_TYPE, but requires access checks and/or has
- // interceptors.
- JS_SPECIAL_API_OBJECT_TYPE = 0x0410, // LAST_SPECIAL_RECEIVER_TYPE
- JS_PRIMITIVE_WRAPPER_TYPE, // LAST_CUSTOM_ELEMENTS_RECEIVER
- // Like JS_OBJECT_TYPE, but created from API function.
- JS_API_OBJECT_TYPE = 0x0420,
- JS_OBJECT_TYPE,
- JS_ARGUMENTS_TYPE,
- JS_ARRAY_BUFFER_TYPE,
- JS_ARRAY_ITERATOR_TYPE,
- JS_ARRAY_TYPE,
- JS_ASYNC_FROM_SYNC_ITERATOR_TYPE,
- JS_ASYNC_FUNCTION_OBJECT_TYPE,
- JS_ASYNC_GENERATOR_OBJECT_TYPE,
- JS_CONTEXT_EXTENSION_OBJECT_TYPE,
- JS_DATE_TYPE,
- JS_ERROR_TYPE,
- JS_GENERATOR_OBJECT_TYPE,
- JS_MAP_TYPE,
- JS_MAP_KEY_ITERATOR_TYPE,
- JS_MAP_KEY_VALUE_ITERATOR_TYPE,
- JS_MAP_VALUE_ITERATOR_TYPE,
- JS_MESSAGE_OBJECT_TYPE,
- JS_PROMISE_TYPE,
- JS_REGEXP_TYPE,
- JS_REGEXP_STRING_ITERATOR_TYPE,
- JS_SET_TYPE,
- JS_SET_KEY_VALUE_ITERATOR_TYPE,
- JS_SET_VALUE_ITERATOR_TYPE,
- JS_STRING_ITERATOR_TYPE,
- JS_WEAK_REF_TYPE,
- JS_FINALIZATION_GROUP_CLEANUP_ITERATOR_TYPE,
- JS_FINALIZATION_GROUP_TYPE,
- JS_WEAK_MAP_TYPE,
- JS_WEAK_SET_TYPE,
-
- JS_TYPED_ARRAY_TYPE,
- JS_DATA_VIEW_TYPE,
-
-#ifdef V8_INTL_SUPPORT
- JS_INTL_V8_BREAK_ITERATOR_TYPE,
- JS_INTL_COLLATOR_TYPE,
- JS_INTL_DATE_TIME_FORMAT_TYPE,
- JS_INTL_LIST_FORMAT_TYPE,
- JS_INTL_LOCALE_TYPE,
- JS_INTL_NUMBER_FORMAT_TYPE,
- JS_INTL_PLURAL_RULES_TYPE,
- JS_INTL_RELATIVE_TIME_FORMAT_TYPE,
- JS_INTL_SEGMENT_ITERATOR_TYPE,
- JS_INTL_SEGMENTER_TYPE,
-#endif // V8_INTL_SUPPORT
-
- WASM_EXCEPTION_TYPE,
- WASM_GLOBAL_TYPE,
- WASM_INSTANCE_TYPE,
- WASM_MEMORY_TYPE,
- WASM_MODULE_TYPE,
- WASM_TABLE_TYPE,
- JS_BOUND_FUNCTION_TYPE,
- JS_FUNCTION_TYPE, // LAST_JS_OBJECT_TYPE, LAST_JS_RECEIVER_TYPE
-
// Pseudo-types
- FIRST_TYPE = 0x0,
- LAST_TYPE = JS_FUNCTION_TYPE,
- FIRST_STRING_TYPE = FIRST_TYPE,
- FIRST_NAME_TYPE = FIRST_STRING_TYPE,
- LAST_NAME_TYPE = SYMBOL_TYPE,
FIRST_UNIQUE_NAME_TYPE = INTERNALIZED_STRING_TYPE,
LAST_UNIQUE_NAME_TYPE = SYMBOL_TYPE,
FIRST_NONSTRING_TYPE = SYMBOL_TYPE,
- FIRST_PRIMITIVE_TYPE = FIRST_NAME_TYPE,
- LAST_PRIMITIVE_TYPE = ODDBALL_TYPE,
- FIRST_FUNCTION_TYPE = JS_BOUND_FUNCTION_TYPE,
- LAST_FUNCTION_TYPE = JS_FUNCTION_TYPE,
- // Boundaries for testing if given HeapObject is a subclass of FixedArray.
- FIRST_FIXED_ARRAY_TYPE = FIXED_ARRAY_TYPE,
- LAST_FIXED_ARRAY_TYPE = SCRIPT_CONTEXT_TABLE_TYPE,
- // Boundaries for testing if given HeapObject is a subclass of HashTable
- FIRST_HASH_TABLE_TYPE = HASH_TABLE_TYPE,
- LAST_HASH_TABLE_TYPE = EPHEMERON_HASH_TABLE_TYPE,
- // Boundaries for testing if given HeapObject is a subclass of WeakFixedArray.
- FIRST_WEAK_FIXED_ARRAY_TYPE = WEAK_FIXED_ARRAY_TYPE,
- LAST_WEAK_FIXED_ARRAY_TYPE = TRANSITION_ARRAY_TYPE,
- // Boundaries for testing if given HeapObject is a Context
- FIRST_CONTEXT_TYPE = AWAIT_CONTEXT_TYPE,
- LAST_CONTEXT_TYPE = WITH_CONTEXT_TYPE,
- // Boundaries for testing if given HeapObject is a subclass of Microtask.
- FIRST_MICROTASK_TYPE = CALLABLE_TASK_TYPE,
- LAST_MICROTASK_TYPE = PROMISE_RESOLVE_THENABLE_JOB_TASK_TYPE,
- // Boundaries of module record types
- FIRST_MODULE_TYPE = SOURCE_TEXT_MODULE_TYPE,
- LAST_MODULE_TYPE = SYNTHETIC_MODULE_TYPE,
- // Boundary for promotion to old space.
- LAST_DATA_TYPE = FILLER_TYPE,
- // Boundary for objects represented as JSReceiver (i.e. JSObject or JSProxy).
- // Note that there is no range for JSObject or JSProxy, since their subtypes
- // are not continuous in this enum! The enum ranges instead reflect the
- // external class names, where proxies are treated as either ordinary objects,
- // or functions.
- FIRST_JS_RECEIVER_TYPE = JS_PROXY_TYPE,
- LAST_JS_RECEIVER_TYPE = LAST_TYPE,
- // Boundaries for testing the types represented as JSObject
- FIRST_JS_OBJECT_TYPE = JS_GLOBAL_OBJECT_TYPE,
- LAST_JS_OBJECT_TYPE = LAST_TYPE,
// Boundary for testing JSReceivers that need special property lookup handling
- LAST_SPECIAL_RECEIVER_TYPE = JS_SPECIAL_API_OBJECT_TYPE,
+ LAST_SPECIAL_RECEIVER_TYPE = LAST_JS_SPECIAL_OBJECT_TYPE,
// Boundary case for testing JSReceivers that may have elements while having
// an empty fixed array as elements backing store. This is true for string
// wrappers.
- LAST_CUSTOM_ELEMENTS_RECEIVER = JS_PRIMITIVE_WRAPPER_TYPE,
-
- FIRST_SET_ITERATOR_TYPE = JS_SET_KEY_VALUE_ITERATOR_TYPE,
- LAST_SET_ITERATOR_TYPE = JS_SET_VALUE_ITERATOR_TYPE,
-
- FIRST_MAP_ITERATOR_TYPE = JS_MAP_KEY_ITERATOR_TYPE,
- LAST_MAP_ITERATOR_TYPE = JS_MAP_VALUE_ITERATOR_TYPE,
+ LAST_CUSTOM_ELEMENTS_RECEIVER = LAST_JS_CUSTOM_ELEMENTS_OBJECT_TYPE,
+
+ // Convenient names for things where the generated name is awkward:
+ FIRST_TYPE = FIRST_HEAP_OBJECT_TYPE,
+ LAST_TYPE = LAST_HEAP_OBJECT_TYPE,
+ FIRST_FUNCTION_TYPE = FIRST_JS_FUNCTION_OR_BOUND_FUNCTION_TYPE,
+ LAST_FUNCTION_TYPE = LAST_JS_FUNCTION_OR_BOUND_FUNCTION_TYPE,
+ BIGINT_TYPE = BIG_INT_BASE_TYPE,
};
// This constant is defined outside of the InstanceType enum because the
@@ -389,6 +168,40 @@ STATIC_ASSERT(FIRST_NONSTRING_TYPE == Internals::kFirstNonstringType);
STATIC_ASSERT(ODDBALL_TYPE == Internals::kOddballType);
STATIC_ASSERT(FOREIGN_TYPE == Internals::kForeignType);
+// Verify that string types are all less than other types.
+#define CHECK_STRING_RANGE(TYPE, ...) \
+ STATIC_ASSERT(TYPE < FIRST_NONSTRING_TYPE);
+STRING_TYPE_LIST(CHECK_STRING_RANGE)
+#undef CHECK_STRING_RANGE
+#define CHECK_NONSTRING_RANGE(TYPE) STATIC_ASSERT(TYPE >= FIRST_NONSTRING_TYPE);
+TORQUE_ASSIGNED_INSTANCE_TYPE_LIST(CHECK_NONSTRING_RANGE)
+#undef CHECK_NONSTRING_RANGE
+
+// Two ranges don't cleanly follow the inheritance hierarchy. Here we ensure
+// that only expected types fall within these ranges.
+// - From FIRST_JS_RECEIVER_TYPE to LAST_SPECIAL_RECEIVER_TYPE should correspond
+// to the union type JSProxy | JSSpecialObject.
+// - From FIRST_JS_RECEIVER_TYPE to LAST_CUSTOM_ELEMENTS_RECEIVER should
+// correspond to the union type JSProxy | JSCustomElementsObject.
+// Note in particular that these ranges include all subclasses of JSReceiver
+// that are not also subclasses of JSObject (currently only JSProxy).
+#define CHECK_INSTANCE_TYPE(TYPE) \
+ STATIC_ASSERT((TYPE >= FIRST_JS_RECEIVER_TYPE && \
+ TYPE <= LAST_SPECIAL_RECEIVER_TYPE) == \
+ (TYPE == JS_PROXY_TYPE || TYPE == JS_GLOBAL_OBJECT_TYPE || \
+ TYPE == JS_GLOBAL_PROXY_TYPE || \
+ TYPE == JS_MODULE_NAMESPACE_TYPE || \
+ TYPE == JS_SPECIAL_API_OBJECT_TYPE)); \
+ STATIC_ASSERT((TYPE >= FIRST_JS_RECEIVER_TYPE && \
+ TYPE <= LAST_CUSTOM_ELEMENTS_RECEIVER) == \
+ (TYPE == JS_PROXY_TYPE || TYPE == JS_GLOBAL_OBJECT_TYPE || \
+ TYPE == JS_GLOBAL_PROXY_TYPE || \
+ TYPE == JS_MODULE_NAMESPACE_TYPE || \
+ TYPE == JS_SPECIAL_API_OBJECT_TYPE || \
+ TYPE == JS_PRIMITIVE_WRAPPER_TYPE));
+TORQUE_ASSIGNED_INSTANCE_TYPE_LIST(CHECK_INSTANCE_TYPE)
+#undef CHECK_INSTANCE_TYPE
+
// Make sure it doesn't matter whether we sign-extend or zero-extend these
// values, because Torque treats InstanceType as signed.
STATIC_ASSERT(LAST_TYPE < 1 << 15);
@@ -424,8 +237,8 @@ V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream& os,
V(FreeSpace, FREE_SPACE_TYPE) \
V(GlobalDictionary, GLOBAL_DICTIONARY_TYPE) \
V(HeapNumber, HEAP_NUMBER_TYPE) \
- V(JSArgumentsObject, JS_ARGUMENTS_TYPE) \
- V(JSArgumentsObjectWithLength, JS_ARGUMENTS_TYPE) \
+ V(JSArgumentsObject, JS_ARGUMENTS_OBJECT_TYPE) \
+ V(JSArgumentsObjectWithLength, JS_ARGUMENTS_OBJECT_TYPE) \
V(JSArray, JS_ARRAY_TYPE) \
V(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE) \
V(JSArrayIterator, JS_ARRAY_ITERATOR_TYPE) \
@@ -449,9 +262,10 @@ V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream& os,
V(JSPrimitiveWrapper, JS_PRIMITIVE_WRAPPER_TYPE) \
V(JSPromise, JS_PROMISE_TYPE) \
V(JSProxy, JS_PROXY_TYPE) \
- V(JSRegExp, JS_REGEXP_TYPE) \
+ V(JSRegExp, JS_REG_EXP_TYPE) \
V(JSRegExpResult, JS_ARRAY_TYPE) \
- V(JSRegExpStringIterator, JS_REGEXP_STRING_ITERATOR_TYPE) \
+ V(JSRegExpResultIndices, JS_ARRAY_TYPE) \
+ V(JSRegExpStringIterator, JS_REG_EXP_STRING_ITERATOR_TYPE) \
V(JSSet, JS_SET_TYPE) \
V(JSStringIterator, JS_STRING_ITERATOR_TYPE) \
V(JSTypedArray, JS_TYPED_ARRAY_TYPE) \
@@ -487,28 +301,28 @@ V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream& os,
V(UncompiledDataWithoutPreparseData, \
UNCOMPILED_DATA_WITHOUT_PREPARSE_DATA_TYPE) \
V(UncompiledDataWithPreparseData, UNCOMPILED_DATA_WITH_PREPARSE_DATA_TYPE) \
- V(WasmExceptionObject, WASM_EXCEPTION_TYPE) \
- V(WasmGlobalObject, WASM_GLOBAL_TYPE) \
- V(WasmInstanceObject, WASM_INSTANCE_TYPE) \
- V(WasmMemoryObject, WASM_MEMORY_TYPE) \
- V(WasmModuleObject, WASM_MODULE_TYPE) \
- V(WasmTableObject, WASM_TABLE_TYPE) \
+ V(WasmExceptionObject, WASM_EXCEPTION_OBJECT_TYPE) \
+ V(WasmGlobalObject, WASM_GLOBAL_OBJECT_TYPE) \
+ V(WasmInstanceObject, WASM_INSTANCE_OBJECT_TYPE) \
+ V(WasmMemoryObject, WASM_MEMORY_OBJECT_TYPE) \
+ V(WasmModuleObject, WASM_MODULE_OBJECT_TYPE) \
+ V(WasmTableObject, WASM_TABLE_OBJECT_TYPE) \
V(WeakArrayList, WEAK_ARRAY_LIST_TYPE) \
V(WeakCell, WEAK_CELL_TYPE)
#ifdef V8_INTL_SUPPORT
-#define INSTANCE_TYPE_CHECKERS_SINGLE(V) \
- INSTANCE_TYPE_CHECKERS_SINGLE_BASE(V) \
- V(JSV8BreakIterator, JS_INTL_V8_BREAK_ITERATOR_TYPE) \
- V(JSCollator, JS_INTL_COLLATOR_TYPE) \
- V(JSDateTimeFormat, JS_INTL_DATE_TIME_FORMAT_TYPE) \
- V(JSListFormat, JS_INTL_LIST_FORMAT_TYPE) \
- V(JSLocale, JS_INTL_LOCALE_TYPE) \
- V(JSNumberFormat, JS_INTL_NUMBER_FORMAT_TYPE) \
- V(JSPluralRules, JS_INTL_PLURAL_RULES_TYPE) \
- V(JSRelativeTimeFormat, JS_INTL_RELATIVE_TIME_FORMAT_TYPE) \
- V(JSSegmentIterator, JS_INTL_SEGMENT_ITERATOR_TYPE) \
- V(JSSegmenter, JS_INTL_SEGMENTER_TYPE)
+#define INSTANCE_TYPE_CHECKERS_SINGLE(V) \
+ INSTANCE_TYPE_CHECKERS_SINGLE_BASE(V) \
+ V(JSV8BreakIterator, JS_V8_BREAK_ITERATOR_TYPE) \
+ V(JSCollator, JS_COLLATOR_TYPE) \
+ V(JSDateTimeFormat, JS_DATE_TIME_FORMAT_TYPE) \
+ V(JSListFormat, JS_LIST_FORMAT_TYPE) \
+ V(JSLocale, JS_LOCALE_TYPE) \
+ V(JSNumberFormat, JS_NUMBER_FORMAT_TYPE) \
+ V(JSPluralRules, JS_PLURAL_RULES_TYPE) \
+ V(JSRelativeTimeFormat, JS_RELATIVE_TIME_FORMAT_TYPE) \
+ V(JSSegmentIterator, JS_SEGMENT_ITERATOR_TYPE) \
+ V(JSSegmenter, JS_SEGMENTER_TYPE)
#else
@@ -516,16 +330,23 @@ V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream& os,
#endif // V8_INTL_SUPPORT
-#define INSTANCE_TYPE_CHECKERS_RANGE(V) \
- V(Context, FIRST_CONTEXT_TYPE, LAST_CONTEXT_TYPE) \
- V(FixedArray, FIRST_FIXED_ARRAY_TYPE, LAST_FIXED_ARRAY_TYPE) \
- V(HashTable, FIRST_HASH_TABLE_TYPE, LAST_HASH_TABLE_TYPE) \
- V(JSMapIterator, FIRST_MAP_ITERATOR_TYPE, LAST_MAP_ITERATOR_TYPE) \
- V(JSSetIterator, FIRST_SET_ITERATOR_TYPE, LAST_SET_ITERATOR_TYPE) \
- V(Microtask, FIRST_MICROTASK_TYPE, LAST_MICROTASK_TYPE) \
- V(Module, FIRST_MODULE_TYPE, LAST_MODULE_TYPE) \
- V(Name, FIRST_NAME_TYPE, LAST_NAME_TYPE) \
- V(String, FIRST_STRING_TYPE, LAST_STRING_TYPE) \
+#define INSTANCE_TYPE_CHECKERS_RANGE(V) \
+ V(Context, FIRST_CONTEXT_TYPE, LAST_CONTEXT_TYPE) \
+ V(FixedArray, FIRST_FIXED_ARRAY_TYPE, LAST_FIXED_ARRAY_TYPE) \
+ V(HashTable, FIRST_HASH_TABLE_TYPE, LAST_HASH_TABLE_TYPE) \
+ V(JSCustomElementsObject, FIRST_JS_CUSTOM_ELEMENTS_OBJECT_TYPE, \
+ LAST_JS_CUSTOM_ELEMENTS_OBJECT_TYPE) \
+ V(JSFunctionOrBoundFunction, FIRST_FUNCTION_TYPE, LAST_FUNCTION_TYPE) \
+ V(JSMapIterator, FIRST_JS_MAP_ITERATOR_TYPE, LAST_JS_MAP_ITERATOR_TYPE) \
+ V(JSSetIterator, FIRST_JS_SET_ITERATOR_TYPE, LAST_JS_SET_ITERATOR_TYPE) \
+ V(JSSpecialObject, FIRST_JS_SPECIAL_OBJECT_TYPE, \
+ LAST_JS_SPECIAL_OBJECT_TYPE) \
+ V(Microtask, FIRST_MICROTASK_TYPE, LAST_MICROTASK_TYPE) \
+ V(Module, FIRST_MODULE_TYPE, LAST_MODULE_TYPE) \
+ V(Name, FIRST_NAME_TYPE, LAST_NAME_TYPE) \
+ V(PrimitiveHeapObject, FIRST_PRIMITIVE_HEAP_OBJECT_TYPE, \
+ LAST_PRIMITIVE_HEAP_OBJECT_TYPE) \
+ V(String, FIRST_STRING_TYPE, LAST_STRING_TYPE) \
V(WeakFixedArray, FIRST_WEAK_FIXED_ARRAY_TYPE, LAST_WEAK_FIXED_ARRAY_TYPE)
#define INSTANCE_TYPE_CHECKERS_CUSTOM(V) \
diff --git a/deps/v8/src/objects/internal-index.h b/deps/v8/src/objects/internal-index.h
new file mode 100644
index 0000000000..ce7378a901
--- /dev/null
+++ b/deps/v8/src/objects/internal-index.h
@@ -0,0 +1,79 @@
+// Copyright 2019 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_OBJECTS_INTERNAL_INDEX_H_
+#define V8_OBJECTS_INTERNAL_INDEX_H_
+
+#include <stdint.h>
+
+#include <limits>
+
+#include "src/base/logging.h"
+
+namespace v8 {
+namespace internal {
+
+// Simple wrapper around an entry (which is notably different from "index" for
+// dictionary backing stores). Most code should treat this as an opaque
+// wrapper: get it via GetEntryForIndex, pass it on to consumers.
+class InternalIndex {
+ public:
+  explicit InternalIndex(size_t raw) : entry_(raw) {}
+  static InternalIndex NotFound() { return InternalIndex(kNotFound); }
+
+  InternalIndex adjust_down(size_t subtract) const {
+    DCHECK_GE(entry_, subtract);
+    return InternalIndex(entry_ - subtract);
+  }
+  InternalIndex adjust_up(size_t add) const {
+    DCHECK_LT(entry_, std::numeric_limits<size_t>::max() - add);
+    return InternalIndex(entry_ + add);
+  }
+
+  bool is_found() const { return entry_ != kNotFound; }
+  bool is_not_found() const { return entry_ == kNotFound; }
+
+  size_t raw_value() const { return entry_; }
+  uint32_t as_uint32() const {
+    DCHECK_LE(entry_, std::numeric_limits<uint32_t>::max());
+    return static_cast<uint32_t>(entry_);
+  }
+  int as_int() const {
+    DCHECK_LE(entry_, static_cast<size_t>(std::numeric_limits<int>::max()));
+    return static_cast<int>(entry_);
+  }
+
+  bool operator==(const InternalIndex& other) const { return entry_ == other.entry_; }
+
+  // Iteration support.
+  InternalIndex operator*() const { return *this; }
+  bool operator!=(const InternalIndex& other) const { return entry_ != other.entry_; }
+  InternalIndex& operator++() {
+    entry_++;
+    return *this;
+  }
+
+  class Range {
+   public:
+    explicit Range(size_t max) : min_(0), max_(max) {}
+    Range(size_t min, size_t max) : min_(min), max_(max) {}
+
+    InternalIndex begin() const { return InternalIndex(min_); }
+    InternalIndex end() const { return InternalIndex(max_); }
+
+   private:
+    size_t min_;
+    size_t max_;
+  };
+
+ private:
+  static const size_t kNotFound = std::numeric_limits<size_t>::max();
+
+  size_t entry_;
+};
+
+} // namespace internal
+} // namespace v8
+
+#endif // V8_OBJECTS_INTERNAL_INDEX_H_
diff --git a/deps/v8/src/objects/intl-objects.cc b/deps/v8/src/objects/intl-objects.cc
index dbf212aaf8..a6a2fdd229 100644
--- a/deps/v8/src/objects/intl-objects.cc
+++ b/deps/v8/src/objects/intl-objects.cc
@@ -20,6 +20,7 @@
#include "src/objects/js-collator-inl.h"
#include "src/objects/js-date-time-format-inl.h"
#include "src/objects/js-locale-inl.h"
+#include "src/objects/js-locale.h"
#include "src/objects/js-number-format-inl.h"
#include "src/objects/objects-inl.h"
#include "src/objects/property-descriptor.h"
@@ -32,6 +33,7 @@
#include "unicode/datefmt.h"
#include "unicode/decimfmt.h"
#include "unicode/formattedvalue.h"
+#include "unicode/localebuilder.h"
#include "unicode/locid.h"
#include "unicode/normalizer2.h"
#include "unicode/numberformatter.h"
@@ -177,12 +179,13 @@ const UChar* GetUCharBufferFromFlat(const String::FlatContent& flat,
template <typename T>
MaybeHandle<T> New(Isolate* isolate, Handle<JSFunction> constructor,
- Handle<Object> locales, Handle<Object> options) {
+ Handle<Object> locales, Handle<Object> options,
+ const char* method) {
Handle<Map> map;
ASSIGN_RETURN_ON_EXCEPTION(
isolate, map,
JSFunction::GetDerivedMap(isolate, constructor, constructor), T);
- return T::New(isolate, map, locales, options);
+ return T::New(isolate, map, locales, options, method);
}
} // namespace
@@ -783,6 +786,11 @@ Maybe<std::string> Intl::CanonicalizeLanguageTag(Isolate* isolate,
}
std::string locale(locale_str->ToCString().get());
+ if (!IsStructurallyValidLanguageTag(locale)) {
+ THROW_NEW_ERROR_RETURN_VALUE(
+ isolate, NewRangeError(MessageTemplate::kLocaleBadParameters),
+ Nothing<std::string>());
+ }
return Intl::CanonicalizeLanguageTag(isolate, locale);
}
@@ -995,11 +1003,9 @@ MaybeHandle<String> Intl::StringLocaleConvertCase(Isolate* isolate,
}
}
-MaybeHandle<Object> Intl::StringLocaleCompare(Isolate* isolate,
- Handle<String> string1,
- Handle<String> string2,
- Handle<Object> locales,
- Handle<Object> options) {
+MaybeHandle<Object> Intl::StringLocaleCompare(
+ Isolate* isolate, Handle<String> string1, Handle<String> string2,
+ Handle<Object> locales, Handle<Object> options, const char* method) {
// We only cache the instance when both locales and options are undefined,
// as that is the only case when the specified side-effects of examining
// those arguments are unobservable.
@@ -1025,7 +1031,7 @@ MaybeHandle<Object> Intl::StringLocaleCompare(Isolate* isolate,
Handle<JSCollator> collator;
ASSIGN_RETURN_ON_EXCEPTION(
isolate, collator,
- New<JSCollator>(isolate, constructor, locales, options), Object);
+ New<JSCollator>(isolate, constructor, locales, options, method), Object);
if (can_cache) {
isolate->set_icu_object_in_cache(
Isolate::ICUObjectCacheType::kDefaultCollator,
@@ -1084,15 +1090,11 @@ Handle<Object> Intl::CompareStrings(Isolate* isolate,
MaybeHandle<String> Intl::NumberToLocaleString(Isolate* isolate,
Handle<Object> num,
Handle<Object> locales,
- Handle<Object> options) {
+ Handle<Object> options,
+ const char* method) {
Handle<Object> numeric_obj;
- if (FLAG_harmony_intl_bigint) {
- ASSIGN_RETURN_ON_EXCEPTION(isolate, numeric_obj,
- Object::ToNumeric(isolate, num), String);
- } else {
- ASSIGN_RETURN_ON_EXCEPTION(isolate, numeric_obj,
- Object::ToNumber(isolate, num), String);
- }
+ ASSIGN_RETURN_ON_EXCEPTION(isolate, numeric_obj,
+ Object::ToNumeric(isolate, num), String);
// We only cache the instance when both locales and options are undefined,
// as that is the only case when the specified side-effects of examining
@@ -1119,7 +1121,8 @@ MaybeHandle<String> Intl::NumberToLocaleString(Isolate* isolate,
// 2. Let numberFormat be ? Construct(%NumberFormat%, « locales, options »).
ASSIGN_RETURN_ON_EXCEPTION(
isolate, number_format,
- New<JSNumberFormat>(isolate, constructor, locales, options), String);
+ New<JSNumberFormat>(isolate, constructor, locales, options, method),
+ String);
if (can_cache) {
isolate->set_icu_object_in_cache(
@@ -1203,40 +1206,18 @@ Maybe<Intl::NumberFormatDigitOptions> Intl::SetNumberFormatDigitOptions(
int mxfd = 0;
Handle<Object> mnfd_obj;
Handle<Object> mxfd_obj;
- if (FLAG_harmony_intl_numberformat_unified) {
- // 6. Let mnfd be ? Get(options, "minimumFractionDigits").
- Handle<String> mnfd_str = factory->minimumFractionDigits_string();
- ASSIGN_RETURN_ON_EXCEPTION_VALUE(
- isolate, mnfd_obj, JSReceiver::GetProperty(isolate, options, mnfd_str),
- Nothing<NumberFormatDigitOptions>());
-
- // 8. Let mnfd be ? Get(options, "maximumFractionDigits").
- Handle<String> mxfd_str = factory->maximumFractionDigits_string();
- ASSIGN_RETURN_ON_EXCEPTION_VALUE(
- isolate, mxfd_obj, JSReceiver::GetProperty(isolate, options, mxfd_str),
- Nothing<NumberFormatDigitOptions>());
- } else {
- // 6. Let mnfd be ? GetNumberOption(options, "minimumFractionDigits", 0, 20,
- // mnfdDefault).
- if (!Intl::GetNumberOption(isolate, options,
- factory->minimumFractionDigits_string(), 0, 20,
- mnfd_default)
- .To(&mnfd)) {
- return Nothing<NumberFormatDigitOptions>();
- }
- // 7. Let mxfdActualDefault be max( mnfd, mxfdDefault ).
- int mxfd_actual_default = std::max(mnfd, mxfd_default);
+ // 6. Let mnfd be ? Get(options, "minimumFractionDigits").
+ Handle<String> mnfd_str = factory->minimumFractionDigits_string();
+ ASSIGN_RETURN_ON_EXCEPTION_VALUE(
+ isolate, mnfd_obj, JSReceiver::GetProperty(isolate, options, mnfd_str),
+ Nothing<NumberFormatDigitOptions>());
- // 8. Let mxfd be ? GetNumberOption(options,
- // "maximumFractionDigits", mnfd, 20, mxfdActualDefault).
- if (!Intl::GetNumberOption(isolate, options,
- factory->maximumFractionDigits_string(), mnfd,
- 20, mxfd_actual_default)
- .To(&mxfd)) {
- return Nothing<NumberFormatDigitOptions>();
- }
- }
+ // 8. Let mxfd be ? Get(options, "maximumFractionDigits").
+ Handle<String> mxfd_str = factory->maximumFractionDigits_string();
+ ASSIGN_RETURN_ON_EXCEPTION_VALUE(
+ isolate, mxfd_obj, JSReceiver::GetProperty(isolate, options, mxfd_str),
+ Nothing<NumberFormatDigitOptions>());
// 9. Let mnsd be ? Get(options, "minimumSignificantDigits").
Handle<Object> mnsd_obj;
@@ -1285,47 +1266,44 @@ Maybe<Intl::NumberFormatDigitOptions> Intl::SetNumberFormatDigitOptions(
digit_options.minimum_significant_digits = 0;
digit_options.maximum_significant_digits = 0;
- if (FLAG_harmony_intl_numberformat_unified) {
- // 15. Else If mnfd is not undefined or mxfd is not undefined, then
- if (!mnfd_obj->IsUndefined(isolate) || !mxfd_obj->IsUndefined(isolate)) {
- // 15. b. Let mnfd be ? DefaultNumberOption(mnfd, 0, 20, mnfdDefault).
- Handle<String> mnfd_str = factory->minimumFractionDigits_string();
- if (!DefaultNumberOption(isolate, mnfd_obj, 0, 20, mnfd_default,
- mnfd_str)
- .To(&mnfd)) {
- return Nothing<NumberFormatDigitOptions>();
- }
-
- // 15. c. Let mxfdActualDefault be max( mnfd, mxfdDefault ).
- int mxfd_actual_default = std::max(mnfd, mxfd_default);
+ // 15. Else If mnfd is not undefined or mxfd is not undefined, then
+ if (!mnfd_obj->IsUndefined(isolate) || !mxfd_obj->IsUndefined(isolate)) {
+ // 15. b. Let mnfd be ? DefaultNumberOption(mnfd, 0, 20, mnfdDefault).
+ Handle<String> mnfd_str = factory->minimumFractionDigits_string();
+ if (!DefaultNumberOption(isolate, mnfd_obj, 0, 20, mnfd_default, mnfd_str)
+ .To(&mnfd)) {
+ return Nothing<NumberFormatDigitOptions>();
+ }
- // 15. d. Let mxfd be ? DefaultNumberOption(mxfd, mnfd, 20,
- // mxfdActualDefault).
- Handle<String> mxfd_str = factory->maximumFractionDigits_string();
- if (!DefaultNumberOption(isolate, mxfd_obj, mnfd, 20,
- mxfd_actual_default, mxfd_str)
- .To(&mxfd)) {
- return Nothing<NumberFormatDigitOptions>();
- }
- // 15. e. Set intlObj.[[MinimumFractionDigits]] to mnfd.
- digit_options.minimum_fraction_digits = mnfd;
-
- // 15. f. Set intlObj.[[MaximumFractionDigits]] to mxfd.
- digit_options.maximum_fraction_digits = mxfd;
- // Else If intlObj.[[Notation]] is "compact", then
- } else if (notation_is_compact) {
- // a. Set intlObj.[[RoundingType]] to "compact-rounding".
- // Set minimum_significant_digits to -1 to represent roundingtype is
- // "compact-rounding".
- digit_options.minimum_significant_digits = -1;
- // 17. Else,
- } else {
- // 17. b. Set intlObj.[[MinimumFractionDigits]] to mnfdDefault.
- digit_options.minimum_fraction_digits = mnfd_default;
+ // 15. c. Let mxfdActualDefault be max( mnfd, mxfdDefault ).
+ int mxfd_actual_default = std::max(mnfd, mxfd_default);
- // 17. c. Set intlObj.[[MaximumFractionDigits]] to mxfdDefault.
- digit_options.maximum_fraction_digits = mxfd_default;
+ // 15. d. Let mxfd be ? DefaultNumberOption(mxfd, mnfd, 20,
+ // mxfdActualDefault).
+ Handle<String> mxfd_str = factory->maximumFractionDigits_string();
+ if (!DefaultNumberOption(isolate, mxfd_obj, mnfd, 20, mxfd_actual_default,
+ mxfd_str)
+ .To(&mxfd)) {
+ return Nothing<NumberFormatDigitOptions>();
}
+ // 15. e. Set intlObj.[[MinimumFractionDigits]] to mnfd.
+ digit_options.minimum_fraction_digits = mnfd;
+
+ // 15. f. Set intlObj.[[MaximumFractionDigits]] to mxfd.
+ digit_options.maximum_fraction_digits = mxfd;
+ // Else If intlObj.[[Notation]] is "compact", then
+ } else if (notation_is_compact) {
+ // a. Set intlObj.[[RoundingType]] to "compact-rounding".
+ // Set minimum_significant_digits to -1 to represent roundingtype is
+ // "compact-rounding".
+ digit_options.minimum_significant_digits = -1;
+ // 17. Else,
+ } else {
+ // 17. b. Set intlObj.[[MinimumFractionDigits]] to mnfdDefault.
+ digit_options.minimum_fraction_digits = mnfd_default;
+
+ // 17. c. Set intlObj.[[MaximumFractionDigits]] to mxfdDefault.
+ digit_options.maximum_fraction_digits = mxfd_default;
}
}
return Just(digit_options);
@@ -1605,14 +1583,16 @@ bool IsValidCollation(const icu::Locale& locale, const std::string& value) {
} // namespace
+bool Intl::IsWellFormedCalendar(const std::string& value) {
+ return JSLocale::Is38AlphaNumList(value);
+}
+
bool Intl::IsValidCalendar(const icu::Locale& locale,
const std::string& value) {
return IsValidExtension<icu::Calendar>(locale, "calendar", value);
}
-namespace {
-
-bool IsValidNumberingSystem(const std::string& value) {
+bool Intl::IsValidNumberingSystem(const std::string& value) {
std::set<std::string> invalid_values = {"native", "traditio", "finance"};
if (invalid_values.find(value) != invalid_values.end()) return false;
UErrorCode status = U_ZERO_ERROR;
@@ -1621,11 +1601,19 @@ bool IsValidNumberingSystem(const std::string& value) {
return U_SUCCESS(status) && numbering_system.get() != nullptr;
}
+namespace {
+
+bool IsWellFormedNumberingSystem(const std::string& value) {
+ return JSLocale::Is38AlphaNumList(value);
+}
+
std::map<std::string, std::string> LookupAndValidateUnicodeExtensions(
icu::Locale* icu_locale, const std::set<std::string>& relevant_keys) {
std::map<std::string, std::string> extensions;
UErrorCode status = U_ZERO_ERROR;
+ icu::LocaleBuilder builder;
+ builder.setLocale(*icu_locale).clearExtensions();
std::unique_ptr<icu::StringEnumeration> keywords(
icu_locale->createKeywords(status));
if (U_FAILURE(status)) return extensions;
@@ -1682,20 +1670,19 @@ std::map<std::string, std::string> LookupAndValidateUnicodeExtensions(
std::set<std::string> valid_values = {"upper", "lower", "false"};
is_valid_value = valid_values.find(bcp47_value) != valid_values.end();
} else if (strcmp("nu", bcp47_key) == 0) {
- is_valid_value = IsValidNumberingSystem(bcp47_value);
+ is_valid_value = Intl::IsValidNumberingSystem(bcp47_value);
}
if (is_valid_value) {
extensions.insert(
std::pair<std::string, std::string>(bcp47_key, bcp47_value));
- continue;
+ builder.setUnicodeLocaleKeyword(bcp47_key, bcp47_value);
}
}
- status = U_ZERO_ERROR;
- icu_locale->setUnicodeKeywordValue(
- bcp47_key == nullptr ? keyword : bcp47_key, nullptr, status);
- CHECK(U_SUCCESS(status));
}
+ status = U_ZERO_ERROR;
+ *icu_locale = builder.build(status);
+
return extensions;
}
@@ -2003,7 +1990,7 @@ Maybe<bool> Intl::GetNumberingSystem(Isolate* isolate,
empty_values, method, result);
MAYBE_RETURN(maybe, Nothing<bool>());
if (maybe.FromJust() && *result != nullptr) {
- if (!IsValidNumberingSystem(result->get())) {
+ if (!IsWellFormedNumberingSystem(result->get())) {
THROW_NEW_ERROR_RETURN_VALUE(
isolate,
NewRangeError(
@@ -2120,5 +2107,9 @@ MaybeHandle<String> Intl::FormattedToString(
return Intl::ToString(isolate, result);
}
+bool Intl::IsStructurallyValidLanguageTag(const std::string& tag) {
+ return JSLocale::StartsWithUnicodeLanguageId(tag);
+}
+
} // namespace internal
} // namespace v8
diff --git a/deps/v8/src/objects/intl-objects.h b/deps/v8/src/objects/intl-objects.h
index 4d4d3245fd..0c4a77b745 100644
--- a/deps/v8/src/objects/intl-objects.h
+++ b/deps/v8/src/objects/intl-objects.h
@@ -10,6 +10,7 @@
#define V8_OBJECTS_INTL_OBJECTS_H_
#include <map>
+#include <memory>
#include <set>
#include <string>
@@ -164,7 +165,7 @@ class Intl {
V8_WARN_UNUSED_RESULT static MaybeHandle<Object> StringLocaleCompare(
Isolate* isolate, Handle<String> s1, Handle<String> s2,
- Handle<Object> locales, Handle<Object> options);
+ Handle<Object> locales, Handle<Object> options, const char* method);
V8_WARN_UNUSED_RESULT static Handle<Object> CompareStrings(
Isolate* isolate, const icu::Collator& collator, Handle<String> s1,
@@ -173,7 +174,7 @@ class Intl {
// ecma402/#sup-properties-of-the-number-prototype-object
V8_WARN_UNUSED_RESULT static MaybeHandle<String> NumberToLocaleString(
Isolate* isolate, Handle<Object> num, Handle<Object> locales,
- Handle<Object> options);
+ Handle<Object> options, const char* method);
// ecma402/#sec-setnfdigitoptions
struct NumberFormatDigitOptions {
@@ -239,14 +240,14 @@ class Intl {
Handle<JSFunction> constructor, bool has_initialized_slot);
// enum for "caseFirst" option: shared by Intl.Locale and Intl.Collator.
- enum class CaseFirst { kUpper, kLower, kFalse, kUndefined };
+ enum class CaseFirst { kUndefined, kUpper, kLower, kFalse };
// Shared function to read the "caseFirst" option.
V8_WARN_UNUSED_RESULT static Maybe<CaseFirst> GetCaseFirst(
Isolate* isolate, Handle<JSReceiver> options, const char* method);
// enum for "hourCycle" option: shared by Intl.Locale and Intl.DateTimeFormat.
- enum class HourCycle { kH11, kH12, kH23, kH24, kUndefined };
+ enum class HourCycle { kUndefined, kH11, kH12, kH23, kH24 };
static HourCycle ToHourCycle(const std::string& str);
@@ -270,6 +271,12 @@ class Intl {
static bool IsValidCalendar(const icu::Locale& locale,
const std::string& value);
+ // Check the numberingSystem is valid.
+ static bool IsValidNumberingSystem(const std::string& value);
+
+ // Check the calendar is well formed.
+ static bool IsWellFormedCalendar(const std::string& value);
+
struct ResolvedLocale {
std::string locale;
icu::Locale icu_locale;
@@ -336,6 +343,8 @@ class Intl {
static const std::set<std::string>& GetAvailableLocalesForLocale();
static const std::set<std::string>& GetAvailableLocalesForDateFormat();
+
+ static bool IsStructurallyValidLanguageTag(const std::string& tag);
};
} // namespace internal
diff --git a/deps/v8/src/objects/js-array-buffer-inl.h b/deps/v8/src/objects/js-array-buffer-inl.h
index 9151be6da4..4ed347baa8 100644
--- a/deps/v8/src/objects/js-array-buffer-inl.h
+++ b/deps/v8/src/objects/js-array-buffer-inl.h
@@ -48,14 +48,6 @@ size_t JSArrayBuffer::allocation_length() const {
if (backing_store() == nullptr) {
return 0;
}
- // If this buffer is managed by the WasmMemoryTracker
- if (is_wasm_memory()) {
- const auto* data =
- GetIsolate()->wasm_engine()->memory_tracker()->FindAllocationData(
- backing_store());
- DCHECK_NOT_NULL(data);
- return data->allocation_length;
- }
return byte_length();
}
@@ -63,25 +55,9 @@ void* JSArrayBuffer::allocation_base() const {
if (backing_store() == nullptr) {
return nullptr;
}
- // If this buffer is managed by the WasmMemoryTracker
- if (is_wasm_memory()) {
- const auto* data =
- GetIsolate()->wasm_engine()->memory_tracker()->FindAllocationData(
- backing_store());
- DCHECK_NOT_NULL(data);
- return data->allocation_base;
- }
return backing_store();
}
-bool JSArrayBuffer::is_wasm_memory() const {
- return IsWasmMemoryBit::decode(bit_field());
-}
-
-void JSArrayBuffer::set_is_wasm_memory(bool is_wasm_memory) {
- set_bit_field(IsWasmMemoryBit::update(bit_field(), is_wasm_memory));
-}
-
void JSArrayBuffer::clear_padding() {
if (FIELD_SIZE(kOptionalPaddingOffset) != 0) {
DCHECK_EQ(4, FIELD_SIZE(kOptionalPaddingOffset));
@@ -105,6 +81,8 @@ BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, is_detachable,
JSArrayBuffer::IsDetachableBit)
BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, was_detached,
JSArrayBuffer::WasDetachedBit)
+BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, is_asmjs_memory,
+ JSArrayBuffer::IsAsmJsMemoryBit)
BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, is_shared,
JSArrayBuffer::IsSharedBit)
@@ -136,31 +114,61 @@ void JSTypedArray::set_length(size_t value) {
WriteField<size_t>(kLengthOffset, value);
}
-void* JSTypedArray::external_pointer() const {
- return reinterpret_cast<void*>(ReadField<Address>(kExternalPointerOffset));
+Address JSTypedArray::external_pointer() const {
+ return ReadField<Address>(kExternalPointerOffset);
+}
+
+void JSTypedArray::set_external_pointer(Address value) {
+ WriteField<Address>(kExternalPointerOffset, value);
}
-void JSTypedArray::set_external_pointer(void* value) {
- WriteField<Address>(kExternalPointerOffset, reinterpret_cast<Address>(value));
+Address JSTypedArray::ExternalPointerCompensationForOnHeapArray(
+ Isolate* isolate) {
+#ifdef V8_COMPRESS_POINTERS
+ return GetIsolateRoot(isolate);
+#else
+ return 0;
+#endif
+}
+
+void JSTypedArray::RemoveExternalPointerCompensationForSerialization() {
+ DCHECK(is_on_heap());
+ Isolate* isolate = GetIsolateForPtrCompr(*this);
+ set_external_pointer(external_pointer() -
+ ExternalPointerCompensationForOnHeapArray(isolate));
}
ACCESSORS(JSTypedArray, base_pointer, Object, kBasePointerOffset)
void* JSTypedArray::DataPtr() {
- return reinterpret_cast<void*>(
- base_pointer().ptr() + reinterpret_cast<intptr_t>(external_pointer()));
+ // Zero-extend Tagged_t to Address according to current compression scheme
+ // so that the addition with |external_pointer| (which already contains
+ // compensated offset value) will decompress the tagged value.
+ // See JSTypedArray::ExternalPointerCompensationForOnHeapArray() for details.
+ return reinterpret_cast<void*>(external_pointer() +
+ static_cast<Tagged_t>(base_pointer().ptr()));
+}
+
+void JSTypedArray::SetOffHeapDataPtr(void* base, Address offset) {
+ set_base_pointer(Smi::kZero, SKIP_WRITE_BARRIER);
+ Address address = reinterpret_cast<Address>(base) + offset;
+ set_external_pointer(address);
+ DCHECK_EQ(address, reinterpret_cast<Address>(DataPtr()));
+}
+
+void JSTypedArray::SetOnHeapDataPtr(HeapObject base, Address offset) {
+ set_base_pointer(base);
+ Isolate* isolate = GetIsolateForPtrCompr(*this);
+ set_external_pointer(offset +
+ ExternalPointerCompensationForOnHeapArray(isolate));
+ DCHECK_EQ(base.ptr() + offset, reinterpret_cast<Address>(DataPtr()));
}
bool JSTypedArray::is_on_heap() const {
DisallowHeapAllocation no_gc;
// Checking that buffer()->backing_store() is not nullptr is not sufficient;
// it will be nullptr when byte_length is 0 as well.
- return base_pointer().ptr() == elements().ptr();
-}
-
-// static
-void* JSTypedArray::ExternalPointerForOnHeapArray() {
- return reinterpret_cast<void*>(ByteArray::kHeaderSize - kHeapObjectTag);
+ return base_pointer() == elements();
}
// static
diff --git a/deps/v8/src/objects/js-array-buffer.cc b/deps/v8/src/objects/js-array-buffer.cc
index a506920f95..d3f5a0a952 100644
--- a/deps/v8/src/objects/js-array-buffer.cc
+++ b/deps/v8/src/objects/js-array-buffer.cc
@@ -5,6 +5,7 @@
#include "src/objects/js-array-buffer.h"
#include "src/objects/js-array-buffer-inl.h"
+#include "src/execution/protectors-inl.h"
#include "src/logging/counters.h"
#include "src/objects/property-descriptor.h"
@@ -31,167 +32,105 @@ bool CanonicalNumericIndexString(Isolate* isolate, Handle<Object> s,
*index = result;
return true;
}
-
-inline int ConvertToMb(size_t size) {
- return static_cast<int>(size / static_cast<size_t>(MB));
-}
-
} // anonymous namespace
-void JSArrayBuffer::Detach() {
- CHECK(is_detachable());
- CHECK(!was_detached());
- CHECK(is_external());
- set_backing_store(nullptr);
- set_byte_length(0);
- set_was_detached(true);
- set_is_detachable(false);
- // Invalidate the detaching protector.
- Isolate* const isolate = GetIsolate();
- if (isolate->IsArrayBufferDetachingIntact()) {
- isolate->InvalidateArrayBufferDetachingProtector();
+void JSArrayBuffer::Setup(SharedFlag shared,
+ std::shared_ptr<BackingStore> backing_store) {
+ clear_padding();
+ set_bit_field(0);
+ set_is_shared(shared == SharedFlag::kShared);
+ set_is_detachable(shared != SharedFlag::kShared);
+ for (int i = 0; i < v8::ArrayBuffer::kEmbedderFieldCount; i++) {
+ SetEmbedderField(i, Smi::kZero);
+ }
+ if (!backing_store) {
+ set_backing_store(nullptr);
+ set_byte_length(0);
+ } else {
+ Attach(std::move(backing_store));
}
}
-void JSArrayBuffer::FreeBackingStoreFromMainThread() {
- if (allocation_base() == nullptr) {
- return;
- }
- FreeBackingStore(GetIsolate(), {allocation_base(), allocation_length(),
- backing_store(), is_wasm_memory()});
- // Zero out the backing store and allocation base to avoid dangling
- // pointers.
- set_backing_store(nullptr);
+void JSArrayBuffer::Attach(std::shared_ptr<BackingStore> backing_store) {
+ DCHECK_NOT_NULL(backing_store);
+ DCHECK_EQ(is_shared(), backing_store->is_shared());
+ set_backing_store(backing_store->buffer_start());
+ set_byte_length(backing_store->byte_length());
+ if (backing_store->is_wasm_memory()) set_is_detachable(false);
+ if (!backing_store->free_on_destruct()) set_is_external(true);
+ GetIsolate()->heap()->RegisterBackingStore(*this, std::move(backing_store));
}
-// static
-void JSArrayBuffer::FreeBackingStore(Isolate* isolate, Allocation allocation) {
- if (allocation.is_wasm_memory) {
- wasm::WasmMemoryTracker* memory_tracker =
- isolate->wasm_engine()->memory_tracker();
- memory_tracker->FreeWasmMemory(isolate, allocation.backing_store);
- } else {
- isolate->array_buffer_allocator()->Free(allocation.allocation_base,
- allocation.length);
+void JSArrayBuffer::Detach(bool force_for_wasm_memory) {
+ if (was_detached()) return;
+
+ if (force_for_wasm_memory) {
+ // Skip the is_detachable() check.
+ } else if (!is_detachable()) {
+ // Not detachable, do nothing.
+ return;
}
-}
-void JSArrayBuffer::Setup(Handle<JSArrayBuffer> array_buffer, Isolate* isolate,
- bool is_external, void* data, size_t byte_length,
- SharedFlag shared_flag, bool is_wasm_memory) {
- DCHECK_EQ(array_buffer->GetEmbedderFieldCount(),
- v8::ArrayBuffer::kEmbedderFieldCount);
- DCHECK_LE(byte_length, JSArrayBuffer::kMaxByteLength);
- for (int i = 0; i < v8::ArrayBuffer::kEmbedderFieldCount; i++) {
- array_buffer->SetEmbedderField(i, Smi::kZero);
+ Isolate* const isolate = GetIsolate();
+ if (backing_store()) {
+ auto backing_store = isolate->heap()->UnregisterBackingStore(*this);
+ CHECK_IMPLIES(force_for_wasm_memory, backing_store->is_wasm_memory());
}
- array_buffer->set_byte_length(byte_length);
- array_buffer->set_bit_field(0);
- array_buffer->clear_padding();
- array_buffer->set_is_external(is_external);
- array_buffer->set_is_detachable(shared_flag == SharedFlag::kNotShared);
- array_buffer->set_is_shared(shared_flag == SharedFlag::kShared);
- array_buffer->set_is_wasm_memory(is_wasm_memory);
- // Initialize backing store at last to avoid handling of |JSArrayBuffers| that
- // are currently being constructed in the |ArrayBufferTracker|. The
- // registration method below handles the case of registering a buffer that has
- // already been promoted.
- array_buffer->set_backing_store(data);
- if (data && !is_external) {
- isolate->heap()->RegisterNewArrayBuffer(*array_buffer);
+ if (Protectors::IsArrayBufferDetachingIntact(isolate)) {
+ Protectors::InvalidateArrayBufferDetaching(isolate);
}
-}
-void JSArrayBuffer::SetupAsEmpty(Handle<JSArrayBuffer> array_buffer,
- Isolate* isolate) {
- Setup(array_buffer, isolate, false, nullptr, 0, SharedFlag::kNotShared);
+ DCHECK(!is_shared());
+ DCHECK(!is_asmjs_memory());
+ set_backing_store(nullptr);
+ set_byte_length(0);
+ set_was_detached(true);
}
-bool JSArrayBuffer::SetupAllocatingData(Handle<JSArrayBuffer> array_buffer,
- Isolate* isolate,
- size_t allocated_length,
- bool initialize,
- SharedFlag shared_flag) {
- void* data;
- CHECK_NOT_NULL(isolate->array_buffer_allocator());
- if (allocated_length != 0) {
- if (allocated_length >= MB)
- isolate->counters()->array_buffer_big_allocations()->AddSample(
- ConvertToMb(allocated_length));
- if (shared_flag == SharedFlag::kShared)
- isolate->counters()->shared_array_allocations()->AddSample(
- ConvertToMb(allocated_length));
- if (initialize) {
- data = isolate->array_buffer_allocator()->Allocate(allocated_length);
- } else {
- data = isolate->array_buffer_allocator()->AllocateUninitialized(
- allocated_length);
- }
- if (data == nullptr) {
- isolate->counters()->array_buffer_new_size_failures()->AddSample(
- ConvertToMb(allocated_length));
- SetupAsEmpty(array_buffer, isolate);
- return false;
- }
- } else {
- data = nullptr;
- }
-
- const bool is_external = false;
- JSArrayBuffer::Setup(array_buffer, isolate, is_external, data,
- allocated_length, shared_flag);
- return true;
+std::shared_ptr<BackingStore> JSArrayBuffer::GetBackingStore() {
+ return GetIsolate()->heap()->LookupBackingStore(*this);
}
-Handle<JSArrayBuffer> JSTypedArray::MaterializeArrayBuffer(
- Handle<JSTypedArray> typed_array) {
- DCHECK(typed_array->is_on_heap());
+Handle<JSArrayBuffer> JSTypedArray::GetBuffer() {
+ Isolate* isolate = GetIsolate();
+ Handle<JSTypedArray> self(*this, isolate);
+ DCHECK(IsTypedArrayElementsKind(self->GetElementsKind()));
+
+ Handle<JSArrayBuffer> array_buffer(JSArrayBuffer::cast(self->buffer()),
+ isolate);
+ if (!is_on_heap()) {
+ // Already is off heap, so return the existing buffer.
+ return array_buffer;
+ }
- Isolate* isolate = typed_array->GetIsolate();
+ // The existing array buffer should be empty.
+ DCHECK_NULL(array_buffer->backing_store());
- DCHECK(IsTypedArrayElementsKind(typed_array->GetElementsKind()));
+ // Allocate a new backing store and attach it to the existing array buffer.
+ size_t byte_length = self->byte_length();
+ auto backing_store =
+ BackingStore::Allocate(isolate, byte_length, SharedFlag::kNotShared,
+ InitializedFlag::kUninitialized);
- Handle<JSArrayBuffer> buffer(JSArrayBuffer::cast(typed_array->buffer()),
- isolate);
- // This code does not know how to materialize from wasm buffers.
- DCHECK(!buffer->is_wasm_memory());
+ if (!backing_store) {
+ isolate->heap()->FatalProcessOutOfMemory("JSTypedArray::GetBuffer");
+ }
- void* backing_store =
- isolate->array_buffer_allocator()->AllocateUninitialized(
- typed_array->byte_length());
- if (backing_store == nullptr) {
- isolate->heap()->FatalProcessOutOfMemory(
- "JSTypedArray::MaterializeArrayBuffer");
+ // Copy the elements into the backing store of the array buffer.
+ if (byte_length > 0) {
+ memcpy(backing_store->buffer_start(), self->DataPtr(), byte_length);
}
- buffer->set_is_external(false);
- DCHECK_EQ(buffer->byte_length(), typed_array->byte_length());
- // Initialize backing store at last to avoid handling of |JSArrayBuffers| that
- // are currently being constructed in the |ArrayBufferTracker|. The
- // registration method below handles the case of registering a buffer that has
- // already been promoted.
- buffer->set_backing_store(backing_store);
- // RegisterNewArrayBuffer expects a valid length for adjusting counters.
- isolate->heap()->RegisterNewArrayBuffer(*buffer);
- memcpy(buffer->backing_store(), typed_array->DataPtr(),
- typed_array->byte_length());
- typed_array->set_elements(ReadOnlyRoots(isolate).empty_byte_array());
- typed_array->set_external_pointer(backing_store);
- typed_array->set_base_pointer(Smi::kZero);
- DCHECK(!typed_array->is_on_heap());
+ // Attach the backing store to the array buffer.
+ array_buffer->Setup(SharedFlag::kNotShared, std::move(backing_store));
- return buffer;
-}
+ // Clear the elements of the typed array.
+ self->set_elements(ReadOnlyRoots(isolate).empty_byte_array());
+ self->SetOffHeapDataPtr(array_buffer->backing_store(), 0);
+ DCHECK(!self->is_on_heap());
-Handle<JSArrayBuffer> JSTypedArray::GetBuffer() {
- if (!is_on_heap()) {
- Handle<JSArrayBuffer> array_buffer(JSArrayBuffer::cast(buffer()),
- GetIsolate());
- return array_buffer;
- }
- Handle<JSTypedArray> self(*this, GetIsolate());
- return MaterializeArrayBuffer(self);
+ return array_buffer;
}
// ES#sec-integer-indexed-exotic-objects-defineownproperty-p-desc
diff --git a/deps/v8/src/objects/js-array-buffer.h b/deps/v8/src/objects/js-array-buffer.h
index 7bf2e1ae94..71adb42ae8 100644
--- a/deps/v8/src/objects/js-array-buffer.h
+++ b/deps/v8/src/objects/js-array-buffer.h
@@ -5,6 +5,7 @@
#ifndef V8_OBJECTS_JS_ARRAY_BUFFER_H_
#define V8_OBJECTS_JS_ARRAY_BUFFER_H_
+#include "src/objects/backing-store.h"
#include "src/objects/js-objects.h"
// Has to be the last include (doesn't have include guards):
@@ -13,9 +14,6 @@
namespace v8 {
namespace internal {
-// Whether a JSArrayBuffer is a SharedArrayBuffer or not.
-enum class SharedFlag : uint32_t { kNotShared, kShared };
-
class JSArrayBuffer : public JSObject {
public:
// The maximum length for JSArrayBuffer's supported by V8.
@@ -51,8 +49,8 @@ class JSArrayBuffer : public JSObject {
V(IsExternalBit, bool, 1, _) \
V(IsDetachableBit, bool, 1, _) \
V(WasDetachedBit, bool, 1, _) \
- V(IsSharedBit, bool, 1, _) \
- V(IsWasmMemoryBit, bool, 1, _)
+ V(IsAsmJsMemoryBit, bool, 1, _) \
+ V(IsSharedBit, bool, 1, _)
DEFINE_BIT_FIELDS(JS_ARRAY_BUFFER_BIT_FIELD_FIELDS)
#undef JS_ARRAY_BUFFER_BIT_FIELD_FIELDS
@@ -61,57 +59,45 @@ class JSArrayBuffer : public JSObject {
// memory block once all ArrayBuffers referencing it are collected by the GC.
DECL_BOOLEAN_ACCESSORS(is_external)
- // [is_detachable]: false indicates that this buffer cannot be detached.
+ // [is_detachable]: false => this buffer cannot be detached.
DECL_BOOLEAN_ACCESSORS(is_detachable)
- // [was_detached]: true if the buffer was previously detached.
+ // [was_detached]: true => the buffer was previously detached.
DECL_BOOLEAN_ACCESSORS(was_detached)
+ // [is_asmjs_memory]: true => this buffer was once used as asm.js memory.
+ DECL_BOOLEAN_ACCESSORS(is_asmjs_memory)
+
// [is_shared]: tells whether this is an ArrayBuffer or a SharedArrayBuffer.
DECL_BOOLEAN_ACCESSORS(is_shared)
- // [is_wasm_memory]: whether the buffer is tracked by the WasmMemoryTracker.
- DECL_BOOLEAN_ACCESSORS(is_wasm_memory)
-
DECL_CAST(JSArrayBuffer)
- void Detach();
-
- struct Allocation {
- Allocation(void* allocation_base, size_t length, void* backing_store,
- bool is_wasm_memory)
- : allocation_base(allocation_base),
- length(length),
- backing_store(backing_store),
- is_wasm_memory(is_wasm_memory) {}
-
- void* allocation_base;
- size_t length;
- void* backing_store;
- bool is_wasm_memory;
- };
-
- V8_EXPORT_PRIVATE void FreeBackingStoreFromMainThread();
- V8_EXPORT_PRIVATE static void FreeBackingStore(Isolate* isolate,
- Allocation allocation);
-
- V8_EXPORT_PRIVATE static void Setup(
- Handle<JSArrayBuffer> array_buffer, Isolate* isolate, bool is_external,
- void* data, size_t allocated_length,
- SharedFlag shared_flag = SharedFlag::kNotShared,
- bool is_wasm_memory = false);
-
- // Initialize the object as empty one to avoid confusing heap verifier if
- // the failure happened in the middle of JSArrayBuffer construction.
- V8_EXPORT_PRIVATE static void SetupAsEmpty(Handle<JSArrayBuffer> array_buffer,
- Isolate* isolate);
-
- // Returns false if array buffer contents could not be allocated.
- // In this case, |array_buffer| will not be set up.
- V8_EXPORT_PRIVATE static bool SetupAllocatingData(
- Handle<JSArrayBuffer> array_buffer, Isolate* isolate,
- size_t allocated_length, bool initialize = true,
- SharedFlag shared_flag = SharedFlag::kNotShared) V8_WARN_UNUSED_RESULT;
+ // Initializes the fields of the ArrayBuffer. The provided backing_store can
+ // be nullptr. If it is not nullptr, then the function registers it with
+ // src/heap/array-buffer-tracker.h.
+ V8_EXPORT_PRIVATE void Setup(SharedFlag shared,
+ std::shared_ptr<BackingStore> backing_store);
+
+ // Attaches the backing store to an already constructed empty ArrayBuffer.
+ // This is intended to be used only in ArrayBufferConstructor builtin.
+ V8_EXPORT_PRIVATE void Attach(std::shared_ptr<BackingStore> backing_store);
+ // Detach the backing store from this array buffer if it is detachable.
+ // This sets the internal pointer and length to 0 and unregisters the backing
+ // store from the array buffer tracker. If the array buffer is not detachable,
+ // this is a nop.
+ //
+ // Array buffers that wrap wasm memory objects are special in that they
+ // are normally not detachable, but can become detached as a side effect
+ // of growing the underlying memory object. The {force_for_wasm_memory} flag
+ // is used by the implementation of Wasm memory growth in order to bypass the
+ // non-detachable check.
+ V8_EXPORT_PRIVATE void Detach(bool force_for_wasm_memory = false);
+
+ // Get a reference to backing store of this array buffer, if there is a
+ // backing store. Returns nullptr if there is no backing store (e.g. detached
+ // or a zero-length array buffer).
+ std::shared_ptr<BackingStore> GetBackingStore();
// Dispatched behavior.
DECL_PRINTER(JSArrayBuffer)
@@ -187,12 +173,6 @@ class JSTypedArray : public JSArrayBufferView {
// [length]: length of typed array in elements.
DECL_PRIMITIVE_ACCESSORS(length, size_t)
- // [external_pointer]: TODO(v8:4153)
- DECL_PRIMITIVE_ACCESSORS(external_pointer, void*)
-
- // [base_pointer]: TODO(v8:4153)
- DECL_ACCESSORS(base_pointer, Object)
-
// ES6 9.4.5.3
V8_WARN_UNUSED_RESULT static Maybe<bool> DefineOwnProperty(
Isolate* isolate, Handle<JSTypedArray> o, Handle<Object> key,
@@ -208,10 +188,26 @@ class JSTypedArray : public JSArrayBufferView {
// Use with care: returns raw pointer into heap.
inline void* DataPtr();
+ inline void SetOffHeapDataPtr(void* base, Address offset);
+ inline void SetOnHeapDataPtr(HeapObject base, Address offset);
+
// Whether the buffer's backing store is on-heap or off-heap.
inline bool is_on_heap() const;
- static inline void* ExternalPointerForOnHeapArray();
+ // Note: this is a pointer compression specific optimization.
+ // Normally, on-heap typed arrays contain HeapObject value in |base_pointer|
+ // field and an offset in |external_pointer|.
+ // When pointer compression is enabled we want to combine decompression with
+ // the offset addition. In order to do that we add an isolate root to the
+ // |external_pointer| value and therefore the data pointer computation can
+ // is a simple addition of a (potentially sign-extended) |base_pointer| loaded
+ // as Tagged_t value and an |external_pointer| value.
+ // For full-pointer mode the compensation value is zero.
+ static inline Address ExternalPointerCompensationForOnHeapArray(
+ Isolate* isolate);
+
+ // Subtracts external pointer compensation from the external pointer value.
+ inline void RemoveExternalPointerCompensationForSerialization();
static inline MaybeHandle<JSTypedArray> Validate(Isolate* isolate,
Handle<Object> receiver,
@@ -250,8 +246,13 @@ class JSTypedArray : public JSArrayBufferView {
#endif
private:
- static Handle<JSArrayBuffer> MaterializeArrayBuffer(
- Handle<JSTypedArray> typed_array);
+ friend class Deserializer;
+
+ // [base_pointer]: TODO(v8:4153)
+ DECL_ACCESSORS(base_pointer, Object)
+
+ // [external_pointer]: TODO(v8:4153)
+ DECL_PRIMITIVE_ACCESSORS(external_pointer, Address)
OBJECT_CONSTRUCTORS(JSTypedArray, JSArrayBufferView);
};
diff --git a/deps/v8/src/objects/js-array.h b/deps/v8/src/objects/js-array.h
index eb581c104e..c990151b27 100644
--- a/deps/v8/src/objects/js-array.h
+++ b/deps/v8/src/objects/js-array.h
@@ -108,7 +108,7 @@ class JSArray : public JSObject {
static const int kPreallocatedArrayElements = 4;
DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
- TORQUE_GENERATED_JSARRAY_FIELDS)
+ TORQUE_GENERATED_JS_ARRAY_FIELDS)
static const int kLengthDescriptorIndex = 0;
@@ -178,7 +178,7 @@ class JSArrayIterator : public JSObject {
inline void set_kind(IterationKind kind);
DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
- TORQUE_GENERATED_JSARRAY_ITERATOR_FIELDS)
+ TORQUE_GENERATED_JS_ARRAY_ITERATOR_FIELDS)
private:
DECL_INT_ACCESSORS(raw_kind)
diff --git a/deps/v8/src/objects/js-break-iterator.cc b/deps/v8/src/objects/js-break-iterator.cc
index 31ed3f8611..1a9d096411 100644
--- a/deps/v8/src/objects/js-break-iterator.cc
+++ b/deps/v8/src/objects/js-break-iterator.cc
@@ -17,7 +17,7 @@ namespace internal {
MaybeHandle<JSV8BreakIterator> JSV8BreakIterator::New(
Isolate* isolate, Handle<Map> map, Handle<Object> locales,
- Handle<Object> options_obj) {
+ Handle<Object> options_obj, const char* service) {
Factory* factory = isolate->factory();
// 1. Let requestedLocales be ? CanonicalizeLocaleList(locales).
@@ -31,15 +31,14 @@ MaybeHandle<JSV8BreakIterator> JSV8BreakIterator::New(
if (options_obj->IsUndefined(isolate)) {
options = factory->NewJSObjectWithNullProto();
} else {
- ASSIGN_RETURN_ON_EXCEPTION(
- isolate, options,
- Object::ToObject(isolate, options_obj, "Intl.JSV8BreakIterator"),
- JSV8BreakIterator);
+ ASSIGN_RETURN_ON_EXCEPTION(isolate, options,
+ Object::ToObject(isolate, options_obj, service),
+ JSV8BreakIterator);
}
// Extract locale string
Maybe<Intl::MatcherOption> maybe_locale_matcher =
- Intl::GetLocaleMatcher(isolate, options, "Intl.JSV8BreakIterator");
+ Intl::GetLocaleMatcher(isolate, options, service);
MAYBE_RETURN(maybe_locale_matcher, MaybeHandle<JSV8BreakIterator>());
Intl::MatcherOption matcher = maybe_locale_matcher.FromJust();
@@ -49,7 +48,7 @@ MaybeHandle<JSV8BreakIterator> JSV8BreakIterator::New(
// Extract type from options
Maybe<Type> maybe_type = Intl::GetStringOption<Type>(
- isolate, options, "type", "Intl.v8BreakIterator",
+ isolate, options, "type", service,
{"word", "character", "sentence", "line"},
{Type::WORD, Type::CHARACTER, Type::SENTENCE, Type::LINE}, Type::WORD);
MAYBE_RETURN(maybe_type, MaybeHandle<JSV8BreakIterator>());
diff --git a/deps/v8/src/objects/js-break-iterator.h b/deps/v8/src/objects/js-break-iterator.h
index 4b40192c81..ea66fe6732 100644
--- a/deps/v8/src/objects/js-break-iterator.h
+++ b/deps/v8/src/objects/js-break-iterator.h
@@ -31,7 +31,7 @@ class JSV8BreakIterator : public JSObject {
public:
V8_WARN_UNUSED_RESULT static MaybeHandle<JSV8BreakIterator> New(
Isolate* isolate, Handle<Map> map, Handle<Object> input_locales,
- Handle<Object> input_options);
+ Handle<Object> input_options, const char* service);
static Handle<JSObject> ResolvedOptions(
Isolate* isolate, Handle<JSV8BreakIterator> break_iterator);
@@ -72,7 +72,7 @@ class JSV8BreakIterator : public JSObject {
// Layout description.
DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
- TORQUE_GENERATED_JSV8BREAK_ITERATOR_FIELDS)
+ TORQUE_GENERATED_JS_V8_BREAK_ITERATOR_FIELDS)
private:
DECL_INT_ACCESSORS(raw_type)
diff --git a/deps/v8/src/objects/js-collator.cc b/deps/v8/src/objects/js-collator.cc
index 0413e2acd1..39178b3acf 100644
--- a/deps/v8/src/objects/js-collator.cc
+++ b/deps/v8/src/objects/js-collator.cc
@@ -243,7 +243,8 @@ void SetCaseFirstOption(icu::Collator* icu_collator,
// static
MaybeHandle<JSCollator> JSCollator::New(Isolate* isolate, Handle<Map> map,
Handle<Object> locales,
- Handle<Object> options_obj) {
+ Handle<Object> options_obj,
+ const char* service) {
// 1. Let requestedLocales be ? CanonicalizeLocaleList(locales).
Maybe<std::vector<std::string>> maybe_requested_locales =
Intl::CanonicalizeLocaleList(isolate, locales);
@@ -258,9 +259,9 @@ MaybeHandle<JSCollator> JSCollator::New(Isolate* isolate, Handle<Map> map,
} else {
// 3. Else
// 3. a. Let options be ? ToObject(options).
- ASSIGN_RETURN_ON_EXCEPTION(
- isolate, options_obj,
- Object::ToObject(isolate, options_obj, "Intl.Collator"), JSCollator);
+ ASSIGN_RETURN_ON_EXCEPTION(isolate, options_obj,
+ Object::ToObject(isolate, options_obj, service),
+ JSCollator);
}
// At this point, options_obj can either be a JSObject or a JSProxy only.
@@ -269,7 +270,7 @@ MaybeHandle<JSCollator> JSCollator::New(Isolate* isolate, Handle<Map> map,
// 4. Let usage be ? GetOption(options, "usage", "string", « "sort",
// "search" », "sort").
Maybe<Usage> maybe_usage = Intl::GetStringOption<Usage>(
- isolate, options, "usage", "Intl.Collator", {"sort", "search"},
+ isolate, options, "usage", service, {"sort", "search"},
{Usage::SORT, Usage::SEARCH}, Usage::SORT);
MAYBE_RETURN(maybe_usage, MaybeHandle<JSCollator>());
Usage usage = maybe_usage.FromJust();
@@ -278,7 +279,7 @@ MaybeHandle<JSCollator> JSCollator::New(Isolate* isolate, Handle<Map> map,
// « "lookup", "best fit" », "best fit").
// 10. Set opt.[[localeMatcher]] to matcher.
Maybe<Intl::MatcherOption> maybe_locale_matcher =
- Intl::GetLocaleMatcher(isolate, options, "Intl.Collator");
+ Intl::GetLocaleMatcher(isolate, options, service);
MAYBE_RETURN(maybe_locale_matcher, MaybeHandle<JSCollator>());
Intl::MatcherOption matcher = maybe_locale_matcher.FromJust();
@@ -293,14 +294,14 @@ MaybeHandle<JSCollator> JSCollator::New(Isolate* isolate, Handle<Map> map,
//
// 13. Set opt.[[kn]] to numeric.
bool numeric;
- Maybe<bool> found_numeric = Intl::GetBoolOption(isolate, options, "numeric",
- "Intl.Collator", &numeric);
+ Maybe<bool> found_numeric =
+ Intl::GetBoolOption(isolate, options, "numeric", service, &numeric);
MAYBE_RETURN(found_numeric, MaybeHandle<JSCollator>());
// 14. Let caseFirst be ? GetOption(options, "caseFirst", "string",
// « "upper", "lower", "false" », undefined).
Maybe<Intl::CaseFirst> maybe_case_first =
- Intl::GetCaseFirst(isolate, options, "Intl.Collator");
+ Intl::GetCaseFirst(isolate, options, service);
MAYBE_RETURN(maybe_case_first, MaybeHandle<JSCollator>());
Intl::CaseFirst case_first = maybe_case_first.FromJust();
@@ -411,7 +412,7 @@ MaybeHandle<JSCollator> JSCollator::New(Isolate* isolate, Handle<Map> map,
// 24. Let sensitivity be ? GetOption(options, "sensitivity",
// "string", « "base", "accent", "case", "variant" », undefined).
Maybe<Sensitivity> maybe_sensitivity = Intl::GetStringOption<Sensitivity>(
- isolate, options, "sensitivity", "Intl.Collator",
+ isolate, options, "sensitivity", service,
{"base", "accent", "case", "variant"},
{Sensitivity::kBase, Sensitivity::kAccent, Sensitivity::kCase,
Sensitivity::kVariant},
@@ -451,9 +452,8 @@ MaybeHandle<JSCollator> JSCollator::New(Isolate* isolate, Handle<Map> map,
// 27.Let ignorePunctuation be ? GetOption(options,
// "ignorePunctuation", "boolean", undefined, false).
bool ignore_punctuation;
- Maybe<bool> found_ignore_punctuation =
- Intl::GetBoolOption(isolate, options, "ignorePunctuation",
- "Intl.Collator", &ignore_punctuation);
+ Maybe<bool> found_ignore_punctuation = Intl::GetBoolOption(
+ isolate, options, "ignorePunctuation", service, &ignore_punctuation);
MAYBE_RETURN(found_ignore_punctuation, MaybeHandle<JSCollator>());
// 28. Set collator.[[IgnorePunctuation]] to ignorePunctuation.
diff --git a/deps/v8/src/objects/js-collator.h b/deps/v8/src/objects/js-collator.h
index e9114afeb1..0147b80ebb 100644
--- a/deps/v8/src/objects/js-collator.h
+++ b/deps/v8/src/objects/js-collator.h
@@ -34,7 +34,7 @@ class JSCollator : public JSObject {
// ecma402/#sec-initializecollator
V8_WARN_UNUSED_RESULT static MaybeHandle<JSCollator> New(
Isolate* isolate, Handle<Map> map, Handle<Object> locales,
- Handle<Object> options);
+ Handle<Object> options, const char* service);
// ecma402/#sec-intl.collator.prototype.resolvedoptions
static Handle<JSObject> ResolvedOptions(Isolate* isolate,
@@ -48,7 +48,7 @@ class JSCollator : public JSObject {
// Layout description.
DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
- TORQUE_GENERATED_JSCOLLATOR_FIELDS)
+ TORQUE_GENERATED_JS_COLLATOR_FIELDS)
DECL_ACCESSORS(icu_collator, Managed<icu::Collator>)
DECL_ACCESSORS(bound_compare, Object)
diff --git a/deps/v8/src/objects/js-collection-iterator.h b/deps/v8/src/objects/js-collection-iterator.h
index b193aa84cd..0a40837677 100644
--- a/deps/v8/src/objects/js-collection-iterator.h
+++ b/deps/v8/src/objects/js-collection-iterator.h
@@ -22,6 +22,10 @@ class JSCollectionIterator
public:
void JSCollectionIteratorPrint(std::ostream& os, const char* name);
+ // JSCollectionIterator is abstract, but also defines the size for all of its
+ // concrete subclasses.
+ static constexpr int kSize = kHeaderSize;
+
TQ_OBJECT_CONSTRUCTORS(JSCollectionIterator)
};
diff --git a/deps/v8/src/objects/js-date-time-format.cc b/deps/v8/src/objects/js-date-time-format.cc
index 29fcfb0d7c..835f3dc43a 100644
--- a/deps/v8/src/objects/js-date-time-format.cc
+++ b/deps/v8/src/objects/js-date-time-format.cc
@@ -79,16 +79,6 @@ static std::vector<PatternItem> BuildPatternItems() {
kNarrowLongShort),
PatternItem("year", {{"yy", "2-digit"}, {"y", "numeric"}},
k2DigitNumeric)};
- if (FLAG_harmony_intl_dateformat_quarter) {
- items.push_back(PatternItem("quarter",
- {{"QQQQQ", "narrow"},
- {"QQQQ", "long"},
- {"QQQ", "short"},
- {"qqqqq", "narrow"},
- {"qqqq", "long"},
- {"qqq", "short"}},
- kNarrowLongShort));
- }
// Sometimes we get L instead of M for month - standalone name.
items.push_back(PatternItem("month",
{{"MMMMM", "narrow"},
@@ -641,7 +631,8 @@ Isolate::ICUObjectCacheType ConvertToCacheType(
MaybeHandle<String> JSDateTimeFormat::ToLocaleDateTime(
Isolate* isolate, Handle<Object> date, Handle<Object> locales,
- Handle<Object> options, RequiredOption required, DefaultsOption defaults) {
+ Handle<Object> options, RequiredOption required, DefaultsOption defaults,
+ const char* method) {
Isolate::ICUObjectCacheType cache_type = ConvertToCacheType(defaults);
Factory* factory = isolate->factory();
@@ -691,7 +682,8 @@ MaybeHandle<String> JSDateTimeFormat::ToLocaleDateTime(
Handle<JSDateTimeFormat> date_time_format;
ASSIGN_RETURN_ON_EXCEPTION(
isolate, date_time_format,
- JSDateTimeFormat::New(isolate, map, locales, internal_options), String);
+ JSDateTimeFormat::New(isolate, map, locales, internal_options, method),
+ String);
if (can_cache) {
isolate->set_icu_object_in_cache(
@@ -775,13 +767,10 @@ MaybeHandle<JSObject> JSDateTimeFormat::ToDateTimeOptions(
// 4. If required is "date" or "any", then
if (required == RequiredOption::kAny || required == RequiredOption::kDate) {
- // a. For each of the property names "weekday", "year", "quarter", "month",
+ // a. For each of the property names "weekday", "year", "month",
// "day", do
std::vector<Handle<String>> list(
{factory->weekday_string(), factory->year_string()});
- if (FLAG_harmony_intl_dateformat_quarter) {
- list.push_back(factory->quarter_string());
- }
list.push_back(factory->month_string());
list.push_back(factory->day_string());
Maybe<bool> maybe_needs_default = NeedsDefault(isolate, options, list);
@@ -941,7 +930,7 @@ icu::Calendar* CreateCalendar(Isolate* isolate, const icu::Locale& icu_locale,
std::unique_ptr<icu::SimpleDateFormat> CreateICUDateFormat(
const icu::Locale& icu_locale, const icu::UnicodeString& skeleton,
- icu::DateTimePatternGenerator& generator) { // NOLINT(runtime/references)
+ icu::DateTimePatternGenerator* generator) {
// See https://github.com/tc39/ecma402/issues/225 . The best pattern
// generation needs to be done in the base locale according to the
// current spec however odd it may be. See also crbug.com/826549 .
@@ -954,8 +943,8 @@ std::unique_ptr<icu::SimpleDateFormat> CreateICUDateFormat(
// has to be discussed. Revisit once the spec is clarified/revised.
icu::UnicodeString pattern;
UErrorCode status = U_ZERO_ERROR;
- pattern = generator.getBestPattern(skeleton, UDATPG_MATCH_HOUR_FIELD_LENGTH,
- status);
+ pattern = generator->getBestPattern(skeleton, UDATPG_MATCH_HOUR_FIELD_LENGTH,
+ status);
CHECK(U_SUCCESS(status));
// Make formatter from skeleton. Calendar and numbering system are added
@@ -971,9 +960,9 @@ std::unique_ptr<icu::SimpleDateFormat> CreateICUDateFormat(
class DateFormatCache {
public:
- icu::SimpleDateFormat* Create(
- const icu::Locale& icu_locale, const icu::UnicodeString& skeleton,
- icu::DateTimePatternGenerator& generator) { // NOLINT(runtime/references)
+ icu::SimpleDateFormat* Create(const icu::Locale& icu_locale,
+ const icu::UnicodeString& skeleton,
+ icu::DateTimePatternGenerator* generator) {
std::string key;
skeleton.toUTF8String<std::string>(key);
key += ":";
@@ -1002,7 +991,7 @@ class DateFormatCache {
std::unique_ptr<icu::SimpleDateFormat> CreateICUDateFormatFromCache(
const icu::Locale& icu_locale, const icu::UnicodeString& skeleton,
- icu::DateTimePatternGenerator& generator) { // NOLINT(runtime/references)
+ icu::DateTimePatternGenerator* generator) {
static base::LazyInstance<DateFormatCache>::type cache =
LAZY_INSTANCE_INITIALIZER;
return std::unique_ptr<icu::SimpleDateFormat>(
@@ -1138,8 +1127,7 @@ icu::UnicodeString ReplaceSkeleton(const icu::UnicodeString input,
std::unique_ptr<icu::SimpleDateFormat> DateTimeStylePattern(
JSDateTimeFormat::DateTimeStyle date_style,
JSDateTimeFormat::DateTimeStyle time_style, const icu::Locale& icu_locale,
- Intl::HourCycle hc,
- icu::DateTimePatternGenerator& generator) { // NOLINT(runtime/references)
+ Intl::HourCycle hc, icu::DateTimePatternGenerator* generator) {
std::unique_ptr<icu::SimpleDateFormat> result;
if (date_style != JSDateTimeFormat::DateTimeStyle::kUndefined) {
if (time_style != JSDateTimeFormat::DateTimeStyle::kUndefined) {
@@ -1164,10 +1152,40 @@ std::unique_ptr<icu::SimpleDateFormat> DateTimeStylePattern(
UNREACHABLE();
}
}
+
+ UErrorCode status = U_ZERO_ERROR;
+ // Somehow we fail to create the instance.
+ if (result.get() == nullptr) {
+ icu::Locale modified_locale(icu_locale);
+ // Fallback to the locale without "nu".
+ if (!icu_locale.getUnicodeKeywordValue<std::string>("nu", status).empty()) {
+ status = U_ZERO_ERROR;
+ modified_locale.setUnicodeKeywordValue("nu", nullptr, status);
+ return DateTimeStylePattern(date_style, time_style, modified_locale, hc,
+ generator);
+ }
+ status = U_ZERO_ERROR;
+ // Fallback to the locale without "hc".
+ if (!icu_locale.getUnicodeKeywordValue<std::string>("hc", status).empty()) {
+ status = U_ZERO_ERROR;
+ modified_locale.setUnicodeKeywordValue("hc", nullptr, status);
+ return DateTimeStylePattern(date_style, time_style, modified_locale, hc,
+ generator);
+ }
+ status = U_ZERO_ERROR;
+ // Fallback to the locale without "ca".
+ if (!icu_locale.getUnicodeKeywordValue<std::string>("ca", status).empty()) {
+ status = U_ZERO_ERROR;
+ modified_locale.setUnicodeKeywordValue("ca", nullptr, status);
+ return DateTimeStylePattern(date_style, time_style, modified_locale, hc,
+ generator);
+ }
+ return nullptr;
+ }
icu::UnicodeString pattern;
pattern = result->toPattern(pattern);
- UErrorCode status = U_ZERO_ERROR;
+ status = U_ZERO_ERROR;
icu::UnicodeString skeleton =
icu::DateTimePatternGenerator::staticGetSkeleton(pattern, status);
CHECK(U_SUCCESS(status));
@@ -1185,7 +1203,8 @@ class DateTimePatternGeneratorCache {
public:
// Return a clone copy that the caller have to free.
icu::DateTimePatternGenerator* CreateGenerator(const icu::Locale& locale) {
- std::string key(locale.getBaseName());
+ std::string key(FLAG_harmony_intl_other_calendars ? locale.getName()
+ : locale.getBaseName());
base::MutexGuard guard(&mutex_);
auto it = map_.find(key);
if (it != map_.end()) {
@@ -1193,7 +1212,8 @@ class DateTimePatternGeneratorCache {
}
UErrorCode status = U_ZERO_ERROR;
map_[key].reset(icu::DateTimePatternGenerator::createInstance(
- icu::Locale(key.c_str()), status));
+ FLAG_harmony_intl_other_calendars ? locale : icu::Locale(key.c_str()),
+ status));
// Fallback to use "root".
if (U_FAILURE(status)) {
status = U_ZERO_ERROR;
@@ -1216,7 +1236,7 @@ enum FormatMatcherOption { kBestFit, kBasic };
// ecma402/#sec-initializedatetimeformat
MaybeHandle<JSDateTimeFormat> JSDateTimeFormat::New(
Isolate* isolate, Handle<Map> map, Handle<Object> locales,
- Handle<Object> input_options) {
+ Handle<Object> input_options, const char* service) {
Factory* factory = isolate->factory();
// 1. Let requestedLocales be ? CanonicalizeLocaleList(locales).
Maybe<std::vector<std::string>> maybe_requested_locales =
@@ -1235,6 +1255,10 @@ MaybeHandle<JSDateTimeFormat> JSDateTimeFormat::New(
// 4. Let matcher be ? GetOption(options, "localeMatcher", "string",
// « "lookup", "best fit" », "best fit").
// 5. Set opt.[[localeMatcher]] to matcher.
+ Maybe<Intl::MatcherOption> maybe_locale_matcher =
+ Intl::GetLocaleMatcher(isolate, options, service);
+ MAYBE_RETURN(maybe_locale_matcher, MaybeHandle<JSDateTimeFormat>());
+ Intl::MatcherOption locale_matcher = maybe_locale_matcher.FromJust();
std::unique_ptr<char[]> calendar_str = nullptr;
std::unique_ptr<char[]> numbering_system_str = nullptr;
@@ -1242,13 +1266,12 @@ MaybeHandle<JSDateTimeFormat> JSDateTimeFormat::New(
const std::vector<const char*> empty_values = {};
// 6. Let calendar be ? GetOption(options, "calendar",
// "string", undefined, undefined).
- Maybe<bool> maybe_calendar =
- Intl::GetStringOption(isolate, options, "calendar", empty_values,
- "Intl.NumberFormat", &calendar_str);
+ Maybe<bool> maybe_calendar = Intl::GetStringOption(
+ isolate, options, "calendar", empty_values, service, &calendar_str);
MAYBE_RETURN(maybe_calendar, MaybeHandle<JSDateTimeFormat>());
if (maybe_calendar.FromJust() && calendar_str != nullptr) {
icu::Locale default_locale;
- if (!Intl::IsValidCalendar(default_locale, calendar_str.get())) {
+ if (!Intl::IsWellFormedCalendar(calendar_str.get())) {
THROW_NEW_ERROR(
isolate,
NewRangeError(
@@ -1261,26 +1284,21 @@ MaybeHandle<JSDateTimeFormat> JSDateTimeFormat::New(
// 8. Let numberingSystem be ? GetOption(options, "numberingSystem",
// "string", undefined, undefined).
Maybe<bool> maybe_numberingSystem = Intl::GetNumberingSystem(
- isolate, options, "Intl.NumberFormat", &numbering_system_str);
+ isolate, options, service, &numbering_system_str);
MAYBE_RETURN(maybe_numberingSystem, MaybeHandle<JSDateTimeFormat>());
}
- Maybe<Intl::MatcherOption> maybe_locale_matcher =
- Intl::GetLocaleMatcher(isolate, options, "Intl.DateTimeFormat");
- MAYBE_RETURN(maybe_locale_matcher, MaybeHandle<JSDateTimeFormat>());
- Intl::MatcherOption locale_matcher = maybe_locale_matcher.FromJust();
-
// 6. Let hour12 be ? GetOption(options, "hour12", "boolean", undefined,
// undefined).
bool hour12;
- Maybe<bool> maybe_get_hour12 = Intl::GetBoolOption(
- isolate, options, "hour12", "Intl.DateTimeFormat", &hour12);
+ Maybe<bool> maybe_get_hour12 =
+ Intl::GetBoolOption(isolate, options, "hour12", service, &hour12);
MAYBE_RETURN(maybe_get_hour12, Handle<JSDateTimeFormat>());
// 7. Let hourCycle be ? GetOption(options, "hourCycle", "string", « "h11",
// "h12", "h23", "h24" », undefined).
Maybe<Intl::HourCycle> maybe_hour_cycle =
- Intl::GetHourCycle(isolate, options, "Intl.DateTimeFormat");
+ Intl::GetHourCycle(isolate, options, service);
MAYBE_RETURN(maybe_hour_cycle, MaybeHandle<JSDateTimeFormat>());
Intl::HourCycle hour_cycle = maybe_hour_cycle.FromJust();
@@ -1309,12 +1327,14 @@ MaybeHandle<JSDateTimeFormat> JSDateTimeFormat::New(
DCHECK(!icu_locale.isBogus());
UErrorCode status = U_ZERO_ERROR;
- if (calendar_str != nullptr) {
+ if (calendar_str != nullptr &&
+ Intl::IsValidCalendar(icu_locale, calendar_str.get())) {
icu_locale.setUnicodeKeywordValue("ca", calendar_str.get(), status);
CHECK(U_SUCCESS(status));
}
- if (numbering_system_str != nullptr) {
+ if (numbering_system_str != nullptr &&
+ Intl::IsValidNumberingSystem(numbering_system_str.get())) {
icu_locale.setUnicodeKeywordValue("nu", numbering_system_str.get(), status);
CHECK(U_SUCCESS(status));
}
@@ -1322,9 +1342,8 @@ MaybeHandle<JSDateTimeFormat> JSDateTimeFormat::New(
// 17. Let timeZone be ? Get(options, "timeZone").
const std::vector<const char*> empty_values;
std::unique_ptr<char[]> timezone = nullptr;
- Maybe<bool> maybe_timezone =
- Intl::GetStringOption(isolate, options, "timeZone", empty_values,
- "Intl.DateTimeFormat", &timezone);
+ Maybe<bool> maybe_timezone = Intl::GetStringOption(
+ isolate, options, "timeZone", empty_values, service, &timezone);
MAYBE_RETURN(maybe_timezone, Handle<JSDateTimeFormat>());
std::unique_ptr<icu::TimeZone> tz = CreateTimeZone(isolate, timezone.get());
@@ -1409,43 +1428,40 @@ MaybeHandle<JSDateTimeFormat> JSDateTimeFormat::New(
DateTimeStyle time_style = DateTimeStyle::kUndefined;
std::unique_ptr<icu::SimpleDateFormat> icu_date_format;
- if (FLAG_harmony_intl_datetime_style) {
- // 28. Let dateStyle be ? GetOption(options, "dateStyle", "string", «
- // "full", "long", "medium", "short" », undefined).
- Maybe<DateTimeStyle> maybe_date_style =
- Intl::GetStringOption<DateTimeStyle>(
- isolate, options, "dateStyle", "Intl.DateTimeFormat",
- {"full", "long", "medium", "short"},
- {DateTimeStyle::kFull, DateTimeStyle::kLong, DateTimeStyle::kMedium,
- DateTimeStyle::kShort},
- DateTimeStyle::kUndefined);
- MAYBE_RETURN(maybe_date_style, MaybeHandle<JSDateTimeFormat>());
- // 29. If dateStyle is not undefined, set dateTimeFormat.[[DateStyle]] to
- // dateStyle.
- date_style = maybe_date_style.FromJust();
-
- // 30. Let timeStyle be ? GetOption(options, "timeStyle", "string", «
- // "full", "long", "medium", "short" »).
- Maybe<DateTimeStyle> maybe_time_style =
- Intl::GetStringOption<DateTimeStyle>(
- isolate, options, "timeStyle", "Intl.DateTimeFormat",
- {"full", "long", "medium", "short"},
- {DateTimeStyle::kFull, DateTimeStyle::kLong, DateTimeStyle::kMedium,
- DateTimeStyle::kShort},
- DateTimeStyle::kUndefined);
- MAYBE_RETURN(maybe_time_style, MaybeHandle<JSDateTimeFormat>());
-
- // 31. If timeStyle is not undefined, set dateTimeFormat.[[TimeStyle]] to
- // timeStyle.
- time_style = maybe_time_style.FromJust();
-
- // 32. If dateStyle or timeStyle are not undefined, then
- if (date_style != DateTimeStyle::kUndefined ||
- time_style != DateTimeStyle::kUndefined) {
- icu_date_format = DateTimeStylePattern(date_style, time_style, icu_locale,
- hc, *generator);
- }
+ // 28. Let dateStyle be ? GetOption(options, "dateStyle", "string", «
+ // "full", "long", "medium", "short" », undefined).
+ Maybe<DateTimeStyle> maybe_date_style = Intl::GetStringOption<DateTimeStyle>(
+ isolate, options, "dateStyle", service,
+ {"full", "long", "medium", "short"},
+ {DateTimeStyle::kFull, DateTimeStyle::kLong, DateTimeStyle::kMedium,
+ DateTimeStyle::kShort},
+ DateTimeStyle::kUndefined);
+ MAYBE_RETURN(maybe_date_style, MaybeHandle<JSDateTimeFormat>());
+ // 29. If dateStyle is not undefined, set dateTimeFormat.[[DateStyle]] to
+ // dateStyle.
+ date_style = maybe_date_style.FromJust();
+
+ // 30. Let timeStyle be ? GetOption(options, "timeStyle", "string", «
+ // "full", "long", "medium", "short" »).
+ Maybe<DateTimeStyle> maybe_time_style = Intl::GetStringOption<DateTimeStyle>(
+ isolate, options, "timeStyle", service,
+ {"full", "long", "medium", "short"},
+ {DateTimeStyle::kFull, DateTimeStyle::kLong, DateTimeStyle::kMedium,
+ DateTimeStyle::kShort},
+ DateTimeStyle::kUndefined);
+ MAYBE_RETURN(maybe_time_style, MaybeHandle<JSDateTimeFormat>());
+
+ // 31. If timeStyle is not undefined, set dateTimeFormat.[[TimeStyle]] to
+ // timeStyle.
+ time_style = maybe_time_style.FromJust();
+
+ // 32. If dateStyle or timeStyle are not undefined, then
+ if (date_style != DateTimeStyle::kUndefined ||
+ time_style != DateTimeStyle::kUndefined) {
+ icu_date_format = DateTimeStylePattern(date_style, time_style, icu_locale,
+ hc, generator.get());
}
+
// 33. Else,
if (icu_date_format.get() == nullptr) {
bool has_hour_option = false;
@@ -1456,9 +1472,9 @@ MaybeHandle<JSDateTimeFormat> JSDateTimeFormat::New(
// i. Let prop be the name given in the Property column of the row.
// ii. Let value be ? GetOption(options, prop, "string", « the strings
// given in the Values column of the row », undefined).
- Maybe<bool> maybe_get_option = Intl::GetStringOption(
- isolate, options, item.property.c_str(), item.allowed_values,
- "Intl.DateTimeFormat", &input);
+ Maybe<bool> maybe_get_option =
+ Intl::GetStringOption(isolate, options, item.property.c_str(),
+ item.allowed_values, service, &input);
MAYBE_RETURN(maybe_get_option, Handle<JSDateTimeFormat>());
if (maybe_get_option.FromJust()) {
if (item.property == "hour") {
@@ -1487,8 +1503,7 @@ MaybeHandle<JSDateTimeFormat> JSDateTimeFormat::New(
// « "basic", "best fit" », "best fit").
Maybe<FormatMatcherOption> maybe_format_matcher =
Intl::GetStringOption<FormatMatcherOption>(
- isolate, options, "formatMatcher", "Intl.DateTimeFormat",
- {"best fit", "basic"},
+ isolate, options, "formatMatcher", service, {"best fit", "basic"},
{FormatMatcherOption::kBestFit, FormatMatcherOption::kBasic},
FormatMatcherOption::kBestFit);
MAYBE_RETURN(maybe_format_matcher, MaybeHandle<JSDateTimeFormat>());
@@ -1496,13 +1511,13 @@ MaybeHandle<JSDateTimeFormat> JSDateTimeFormat::New(
// FormatMatcherOption format_matcher = maybe_format_matcher.FromJust();
icu::UnicodeString skeleton_ustr(skeleton.c_str());
- icu_date_format =
- CreateICUDateFormatFromCache(icu_locale, skeleton_ustr, *generator);
+ icu_date_format = CreateICUDateFormatFromCache(icu_locale, skeleton_ustr,
+ generator.get());
if (icu_date_format.get() == nullptr) {
// Remove extensions and try again.
icu_locale = icu::Locale(icu_locale.getBaseName());
- icu_date_format =
- CreateICUDateFormatFromCache(icu_locale, skeleton_ustr, *generator);
+ icu_date_format = CreateICUDateFormatFromCache(icu_locale, skeleton_ustr,
+ generator.get());
if (icu_date_format.get() == nullptr) {
FATAL("Failed to create ICU date format, are ICU data files missing?");
}
@@ -1561,12 +1576,16 @@ MaybeHandle<JSDateTimeFormat> JSDateTimeFormat::New(
isolate->factory()->NewFastOrSlowJSObjectFromMap(map));
DisallowHeapAllocation no_gc;
date_time_format->set_flags(0);
- date_time_format->set_hour_cycle(hc);
if (date_style != DateTimeStyle::kUndefined) {
date_time_format->set_date_style(date_style);
}
if (time_style != DateTimeStyle::kUndefined) {
date_time_format->set_time_style(time_style);
+ date_time_format->set_hour_cycle(hc);
+ }
+ if ((date_style == DateTimeStyle::kUndefined) &&
+ (time_style == DateTimeStyle::kUndefined)) {
+ date_time_format->set_hour_cycle(hc);
}
date_time_format->set_icu_locale(*managed_locale);
date_time_format->set_icu_simple_date_format(*managed_format);
@@ -1585,11 +1604,9 @@ Handle<String> IcuDateFieldIdToDateType(int32_t field_id, Isolate* isolate) {
return isolate->factory()->literal_string();
case UDAT_YEAR_FIELD:
case UDAT_EXTENDED_YEAR_FIELD:
- case UDAT_YEAR_NAME_FIELD:
return isolate->factory()->year_string();
- case UDAT_QUARTER_FIELD:
- case UDAT_STANDALONE_QUARTER_FIELD:
- return isolate->factory()->quarter_string();
+ case UDAT_YEAR_NAME_FIELD:
+ return isolate->factory()->yearName_string();
case UDAT_MONTH_FIELD:
case UDAT_STANDALONE_MONTH_FIELD:
return isolate->factory()->month_string();
@@ -1624,6 +1641,11 @@ Handle<String> IcuDateFieldIdToDateType(int32_t field_id, Isolate* isolate) {
return isolate->factory()->era_string();
case UDAT_FRACTIONAL_SECOND_FIELD:
return isolate->factory()->fractionalSecond_string();
+ case UDAT_RELATED_YEAR_FIELD:
+ return isolate->factory()->relatedYear_string();
+
+ case UDAT_QUARTER_FIELD:
+ case UDAT_STANDALONE_QUARTER_FIELD:
default:
// Other UDAT_*_FIELD's cannot show up because there is no way to specify
// them via options of Intl.DateTimeFormat.
diff --git a/deps/v8/src/objects/js-date-time-format.h b/deps/v8/src/objects/js-date-time-format.h
index f4a8ccc8f5..acf99b3618 100644
--- a/deps/v8/src/objects/js-date-time-format.h
+++ b/deps/v8/src/objects/js-date-time-format.h
@@ -34,7 +34,7 @@ class JSDateTimeFormat : public JSObject {
public:
V8_WARN_UNUSED_RESULT static MaybeHandle<JSDateTimeFormat> New(
Isolate* isolate, Handle<Map> map, Handle<Object> locales,
- Handle<Object> options);
+ Handle<Object> options, const char* service);
V8_WARN_UNUSED_RESULT static MaybeHandle<JSObject> ResolvedOptions(
Isolate* isolate, Handle<JSDateTimeFormat> date_time_format);
@@ -82,7 +82,8 @@ class JSDateTimeFormat : public JSObject {
V8_WARN_UNUSED_RESULT static MaybeHandle<String> ToLocaleDateTime(
Isolate* isolate, Handle<Object> date, Handle<Object> locales,
- Handle<Object> options, RequiredOption required, DefaultsOption defaults);
+ Handle<Object> options, RequiredOption required, DefaultsOption defaults,
+ const char* method);
V8_EXPORT_PRIVATE static const std::set<std::string>& GetAvailableLocales();
@@ -94,7 +95,7 @@ class JSDateTimeFormat : public JSObject {
// Layout description.
DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
- TORQUE_GENERATED_JSDATE_TIME_FORMAT_FIELDS)
+ TORQUE_GENERATED_JS_DATE_TIME_FORMAT_FIELDS)
inline void set_hour_cycle(Intl::HourCycle hour_cycle);
inline Intl::HourCycle hour_cycle() const;
diff --git a/deps/v8/src/objects/js-list-format.cc b/deps/v8/src/objects/js-list-format.cc
index 4f303b1874..90b93e308a 100644
--- a/deps/v8/src/objects/js-list-format.cc
+++ b/deps/v8/src/objects/js-list-format.cc
@@ -252,40 +252,22 @@ namespace {
// Extract String from JSArray into array of UnicodeString
Maybe<std::vector<icu::UnicodeString>> ToUnicodeStringArray(
Isolate* isolate, Handle<JSArray> array) {
- Factory* factory = isolate->factory();
- // In general, ElementsAccessor::Get actually isn't guaranteed to give us the
- // elements in order. But if it is a holey array, it will cause the exception
- // with the IsString check.
+ // Thanks to iterable-to-list preprocessing, we never see dictionary-mode
+ // arrays here, so the loop below can construct an entry from the index.
+ DCHECK(array->HasFastElements(isolate));
auto* accessor = array->GetElementsAccessor();
uint32_t length = accessor->NumberOfElements(*array);
- // ecma402 #sec-createpartsfromlist
- // 2. If list contains any element value such that Type(value) is not String,
- // throw a TypeError exception.
- //
- // Per spec it looks like we're supposed to throw a TypeError exception if the
- // item isn't already a string, rather than coercing to a string.
std::vector<icu::UnicodeString> result;
for (uint32_t i = 0; i < length; i++) {
- DCHECK(accessor->HasElement(*array, i));
- Handle<Object> item = accessor->Get(array, i);
- DCHECK(!item.is_null());
- if (!item->IsString()) {
- THROW_NEW_ERROR_RETURN_VALUE(
- isolate,
- NewTypeError(MessageTemplate::kArrayItemNotType,
- factory->list_string(),
- // TODO(ftang): For dictionary-mode arrays, i isn't
- // actually the index in the array but the index in the
- // dictionary.
- factory->NewNumber(i), factory->String_string()),
- Nothing<std::vector<icu::UnicodeString>>());
- }
+ InternalIndex entry(i);
+ DCHECK(accessor->HasEntry(*array, entry));
+ Handle<Object> item = accessor->Get(array, entry);
+ DCHECK(item->IsString());
Handle<String> item_str = Handle<String>::cast(item);
if (!item_str->IsFlat()) item_str = String::Flatten(isolate, item_str);
result.push_back(Intl::ToICUUnicodeString(isolate, item_str));
}
- DCHECK(!array->HasDictionaryElements());
return Just(result);
}
@@ -294,9 +276,6 @@ MaybeHandle<T> FormatListCommon(
Isolate* isolate, Handle<JSListFormat> format, Handle<JSArray> list,
MaybeHandle<T> (*formatToResult)(Isolate*, const icu::FormattedValue&)) {
DCHECK(!list->IsUndefined());
- // ecma402 #sec-createpartsfromlist
- // 2. If list contains any element value such that Type(value) is not String,
- // throw a TypeError exception.
Maybe<std::vector<icu::UnicodeString>> maybe_array =
ToUnicodeStringArray(isolate, list);
MAYBE_RETURN(maybe_array, Handle<T>());
diff --git a/deps/v8/src/objects/js-list-format.h b/deps/v8/src/objects/js-list-format.h
index df937722e6..1ff76790f9 100644
--- a/deps/v8/src/objects/js-list-format.h
+++ b/deps/v8/src/objects/js-list-format.h
@@ -104,7 +104,7 @@ class JSListFormat : public JSObject {
// Layout description.
DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
- TORQUE_GENERATED_JSLIST_FORMAT_FIELDS)
+ TORQUE_GENERATED_JS_LIST_FORMAT_FIELDS)
OBJECT_CONSTRUCTORS(JSListFormat, JSObject);
};
diff --git a/deps/v8/src/objects/js-locale.cc b/deps/v8/src/objects/js-locale.cc
index 4a66ea9eca..9e8053b1dc 100644
--- a/deps/v8/src/objects/js-locale.cc
+++ b/deps/v8/src/objects/js-locale.cc
@@ -168,10 +168,20 @@ bool IsUnicodeVariantSubtag(const std::string& value) {
bool IsExtensionSingleton(const std::string& value) {
return IsAlphanum(value, 1, 1);
}
+} // namespace
+
+bool JSLocale::Is38AlphaNumList(const std::string& value) {
+ std::size_t found = value.find("-");
+ if (found == std::string::npos) {
+ return IsAlphanum(value, 3, 8);
+ }
+ return IsAlphanum(value.substr(0, found), 3, 8) &&
+ JSLocale::Is38AlphaNumList(value.substr(found + 1));
+}
// TODO(ftang) Replace the following check w/ icu::LocaleBuilder
// once ICU64 land in March 2019.
-bool StartsWithUnicodeLanguageId(const std::string& value) {
+bool JSLocale::StartsWithUnicodeLanguageId(const std::string& value) {
// unicode_language_id =
// unicode_language_subtag (sep unicode_script_subtag)?
// (sep unicode_region_subtag)? (sep unicode_variant_subtag)* ;
@@ -207,6 +217,7 @@ bool StartsWithUnicodeLanguageId(const std::string& value) {
return true;
}
+namespace {
Maybe<bool> ApplyOptionsToTag(Isolate* isolate, Handle<String> tag,
Handle<JSReceiver> options,
icu::LocaleBuilder* builder) {
@@ -223,7 +234,7 @@ Maybe<bool> ApplyOptionsToTag(Isolate* isolate, Handle<String> tag,
CHECK_NOT_NULL(*bcp47_tag);
// 2. If IsStructurallyValidLanguageTag(tag) is false, throw a RangeError
// exception.
- if (!StartsWithUnicodeLanguageId(*bcp47_tag)) {
+ if (!JSLocale::StartsWithUnicodeLanguageId(*bcp47_tag)) {
return Just(false);
}
UErrorCode status = U_ZERO_ERROR;
diff --git a/deps/v8/src/objects/js-locale.h b/deps/v8/src/objects/js-locale.h
index e1806e6b7f..f2fca3ce14 100644
--- a/deps/v8/src/objects/js-locale.h
+++ b/deps/v8/src/objects/js-locale.h
@@ -49,6 +49,13 @@ class JSLocale : public JSObject {
static Handle<String> ToString(Isolate* isolate, Handle<JSLocale> locale);
static std::string ToString(Handle<JSLocale> locale);
+ // Help function to validate locale by other Intl objects.
+ static bool StartsWithUnicodeLanguageId(const std::string& value);
+
+ // Help function to check well-formed
+ // "(3*8alphanum) *("-" (3*8alphanum)) sequence" sequence
+ static bool Is38AlphaNumList(const std::string& value);
+
DECL_CAST(JSLocale)
DECL_ACCESSORS(icu_locale, Managed<icu::Locale>)
@@ -58,7 +65,7 @@ class JSLocale : public JSObject {
// Layout description.
DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
- TORQUE_GENERATED_JSLOCALE_FIELDS)
+ TORQUE_GENERATED_JS_LOCALE_FIELDS)
OBJECT_CONSTRUCTORS(JSLocale, JSObject);
};
diff --git a/deps/v8/src/objects/js-number-format-inl.h b/deps/v8/src/objects/js-number-format-inl.h
index afdfef89f2..f68252ab0b 100644
--- a/deps/v8/src/objects/js-number-format-inl.h
+++ b/deps/v8/src/objects/js-number-format-inl.h
@@ -26,46 +26,8 @@ ACCESSORS(JSNumberFormat, icu_number_formatter,
kIcuNumberFormatterOffset)
ACCESSORS(JSNumberFormat, bound_format, Object, kBoundFormatOffset)
-// Currenct ECMA 402 spec mandate to record (Min|Max)imumFractionDigits
-// uncondictionally while the unified number proposal eventually will only
-// record either (Min|Max)imumFractionDigits or (Min|Max)imumSignaficantDigits
-// Since LocalizedNumberFormatter can only remember one set, and during
-// 2019-1-17 ECMA402 meeting that the committee decide not to take a PR to
-// address that prior to the unified number proposal, we have to add these two
-// 5 bits int into flags to remember the (Min|Max)imumFractionDigits while
-// (Min|Max)imumSignaficantDigits is present.
-// TODO(ftang) remove the following once we ship int-number-format-unified
-// * SMI_ACCESSORS of flags
-// * Four inline functions: (set_)?(min|max)imum_fraction_digits
-
SMI_ACCESSORS(JSNumberFormat, flags, kFlagsOffset)
-inline int JSNumberFormat::minimum_fraction_digits() const {
- return MinimumFractionDigitsBits::decode(flags());
-}
-
-inline void JSNumberFormat::set_minimum_fraction_digits(int digits) {
- DCHECK_GE(MinimumFractionDigitsBits::kMax, digits);
- DCHECK_LE(0, digits);
- DCHECK_GE(20, digits);
- int hints = flags();
- hints = MinimumFractionDigitsBits::update(hints, digits);
- set_flags(hints);
-}
-
-inline int JSNumberFormat::maximum_fraction_digits() const {
- return MaximumFractionDigitsBits::decode(flags());
-}
-
-inline void JSNumberFormat::set_maximum_fraction_digits(int digits) {
- DCHECK_GE(MaximumFractionDigitsBits::kMax, digits);
- DCHECK_LE(0, digits);
- DCHECK_GE(20, digits);
- int hints = flags();
- hints = MaximumFractionDigitsBits::update(hints, digits);
- set_flags(hints);
-}
-
inline void JSNumberFormat::set_style(Style style) {
DCHECK_GE(StyleBits::kMax, style);
int hints = flags();
diff --git a/deps/v8/src/objects/js-number-format.cc b/deps/v8/src/objects/js-number-format.cc
index ff564975d6..c065a3f725 100644
--- a/deps/v8/src/objects/js-number-format.cc
+++ b/deps/v8/src/objects/js-number-format.cc
@@ -33,7 +33,6 @@ namespace {
// [[CurrencyDisplay]] is one of the values "code", "symbol", "name",
// or "narrowSymbol" identifying the display of the currency number format.
-// Note: "narrowSymbol" is added in proposal-unified-intl-numberformat
enum class CurrencyDisplay {
CODE,
SYMBOL,
@@ -621,12 +620,11 @@ JSNumberFormat::SetDigitOptionsToFormatter(
result = result.integerWidth(icu::number::IntegerWidth::zeroFillTo(
digit_options.minimum_integer_digits));
}
- if (FLAG_harmony_intl_numberformat_unified) {
- // Value -1 of minimum_significant_digits represent the roundingtype is
- // "compact-rounding".
- if (digit_options.minimum_significant_digits < 0) {
- return result;
- }
+
+ // Value -1 of minimum_significant_digits represent the roundingtype is
+ // "compact-rounding".
+ if (digit_options.minimum_significant_digits < 0) {
+ return result;
}
icu::number::Precision precision =
(digit_options.minimum_significant_digits > 0)
@@ -704,15 +702,12 @@ Handle<JSObject> JSNumberFormat::ResolvedOptions(
isolate, options, factory->currencyDisplay_string(),
CurrencyDisplayString(isolate, skeleton), Just(kDontThrow))
.FromJust());
- if (FLAG_harmony_intl_numberformat_unified) {
- CHECK(JSReceiver::CreateDataProperty(
- isolate, options, factory->currencySign_string(),
- CurrencySignString(isolate, skeleton), Just(kDontThrow))
- .FromJust());
- }
+ CHECK(JSReceiver::CreateDataProperty(
+ isolate, options, factory->currencySign_string(),
+ CurrencySignString(isolate, skeleton), Just(kDontThrow))
+ .FromJust());
}
- if (FLAG_harmony_intl_numberformat_unified) {
if (style == JSNumberFormat::Style::UNIT) {
std::string unit = UnitFromSkeleton(skeleton);
if (!unit.empty()) {
@@ -727,7 +722,6 @@ Handle<JSObject> JSNumberFormat::ResolvedOptions(
UnitDisplayString(isolate, skeleton), Just(kDontThrow))
.FromJust());
}
- }
CHECK(
JSReceiver::CreateDataProperty(
@@ -735,45 +729,25 @@ Handle<JSObject> JSNumberFormat::ResolvedOptions(
factory->NewNumberFromInt(MinimumIntegerDigitsFromSkeleton(skeleton)),
Just(kDontThrow))
.FromJust());
+
int32_t minimum = 0, maximum = 0;
- bool output_fraction =
- FractionDigitsFromSkeleton(skeleton, &minimum, &maximum);
-
- if (!FLAG_harmony_intl_numberformat_unified && !output_fraction) {
- // Currenct ECMA 402 spec mandate to record (Min|Max)imumFractionDigits
- // uncondictionally while the unified number proposal eventually will only
- // record either (Min|Max)imumFractionDigits or
- // (Min|Max)imumSignaficantDigits Since LocalizedNumberFormatter can only
- // remember one set, and during 2019-1-17 ECMA402 meeting that the committee
- // decide not to take a PR to address that prior to the unified number
- // proposal, we have to add these two 5 bits int into flags to remember the
- // (Min|Max)imumFractionDigits while (Min|Max)imumSignaficantDigits is
- // present.
- // TODO(ftang) remove the following two lines once we ship
- // int-number-format-unified
- output_fraction = true;
- minimum = number_format->minimum_fraction_digits();
- maximum = number_format->maximum_fraction_digits();
- }
- if (output_fraction) {
+ if (SignificantDigitsFromSkeleton(skeleton, &minimum, &maximum)) {
CHECK(JSReceiver::CreateDataProperty(
- isolate, options, factory->minimumFractionDigits_string(),
+ isolate, options, factory->minimumSignificantDigits_string(),
factory->NewNumberFromInt(minimum), Just(kDontThrow))
.FromJust());
CHECK(JSReceiver::CreateDataProperty(
- isolate, options, factory->maximumFractionDigits_string(),
+ isolate, options, factory->maximumSignificantDigits_string(),
factory->NewNumberFromInt(maximum), Just(kDontThrow))
.FromJust());
- }
- minimum = 0;
- maximum = 0;
- if (SignificantDigitsFromSkeleton(skeleton, &minimum, &maximum)) {
+ } else {
+ FractionDigitsFromSkeleton(skeleton, &minimum, &maximum);
CHECK(JSReceiver::CreateDataProperty(
- isolate, options, factory->minimumSignificantDigits_string(),
+ isolate, options, factory->minimumFractionDigits_string(),
factory->NewNumberFromInt(minimum), Just(kDontThrow))
.FromJust());
CHECK(JSReceiver::CreateDataProperty(
- isolate, options, factory->maximumSignificantDigits_string(),
+ isolate, options, factory->maximumFractionDigits_string(),
factory->NewNumberFromInt(maximum), Just(kDontThrow))
.FromJust());
}
@@ -783,24 +757,22 @@ Handle<JSObject> JSNumberFormat::ResolvedOptions(
factory->ToBoolean(UseGroupingFromSkeleton(skeleton)),
Just(kDontThrow))
.FromJust());
- if (FLAG_harmony_intl_numberformat_unified) {
- Notation notation = NotationFromSkeleton(skeleton);
- CHECK(JSReceiver::CreateDataProperty(
- isolate, options, factory->notation_string(),
- NotationAsString(isolate, notation), Just(kDontThrow))
- .FromJust());
- // Only output compactDisplay when notation is compact.
- if (notation == Notation::COMPACT) {
- CHECK(JSReceiver::CreateDataProperty(
- isolate, options, factory->compactDisplay_string(),
- CompactDisplayString(isolate, skeleton), Just(kDontThrow))
- .FromJust());
- }
+ Notation notation = NotationFromSkeleton(skeleton);
+ CHECK(JSReceiver::CreateDataProperty(
+ isolate, options, factory->notation_string(),
+ NotationAsString(isolate, notation), Just(kDontThrow))
+ .FromJust());
+ // Only output compactDisplay when notation is compact.
+ if (notation == Notation::COMPACT) {
CHECK(JSReceiver::CreateDataProperty(
- isolate, options, factory->signDisplay_string(),
- SignDisplayString(isolate, skeleton), Just(kDontThrow))
+ isolate, options, factory->compactDisplay_string(),
+ CompactDisplayString(isolate, skeleton), Just(kDontThrow))
.FromJust());
}
+ CHECK(JSReceiver::CreateDataProperty(
+ isolate, options, factory->signDisplay_string(),
+ SignDisplayString(isolate, skeleton), Just(kDontThrow))
+ .FromJust());
return options;
}
@@ -837,7 +809,8 @@ MaybeHandle<JSNumberFormat> JSNumberFormat::UnwrapNumberFormat(
MaybeHandle<JSNumberFormat> JSNumberFormat::New(Isolate* isolate,
Handle<Map> map,
Handle<Object> locales,
- Handle<Object> options_obj) {
+ Handle<Object> options_obj,
+ const char* service) {
Factory* factory = isolate->factory();
// 1. Let requestedLocales be ? CanonicalizeLocaleList(locales).
@@ -854,10 +827,9 @@ MaybeHandle<JSNumberFormat> JSNumberFormat::New(Isolate* isolate,
} else {
// 3. Else
// 3. a. Let options be ? ToObject(options).
- ASSIGN_RETURN_ON_EXCEPTION(
- isolate, options_obj,
- Object::ToObject(isolate, options_obj, "Intl.NumberFormat"),
- JSNumberFormat);
+ ASSIGN_RETURN_ON_EXCEPTION(isolate, options_obj,
+ Object::ToObject(isolate, options_obj, service),
+ JSNumberFormat);
}
// At this point, options_obj can either be a JSObject or a JSProxy only.
@@ -868,7 +840,7 @@ MaybeHandle<JSNumberFormat> JSNumberFormat::New(Isolate* isolate,
// "lookup", "best fit" », "best fit").
// 6. Set opt.[[localeMatcher]] to matcher.
Maybe<Intl::MatcherOption> maybe_locale_matcher =
- Intl::GetLocaleMatcher(isolate, options, "Intl.NumberFormat");
+ Intl::GetLocaleMatcher(isolate, options, service);
MAYBE_RETURN(maybe_locale_matcher, MaybeHandle<JSNumberFormat>());
Intl::MatcherOption matcher = maybe_locale_matcher.FromJust();
@@ -877,7 +849,7 @@ MaybeHandle<JSNumberFormat> JSNumberFormat::New(Isolate* isolate,
// 7. Let _numberingSystem_ be ? GetOption(_options_, `"numberingSystem"`,
// `"string"`, *undefined*, *undefined*).
Maybe<bool> maybe_numberingSystem = Intl::GetNumberingSystem(
- isolate, options, "Intl.RelativeTimeFormat", &numbering_system_str);
+ isolate, options, service, &numbering_system_str);
// 8. If _numberingSystem_ is not *undefined*, then
// a. If _numberingSystem_ does not match the
// `(3*8alphanum) *("-" (3*8alphanum))` sequence, throw a *RangeError*
@@ -895,7 +867,8 @@ MaybeHandle<JSNumberFormat> JSNumberFormat::New(Isolate* isolate,
requested_locales, matcher, relevant_extension_keys);
UErrorCode status = U_ZERO_ERROR;
- if (numbering_system_str != nullptr) {
+ if (numbering_system_str != nullptr &&
+ Intl::IsValidNumberingSystem(numbering_system_str.get())) {
r.icu_locale.setUnicodeKeywordValue("nu", numbering_system_str.get(),
status);
CHECK(U_SUCCESS(status));
@@ -913,21 +886,15 @@ MaybeHandle<JSNumberFormat> JSNumberFormat::New(Isolate* isolate,
.roundingMode(UNUM_ROUND_HALFUP);
// 12. Let style be ? GetOption(options, "style", "string", « "decimal",
- // "percent", "currency" », "decimal").
- const char* service = "Intl.NumberFormat";
+ // "percent", "currency", "unit" », "decimal").
- std::vector<const char*> style_str_values({"decimal", "percent", "currency"});
- std::vector<JSNumberFormat::Style> style_enum_values(
- {JSNumberFormat::Style::DECIMAL, JSNumberFormat::Style::PERCENT,
- JSNumberFormat::Style::CURRENCY});
- if (FLAG_harmony_intl_numberformat_unified) {
- style_str_values.push_back("unit");
- style_enum_values.push_back(JSNumberFormat::Style::UNIT);
- }
Maybe<JSNumberFormat::Style> maybe_style =
Intl::GetStringOption<JSNumberFormat::Style>(
- isolate, options, "style", service, style_str_values,
- style_enum_values, JSNumberFormat::Style::DECIMAL);
+ isolate, options, "style", service,
+ {"decimal", "percent", "currency", "unit"},
+ {JSNumberFormat::Style::DECIMAL, JSNumberFormat::Style::PERCENT,
+ JSNumberFormat::Style::CURRENCY, JSNumberFormat::Style::UNIT},
+ JSNumberFormat::Style::DECIMAL);
MAYBE_RETURN(maybe_style, MaybeHandle<JSNumberFormat>());
JSNumberFormat::Style style = maybe_style.FromJust();
@@ -977,99 +944,87 @@ MaybeHandle<JSNumberFormat> JSNumberFormat::New(Isolate* isolate,
}
// 18. Let currencyDisplay be ? GetOption(options, "currencyDisplay",
- // "string", « "code", "symbol", "name" », "symbol").
- std::vector<const char*> currency_display_str_values(
- {"code", "symbol", "name"});
- std::vector<CurrencyDisplay> currency_display_enum_values(
- {CurrencyDisplay::CODE, CurrencyDisplay::SYMBOL, CurrencyDisplay::NAME});
- if (FLAG_harmony_intl_numberformat_unified) {
- currency_display_str_values.push_back("narrowSymbol");
- currency_display_enum_values.push_back(CurrencyDisplay::NARROW_SYMBOL);
- }
+ // "string", « "code", "symbol", "name", "narrowSymbol" », "symbol").
Maybe<CurrencyDisplay> maybe_currency_display =
Intl::GetStringOption<CurrencyDisplay>(
isolate, options, "currencyDisplay", service,
- currency_display_str_values, currency_display_enum_values,
+ {"code", "symbol", "name", "narrowSymbol"},
+ {CurrencyDisplay::CODE, CurrencyDisplay::SYMBOL,
+ CurrencyDisplay::NAME, CurrencyDisplay::NARROW_SYMBOL},
CurrencyDisplay::SYMBOL);
MAYBE_RETURN(maybe_currency_display, MaybeHandle<JSNumberFormat>());
CurrencyDisplay currency_display = maybe_currency_display.FromJust();
CurrencySign currency_sign = CurrencySign::STANDARD;
- if (FLAG_harmony_intl_numberformat_unified) {
- // Let currencySign be ? GetOption(options, "currencySign", "string", «
- // "standard", "accounting" », "standard").
- Maybe<CurrencySign> maybe_currency_sign =
- Intl::GetStringOption<CurrencySign>(
- isolate, options, "currencySign", service,
- {"standard", "accounting"},
- {CurrencySign::STANDARD, CurrencySign::ACCOUNTING},
- CurrencySign::STANDARD);
- MAYBE_RETURN(maybe_currency_sign, MaybeHandle<JSNumberFormat>());
- currency_sign = maybe_currency_sign.FromJust();
-
- // Let unit be ? GetOption(options, "unit", "string", undefined, undefined).
- std::unique_ptr<char[]> unit_cstr;
- Maybe<bool> found_unit = Intl::GetStringOption(
- isolate, options, "unit", empty_values, service, &unit_cstr);
- MAYBE_RETURN(found_unit, MaybeHandle<JSNumberFormat>());
-
- std::string unit;
- if (found_unit.FromJust()) {
- DCHECK_NOT_NULL(unit_cstr.get());
- unit = unit_cstr.get();
+ // Let currencySign be ? GetOption(options, "currencySign", "string", «
+ // "standard", "accounting" », "standard").
+ Maybe<CurrencySign> maybe_currency_sign = Intl::GetStringOption<CurrencySign>(
+ isolate, options, "currencySign", service, {"standard", "accounting"},
+ {CurrencySign::STANDARD, CurrencySign::ACCOUNTING},
+ CurrencySign::STANDARD);
+ MAYBE_RETURN(maybe_currency_sign, MaybeHandle<JSNumberFormat>());
+ currency_sign = maybe_currency_sign.FromJust();
+
+ // Let unit be ? GetOption(options, "unit", "string", undefined, undefined).
+ std::unique_ptr<char[]> unit_cstr;
+ Maybe<bool> found_unit = Intl::GetStringOption(
+ isolate, options, "unit", empty_values, service, &unit_cstr);
+ MAYBE_RETURN(found_unit, MaybeHandle<JSNumberFormat>());
+
+ std::string unit;
+ if (found_unit.FromJust()) {
+ DCHECK_NOT_NULL(unit_cstr.get());
+ unit = unit_cstr.get();
+ }
+
+ // Let unitDisplay be ? GetOption(options, "unitDisplay", "string", «
+ // "short", "narrow", "long" », "short").
+ Maybe<UnitDisplay> maybe_unit_display = Intl::GetStringOption<UnitDisplay>(
+ isolate, options, "unitDisplay", service, {"short", "narrow", "long"},
+ {UnitDisplay::SHORT, UnitDisplay::NARROW, UnitDisplay::LONG},
+ UnitDisplay::SHORT);
+ MAYBE_RETURN(maybe_unit_display, MaybeHandle<JSNumberFormat>());
+ UnitDisplay unit_display = maybe_unit_display.FromJust();
+
+ // If style is "unit", then
+ if (style == JSNumberFormat::Style::UNIT) {
+ // If unit is undefined, throw a TypeError exception.
+ if (unit == "") {
+ THROW_NEW_ERROR(isolate,
+ NewTypeError(MessageTemplate::kInvalidUnit,
+ factory->NewStringFromAsciiChecked(service),
+ factory->empty_string()),
+ JSNumberFormat);
}
- // Let unitDisplay be ? GetOption(options, "unitDisplay", "string", «
- // "short", "narrow", "long" », "short").
- Maybe<UnitDisplay> maybe_unit_display = Intl::GetStringOption<UnitDisplay>(
- isolate, options, "unitDisplay", service, {"short", "narrow", "long"},
- {UnitDisplay::SHORT, UnitDisplay::NARROW, UnitDisplay::LONG},
- UnitDisplay::SHORT);
- MAYBE_RETURN(maybe_unit_display, MaybeHandle<JSNumberFormat>());
- UnitDisplay unit_display = maybe_unit_display.FromJust();
-
- // If style is "unit", then
- if (style == JSNumberFormat::Style::UNIT) {
- // If unit is undefined, throw a TypeError exception.
- if (unit == "") {
- THROW_NEW_ERROR(
- isolate,
- NewTypeError(MessageTemplate::kInvalidUnit,
- factory->NewStringFromStaticChars("Intl.NumberFormat"),
- factory->empty_string()),
- JSNumberFormat);
- }
-
- // If the result of IsWellFormedUnitIdentifier(unit) is false, throw a
- // RangeError exception.
- Maybe<std::pair<icu::MeasureUnit, icu::MeasureUnit>> maybe_wellformed =
- IsWellFormedUnitIdentifier(isolate, unit);
- if (maybe_wellformed.IsNothing()) {
- THROW_NEW_ERROR(
- isolate,
- NewRangeError(
- MessageTemplate::kInvalidUnit,
- factory->NewStringFromStaticChars("Intl.NumberFormat"),
- factory->NewStringFromAsciiChecked(unit.c_str())),
- JSNumberFormat);
- }
- std::pair<icu::MeasureUnit, icu::MeasureUnit> unit_pair =
- maybe_wellformed.FromJust();
+ // If the result of IsWellFormedUnitIdentifier(unit) is false, throw a
+ // RangeError exception.
+ Maybe<std::pair<icu::MeasureUnit, icu::MeasureUnit>> maybe_wellformed =
+ IsWellFormedUnitIdentifier(isolate, unit);
+ if (maybe_wellformed.IsNothing()) {
+ THROW_NEW_ERROR(
+ isolate,
+ NewRangeError(MessageTemplate::kInvalidUnit,
+ factory->NewStringFromAsciiChecked(service),
+ factory->NewStringFromAsciiChecked(unit.c_str())),
+ JSNumberFormat);
+ }
+ std::pair<icu::MeasureUnit, icu::MeasureUnit> unit_pair =
+ maybe_wellformed.FromJust();
- // Set intlObj.[[Unit]] to unit.
- if (unit_pair.first != icu::NoUnit::base()) {
- icu_number_formatter = icu_number_formatter.unit(unit_pair.first);
- }
- if (unit_pair.second != icu::NoUnit::base()) {
- icu_number_formatter = icu_number_formatter.perUnit(unit_pair.second);
- }
+ // Set intlObj.[[Unit]] to unit.
+ if (unit_pair.first != icu::NoUnit::base()) {
+ icu_number_formatter = icu_number_formatter.unit(unit_pair.first);
+ }
+ if (unit_pair.second != icu::NoUnit::base()) {
+ icu_number_formatter = icu_number_formatter.perUnit(unit_pair.second);
+ }
- // The default unitWidth is SHORT in ICU and that mapped from
- // Symbol so we can skip the setting for optimization.
- if (unit_display != UnitDisplay::SHORT) {
- icu_number_formatter =
- icu_number_formatter.unitWidth(ToUNumberUnitWidth(unit_display));
- }
+ // The default unitWidth is SHORT in ICU and that mapped from
+ // Symbol so we can skip the setting for optimization.
+ if (unit_display != UnitDisplay::SHORT) {
+ icu_number_formatter =
+ icu_number_formatter.unitWidth(ToUNumberUnitWidth(unit_display));
}
}
@@ -1125,18 +1080,16 @@ MaybeHandle<JSNumberFormat> JSNumberFormat::New(Isolate* isolate,
}
Notation notation = Notation::STANDARD;
- if (FLAG_harmony_intl_numberformat_unified) {
- // 25. Let notation be ? GetOption(options, "notation", "string", «
- // "standard", "scientific", "engineering", "compact" », "standard").
- Maybe<Notation> maybe_notation = Intl::GetStringOption<Notation>(
- isolate, options, "notation", service,
- {"standard", "scientific", "engineering", "compact"},
- {Notation::STANDARD, Notation::SCIENTIFIC, Notation::ENGINEERING,
- Notation::COMPACT},
- Notation::STANDARD);
- MAYBE_RETURN(maybe_notation, MaybeHandle<JSNumberFormat>());
- notation = maybe_notation.FromJust();
- }
+ // 25. Let notation be ? GetOption(options, "notation", "string", «
+ // "standard", "scientific", "engineering", "compact" », "standard").
+ Maybe<Notation> maybe_notation = Intl::GetStringOption<Notation>(
+ isolate, options, "notation", service,
+ {"standard", "scientific", "engineering", "compact"},
+ {Notation::STANDARD, Notation::SCIENTIFIC, Notation::ENGINEERING,
+ Notation::COMPACT},
+ Notation::STANDARD);
+ MAYBE_RETURN(maybe_notation, MaybeHandle<JSNumberFormat>());
+ notation = maybe_notation.FromJust();
// 27. Perform ? SetNumberFormatDigitOptions(numberFormat, options,
// mnfdDefault, mxfdDefault).
@@ -1149,24 +1102,21 @@ MaybeHandle<JSNumberFormat> JSNumberFormat::New(Isolate* isolate,
icu_number_formatter = JSNumberFormat::SetDigitOptionsToFormatter(
icu_number_formatter, digit_options);
- if (FLAG_harmony_intl_numberformat_unified) {
- // 28. Let compactDisplay be ? GetOption(options, "compactDisplay",
- // "string", « "short", "long" », "short").
- Maybe<CompactDisplay> maybe_compact_display =
- Intl::GetStringOption<CompactDisplay>(
- isolate, options, "compactDisplay", service, {"short", "long"},
- {CompactDisplay::SHORT, CompactDisplay::LONG},
- CompactDisplay::SHORT);
- MAYBE_RETURN(maybe_compact_display, MaybeHandle<JSNumberFormat>());
- CompactDisplay compact_display = maybe_compact_display.FromJust();
-
- // 26. Set numberFormat.[[Notation]] to notation.
- // The default notation in ICU is Simple, which mapped from STANDARD
- // so we can skip setting it.
- if (notation != Notation::STANDARD) {
- icu_number_formatter = icu_number_formatter.notation(
- ToICUNotation(notation, compact_display));
- }
+ // 28. Let compactDisplay be ? GetOption(options, "compactDisplay",
+ // "string", « "short", "long" », "short").
+ Maybe<CompactDisplay> maybe_compact_display =
+ Intl::GetStringOption<CompactDisplay>(
+ isolate, options, "compactDisplay", service, {"short", "long"},
+ {CompactDisplay::SHORT, CompactDisplay::LONG}, CompactDisplay::SHORT);
+ MAYBE_RETURN(maybe_compact_display, MaybeHandle<JSNumberFormat>());
+ CompactDisplay compact_display = maybe_compact_display.FromJust();
+
+ // 26. Set numberFormat.[[Notation]] to notation.
+ // The default notation in ICU is Simple, which mapped from STANDARD
+ // so we can skip setting it.
+ if (notation != Notation::STANDARD) {
+ icu_number_formatter =
+ icu_number_formatter.notation(ToICUNotation(notation, compact_display));
}
// 30. Let useGrouping be ? GetOption(options, "useGrouping", "boolean",
// undefined, true).
@@ -1180,27 +1130,25 @@ MaybeHandle<JSNumberFormat> JSNumberFormat::New(Isolate* isolate,
UNumberGroupingStrategy::UNUM_GROUPING_OFF);
}
- if (FLAG_harmony_intl_numberformat_unified) {
- // 32. Let signDisplay be ? GetOption(options, "signDisplay", "string", «
- // "auto", "never", "always", "exceptZero" », "auto").
- Maybe<SignDisplay> maybe_sign_display = Intl::GetStringOption<SignDisplay>(
- isolate, options, "signDisplay", service,
- {"auto", "never", "always", "exceptZero"},
- {SignDisplay::AUTO, SignDisplay::NEVER, SignDisplay::ALWAYS,
- SignDisplay::EXCEPT_ZERO},
- SignDisplay::AUTO);
- MAYBE_RETURN(maybe_sign_display, MaybeHandle<JSNumberFormat>());
- SignDisplay sign_display = maybe_sign_display.FromJust();
-
- // 33. Set numberFormat.[[SignDisplay]] to signDisplay.
- // The default sign in ICU is UNUM_SIGN_AUTO which is mapped from
- // SignDisplay::AUTO and CurrencySign::STANDARD so we can skip setting
- // under that values for optimization.
- if (sign_display != SignDisplay::AUTO ||
- currency_sign != CurrencySign::STANDARD) {
- icu_number_formatter = icu_number_formatter.sign(
- ToUNumberSignDisplay(sign_display, currency_sign));
- }
+ // 32. Let signDisplay be ? GetOption(options, "signDisplay", "string", «
+ // "auto", "never", "always", "exceptZero" », "auto").
+ Maybe<SignDisplay> maybe_sign_display = Intl::GetStringOption<SignDisplay>(
+ isolate, options, "signDisplay", service,
+ {"auto", "never", "always", "exceptZero"},
+ {SignDisplay::AUTO, SignDisplay::NEVER, SignDisplay::ALWAYS,
+ SignDisplay::EXCEPT_ZERO},
+ SignDisplay::AUTO);
+ MAYBE_RETURN(maybe_sign_display, MaybeHandle<JSNumberFormat>());
+ SignDisplay sign_display = maybe_sign_display.FromJust();
+
+ // 33. Set numberFormat.[[SignDisplay]] to signDisplay.
+ // The default sign in ICU is UNUM_SIGN_AUTO which is mapped from
+ // SignDisplay::AUTO and CurrencySign::STANDARD so we can skip setting
+ // under that values for optimization.
+ if (sign_display != SignDisplay::AUTO ||
+ currency_sign != CurrencySign::STANDARD) {
+ icu_number_formatter = icu_number_formatter.sign(
+ ToUNumberSignDisplay(sign_display, currency_sign));
}
// 25. Let dataLocaleData be localeData.[[<dataLocale>]].
@@ -1231,24 +1179,6 @@ MaybeHandle<JSNumberFormat> JSNumberFormat::New(Isolate* isolate,
number_format->set_style(style);
number_format->set_locale(*locale_str);
- if (digit_options.minimum_significant_digits > 0) {
- // The current ECMA 402 spec mandates recording (Min|Max)imumFractionDigits
- // unconditionally, while the unified number proposal eventually will only
- // record either (Min|Max)imumFractionDigits or
- // (Min|Max)imumSignificantDigits. Since LocalizedNumberFormatter can only
- // remember one set, and during 2019-1-17 ECMA402 meeting the committee
- // decided not to take a PR to address that prior to the unified number
- // proposal, we have to add these two 5-bit ints into flags to remember the
- // (Min|Max)imumFractionDigits while (Min|Max)imumSignificantDigits is
- // present.
- // TODO(ftang) remove the following two lines once we ship
- // int-number-format-unified
- number_format->set_minimum_fraction_digits(
- digit_options.minimum_fraction_digits);
- number_format->set_maximum_fraction_digits(
- digit_options.maximum_fraction_digits);
- }
-
number_format->set_icu_number_formatter(*managed_number_formatter);
number_format->set_bound_format(*factory->undefined_value());
diff --git a/deps/v8/src/objects/js-number-format.h b/deps/v8/src/objects/js-number-format.h
index 2979ab10f4..a5196f8d51 100644
--- a/deps/v8/src/objects/js-number-format.h
+++ b/deps/v8/src/objects/js-number-format.h
@@ -36,7 +36,7 @@ class JSNumberFormat : public JSObject {
// ecma402/#sec-initializenumberformat
V8_WARN_UNUSED_RESULT static MaybeHandle<JSNumberFormat> New(
Isolate* isolate, Handle<Map> map, Handle<Object> locales,
- Handle<Object> options);
+ Handle<Object> options, const char* service);
// ecma402/#sec-unwrapnumberformat
V8_WARN_UNUSED_RESULT static MaybeHandle<JSNumberFormat> UnwrapNumberFormat(
@@ -72,26 +72,6 @@ class JSNumberFormat : public JSObject {
DECL_PRINTER(JSNumberFormat)
DECL_VERIFIER(JSNumberFormat)
- // Current ECMA 402 spec mandates to record (Min|Max)imumFractionDigits
- // unconditionally while the unified number proposal eventually will only
- // record either (Min|Max)imumFractionDigits or (Min|Max)imumSignaficantDigits
- // Since LocalizedNumberFormatter can only remember one set, and during
- // 2019-1-17 ECMA402 meeting that the committee decide not to take a PR to
- // address that prior to the unified number proposal, we have to add these two
- // 5 bits int into flags to remember the (Min|Max)imumFractionDigits while
- // (Min|Max)imumSignaficantDigits is present.
- // TODO(ftang) remove the following once we ship int-number-format-unified
- // * Four inline functions: (set_)?(min|max)imum_fraction_digits
- // * kFlagsOffset
- // * #define FLAGS_BIT_FIELDS
- // * DECL_INT_ACCESSORS(flags)
-
- inline int minimum_fraction_digits() const;
- inline void set_minimum_fraction_digits(int digits);
-
- inline int maximum_fraction_digits() const;
- inline void set_maximum_fraction_digits(int digits);
-
// [[Style]] is one of the values "decimal", "percent", "currency",
// or "unit" identifying the style of the number format.
// Note: "unit" is added in proposal-unified-intl-numberformat
@@ -102,19 +82,15 @@ class JSNumberFormat : public JSObject {
// Layout description.
DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
- TORQUE_GENERATED_JSNUMBER_FORMAT_FIELDS)
+ TORQUE_GENERATED_JS_NUMBER_FORMAT_FIELDS)
// Bit positions in |flags|.
#define FLAGS_BIT_FIELDS(V, _) \
- V(MinimumFractionDigitsBits, int, 5, _) \
- V(MaximumFractionDigitsBits, int, 5, _) \
V(StyleBits, Style, 2, _)
DEFINE_BIT_FIELDS(FLAGS_BIT_FIELDS)
#undef FLAGS_BIT_FIELDS
- STATIC_ASSERT(20 <= MinimumFractionDigitsBits::kMax);
- STATIC_ASSERT(20 <= MaximumFractionDigitsBits::kMax);
STATIC_ASSERT(Style::DECIMAL <= StyleBits::kMax);
STATIC_ASSERT(Style::PERCENT <= StyleBits::kMax);
STATIC_ASSERT(Style::CURRENCY <= StyleBits::kMax);
diff --git a/deps/v8/src/objects/js-objects-inl.h b/deps/v8/src/objects/js-objects-inl.h
index f8fe069d3d..a6b9e9ad83 100644
--- a/deps/v8/src/objects/js-objects-inl.h
+++ b/deps/v8/src/objects/js-objects-inl.h
@@ -31,16 +31,19 @@ namespace internal {
OBJECT_CONSTRUCTORS_IMPL(JSReceiver, HeapObject)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSObject)
+TQ_OBJECT_CONSTRUCTORS_IMPL(JSCustomElementsObject)
+TQ_OBJECT_CONSTRUCTORS_IMPL(JSSpecialObject)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSAsyncFromSyncIterator)
+TQ_OBJECT_CONSTRUCTORS_IMPL(JSFunctionOrBoundFunction)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSBoundFunction)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSDate)
-OBJECT_CONSTRUCTORS_IMPL(JSFunction, JSObject)
-OBJECT_CONSTRUCTORS_IMPL(JSGlobalObject, JSObject)
+OBJECT_CONSTRUCTORS_IMPL(JSFunction, JSFunctionOrBoundFunction)
+OBJECT_CONSTRUCTORS_IMPL(JSGlobalObject, JSSpecialObject)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSGlobalProxy)
JSIteratorResult::JSIteratorResult(Address ptr) : JSObject(ptr) {}
OBJECT_CONSTRUCTORS_IMPL(JSMessageObject, JSObject)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSPrimitiveWrapper)
-OBJECT_CONSTRUCTORS_IMPL(JSStringIterator, JSObject)
+TQ_OBJECT_CONSTRUCTORS_IMPL(JSStringIterator)
NEVER_READ_ONLY_SPACE_IMPL(JSReceiver)
@@ -49,7 +52,6 @@ CAST_ACCESSOR(JSGlobalObject)
CAST_ACCESSOR(JSIteratorResult)
CAST_ACCESSOR(JSMessageObject)
CAST_ACCESSOR(JSReceiver)
-CAST_ACCESSOR(JSStringIterator)
MaybeHandle<Object> JSReceiver::GetProperty(Isolate* isolate,
Handle<JSReceiver> receiver,
@@ -375,7 +377,7 @@ void JSObject::FastPropertyAtPut(FieldIndex index, Object value) {
}
}
-void JSObject::WriteToField(int descriptor, PropertyDetails details,
+void JSObject::WriteToField(InternalIndex descriptor, PropertyDetails details,
Object value) {
DCHECK_EQ(kField, details.location());
DCHECK_EQ(kData, details.kind());
@@ -540,7 +542,9 @@ Code JSFunction::code() const {
void JSFunction::set_code(Code value) {
DCHECK(!ObjectInYoungGeneration(value));
RELAXED_WRITE_FIELD(*this, kCodeOffset, value);
+#ifndef V8_DISABLE_WRITE_BARRIERS
MarkingBarrier(*this, RawField(kCodeOffset), value);
+#endif
}
void JSFunction::set_code_no_write_barrier(Code value) {
@@ -1007,8 +1011,7 @@ inline int JSGlobalProxy::SizeWithEmbedderFields(int embedder_field_count) {
ACCESSORS(JSIteratorResult, value, Object, kValueOffset)
ACCESSORS(JSIteratorResult, done, Object, kDoneOffset)
-ACCESSORS(JSStringIterator, string, String, kStringOffset)
-SMI_ACCESSORS(JSStringIterator, index, kNextIndexOffset)
+TQ_SMI_ACCESSORS(JSStringIterator, index)
// If the fast-case backing storage takes up much more memory than a dictionary
// backing storage would, the object should have slow elements.
diff --git a/deps/v8/src/objects/js-objects.cc b/deps/v8/src/objects/js-objects.cc
index 3666f5afbe..ea0917f18f 100644
--- a/deps/v8/src/objects/js-objects.cc
+++ b/deps/v8/src/objects/js-objects.cc
@@ -216,15 +216,19 @@ V8_WARN_UNUSED_RESULT Maybe<bool> FastAssign(
}
Handle<DescriptorArray> descriptors(map->instance_descriptors(), isolate);
- int length = map->NumberOfOwnDescriptors();
bool stable = true;
- for (int i = 0; i < length; i++) {
+ for (InternalIndex i : map->IterateOwnDescriptors()) {
+ HandleScope inner_scope(isolate);
+
Handle<Name> next_key(descriptors->GetKey(i), isolate);
Handle<Object> prop_value;
// Directly decode from the descriptor array if |from| did not change shape.
if (stable) {
+ DCHECK_EQ(from->map(), *map);
+ DCHECK_EQ(*descriptors, map->instance_descriptors());
+
PropertyDetails details = descriptors->GetDetails(i);
if (!details.IsEnumerable()) continue;
if (details.kind() == kData) {
@@ -232,7 +236,8 @@ V8_WARN_UNUSED_RESULT Maybe<bool> FastAssign(
prop_value = handle(descriptors->GetStrongValue(i), isolate);
} else {
Representation representation = details.representation();
- FieldIndex index = FieldIndex::ForDescriptor(*map, i);
+ FieldIndex index = FieldIndex::ForPropertyIndex(
+ *map, details.field_index(), representation);
prop_value = JSObject::FastPropertyAt(from, representation, index);
}
} else {
@@ -240,6 +245,7 @@ V8_WARN_UNUSED_RESULT Maybe<bool> FastAssign(
isolate, prop_value,
JSReceiver::GetProperty(isolate, from, next_key), Nothing<bool>());
stable = from->map() == *map;
+ *descriptors.location() = map->instance_descriptors().ptr();
}
} else {
// If the map did change, do a slower lookup. We are still guaranteed that
@@ -260,7 +266,10 @@ V8_WARN_UNUSED_RESULT Maybe<bool> FastAssign(
Object::SetProperty(&it, prop_value, StoreOrigin::kNamed,
Just(ShouldThrow::kThrowOnError));
if (result.IsNothing()) return result;
- if (stable) stable = from->map() == *map;
+ if (stable) {
+ stable = from->map() == *map;
+ *descriptors.location() = map->instance_descriptors().ptr();
+ }
} else {
if (excluded_properties != nullptr &&
HasExcludedProperty(excluded_properties, next_key)) {
@@ -1094,8 +1103,7 @@ Maybe<bool> SetPropertyWithInterceptorInternal(
Maybe<bool> DefinePropertyWithInterceptorInternal(
LookupIterator* it, Handle<InterceptorInfo> interceptor,
- Maybe<ShouldThrow> should_throw,
- PropertyDescriptor& desc) { // NOLINT(runtime/references)
+ Maybe<ShouldThrow> should_throw, PropertyDescriptor* desc) {
Isolate* isolate = it->isolate();
// Make sure that the top context does not change when doing callbacks or
// interceptor calls.
@@ -1116,23 +1124,23 @@ Maybe<bool> DefinePropertyWithInterceptorInternal(
std::unique_ptr<v8::PropertyDescriptor> descriptor(
new v8::PropertyDescriptor());
- if (PropertyDescriptor::IsAccessorDescriptor(&desc)) {
+ if (PropertyDescriptor::IsAccessorDescriptor(desc)) {
descriptor.reset(new v8::PropertyDescriptor(
- v8::Utils::ToLocal(desc.get()), v8::Utils::ToLocal(desc.set())));
- } else if (PropertyDescriptor::IsDataDescriptor(&desc)) {
- if (desc.has_writable()) {
+ v8::Utils::ToLocal(desc->get()), v8::Utils::ToLocal(desc->set())));
+ } else if (PropertyDescriptor::IsDataDescriptor(desc)) {
+ if (desc->has_writable()) {
descriptor.reset(new v8::PropertyDescriptor(
- v8::Utils::ToLocal(desc.value()), desc.writable()));
+ v8::Utils::ToLocal(desc->value()), desc->writable()));
} else {
descriptor.reset(
- new v8::PropertyDescriptor(v8::Utils::ToLocal(desc.value())));
+ new v8::PropertyDescriptor(v8::Utils::ToLocal(desc->value())));
}
}
- if (desc.has_enumerable()) {
- descriptor->set_enumerable(desc.enumerable());
+ if (desc->has_enumerable()) {
+ descriptor->set_enumerable(desc->enumerable());
}
- if (desc.has_configurable()) {
- descriptor->set_configurable(desc.configurable());
+ if (desc->has_configurable()) {
+ descriptor->set_configurable(desc->configurable());
}
if (it->IsElement()) {
@@ -1166,7 +1174,7 @@ Maybe<bool> JSReceiver::OrdinaryDefineOwnProperty(
if (it->state() == LookupIterator::INTERCEPTOR) {
if (it->HolderIsReceiverOrHiddenPrototype()) {
Maybe<bool> result = DefinePropertyWithInterceptorInternal(
- it, it->GetInterceptor(), should_throw, *desc);
+ it, it->GetInterceptor(), should_throw, desc);
if (result.IsNothing() || result.FromJust()) {
return result;
}
@@ -1834,8 +1842,8 @@ V8_WARN_UNUSED_RESULT Maybe<bool> FastGetOwnValuesOrEntries(
if (!map->OnlyHasSimpleProperties()) return Just(false);
Handle<JSObject> object(JSObject::cast(*receiver), isolate);
-
Handle<DescriptorArray> descriptors(map->instance_descriptors(), isolate);
+
int number_of_own_descriptors = map->NumberOfOwnDescriptors();
int number_of_own_elements =
object->GetElementsAccessor()->GetCapacity(*object, object->elements());
@@ -1857,15 +1865,25 @@ V8_WARN_UNUSED_RESULT Maybe<bool> FastGetOwnValuesOrEntries(
Nothing<bool>());
}
- bool stable = object->map() == *map;
+ // We may have already lost stability, if CollectValuesOrEntries had
+ // side-effects.
+ bool stable = *map == object->map();
+ if (stable) {
+ *descriptors.location() = map->instance_descriptors().ptr();
+ }
+
+ for (InternalIndex index : InternalIndex::Range(number_of_own_descriptors)) {
+ HandleScope inner_scope(isolate);
- for (int index = 0; index < number_of_own_descriptors; index++) {
Handle<Name> next_key(descriptors->GetKey(index), isolate);
if (!next_key->IsString()) continue;
Handle<Object> prop_value;
// Directly decode from the descriptor array if |from| did not change shape.
if (stable) {
+ DCHECK_EQ(object->map(), *map);
+ DCHECK_EQ(*descriptors, map->instance_descriptors());
+
PropertyDetails details = descriptors->GetDetails(index);
if (!details.IsEnumerable()) continue;
if (details.kind() == kData) {
@@ -1873,7 +1891,8 @@ V8_WARN_UNUSED_RESULT Maybe<bool> FastGetOwnValuesOrEntries(
prop_value = handle(descriptors->GetStrongValue(index), isolate);
} else {
Representation representation = details.representation();
- FieldIndex field_index = FieldIndex::ForDescriptor(*map, index);
+ FieldIndex field_index = FieldIndex::ForPropertyIndex(
+ *map, details.field_index(), representation);
prop_value =
JSObject::FastPropertyAt(object, representation, field_index);
}
@@ -1883,6 +1902,7 @@ V8_WARN_UNUSED_RESULT Maybe<bool> FastGetOwnValuesOrEntries(
JSReceiver::GetProperty(isolate, object, next_key),
Nothing<bool>());
stable = object->map() == *map;
+ *descriptors.location() = map->instance_descriptors().ptr();
}
} else {
// If the map did change, do a slower lookup. We are still guaranteed that
@@ -2121,15 +2141,15 @@ int JSObject::GetHeaderSize(InstanceType type,
return JSWeakSet::kSize;
case JS_PROMISE_TYPE:
return JSPromise::kSize;
- case JS_REGEXP_TYPE:
+ case JS_REG_EXP_TYPE:
return JSRegExp::kSize;
- case JS_REGEXP_STRING_ITERATOR_TYPE:
+ case JS_REG_EXP_STRING_ITERATOR_TYPE:
return JSRegExpStringIterator::kSize;
case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
return JSObject::kHeaderSize;
case JS_MESSAGE_OBJECT_TYPE:
return JSMessageObject::kSize;
- case JS_ARGUMENTS_TYPE:
+ case JS_ARGUMENTS_OBJECT_TYPE:
return JSObject::kHeaderSize;
case JS_ERROR_TYPE:
return JSObject::kHeaderSize;
@@ -2138,38 +2158,38 @@ int JSObject::GetHeaderSize(InstanceType type,
case JS_MODULE_NAMESPACE_TYPE:
return JSModuleNamespace::kHeaderSize;
#ifdef V8_INTL_SUPPORT
- case JS_INTL_V8_BREAK_ITERATOR_TYPE:
+ case JS_V8_BREAK_ITERATOR_TYPE:
return JSV8BreakIterator::kSize;
- case JS_INTL_COLLATOR_TYPE:
+ case JS_COLLATOR_TYPE:
return JSCollator::kSize;
- case JS_INTL_DATE_TIME_FORMAT_TYPE:
+ case JS_DATE_TIME_FORMAT_TYPE:
return JSDateTimeFormat::kSize;
- case JS_INTL_LIST_FORMAT_TYPE:
+ case JS_LIST_FORMAT_TYPE:
return JSListFormat::kSize;
- case JS_INTL_LOCALE_TYPE:
+ case JS_LOCALE_TYPE:
return JSLocale::kSize;
- case JS_INTL_NUMBER_FORMAT_TYPE:
+ case JS_NUMBER_FORMAT_TYPE:
return JSNumberFormat::kSize;
- case JS_INTL_PLURAL_RULES_TYPE:
+ case JS_PLURAL_RULES_TYPE:
return JSPluralRules::kSize;
- case JS_INTL_RELATIVE_TIME_FORMAT_TYPE:
+ case JS_RELATIVE_TIME_FORMAT_TYPE:
return JSRelativeTimeFormat::kSize;
- case JS_INTL_SEGMENT_ITERATOR_TYPE:
+ case JS_SEGMENT_ITERATOR_TYPE:
return JSSegmentIterator::kSize;
- case JS_INTL_SEGMENTER_TYPE:
+ case JS_SEGMENTER_TYPE:
return JSSegmenter::kSize;
#endif // V8_INTL_SUPPORT
- case WASM_GLOBAL_TYPE:
+ case WASM_GLOBAL_OBJECT_TYPE:
return WasmGlobalObject::kSize;
- case WASM_INSTANCE_TYPE:
+ case WASM_INSTANCE_OBJECT_TYPE:
return WasmInstanceObject::kSize;
- case WASM_MEMORY_TYPE:
+ case WASM_MEMORY_OBJECT_TYPE:
return WasmMemoryObject::kSize;
- case WASM_MODULE_TYPE:
+ case WASM_MODULE_OBJECT_TYPE:
return WasmModuleObject::kSize;
- case WASM_TABLE_TYPE:
+ case WASM_TABLE_OBJECT_TYPE:
return WasmTableObject::kSize;
- case WASM_EXCEPTION_TYPE:
+ case WASM_EXCEPTION_OBJECT_TYPE:
return WasmExceptionObject::kSize;
default:
UNREACHABLE();
@@ -2377,7 +2397,7 @@ void JSObject::JSObjectShortPrint(StringStream* accumulator) {
accumulator->Add("<JSWeakSet>");
break;
}
- case JS_REGEXP_TYPE: {
+ case JS_REG_EXP_TYPE: {
accumulator->Add("<JSRegExp");
JSRegExp regexp = JSRegExp::cast(*this);
if (regexp.source().IsString()) {
@@ -2506,7 +2526,7 @@ void JSObject::PrintInstanceMigration(FILE* file, Map original_map,
PrintF(file, "[migrating]");
DescriptorArray o = original_map.instance_descriptors();
DescriptorArray n = new_map.instance_descriptors();
- for (int i = 0; i < original_map.NumberOfOwnDescriptors(); i++) {
+ for (InternalIndex i : original_map.IterateOwnDescriptors()) {
Representation o_r = o.GetDetails(i).representation();
Representation n_r = n.GetDetails(i).representation();
if (!o_r.Equals(n_r)) {
@@ -2703,7 +2723,7 @@ void MigrateFastToFast(Isolate* isolate, Handle<JSObject> object,
// number of properties.
DCHECK(old_nof <= new_nof);
- for (int i = 0; i < old_nof; i++) {
+ for (InternalIndex i : InternalIndex::Range(old_nof)) {
PropertyDetails details = new_descriptors->GetDetails(i);
if (details.location() != kField) continue;
DCHECK_EQ(kData, details.kind());
@@ -2753,7 +2773,7 @@ void MigrateFastToFast(Isolate* isolate, Handle<JSObject> object,
}
}
- for (int i = old_nof; i < new_nof; i++) {
+ for (InternalIndex i : InternalIndex::Range(old_nof, new_nof)) {
PropertyDetails details = new_descriptors->GetDetails(i);
if (details.location() != kField) continue;
DCHECK_EQ(kData, details.kind());
@@ -2776,9 +2796,10 @@ void MigrateFastToFast(Isolate* isolate, Handle<JSObject> object,
Heap* heap = isolate->heap();
- int old_instance_size = old_map->instance_size();
-
- heap->NotifyObjectLayoutChange(*object, old_instance_size, no_allocation);
+ // Invalidate slots manually later in case of tagged to untagged translation.
+ // In all other cases the recorded slot remains dereferenceable.
+ heap->NotifyObjectLayoutChange(*object, no_allocation,
+ InvalidateRecordedSlots::kNo);
// Copy (real) inobject properties. If necessary, stop at number_of_fields to
// avoid overwriting |one_pointer_filler_map|.
@@ -2795,7 +2816,8 @@ void MigrateFastToFast(Isolate* isolate, Handle<JSObject> object,
index, HeapNumber::cast(value).value_as_bits());
if (i < old_number_of_fields && !old_map->IsUnboxedDoubleField(index)) {
// Transition from tagged to untagged slot.
- heap->ClearRecordedSlot(*object, object->RawField(index.offset()));
+ MemoryChunk* chunk = MemoryChunk::FromHeapObject(*object);
+ chunk->InvalidateRecordedSlots(*object);
} else {
#ifdef DEBUG
heap->VerifyClearedSlot(*object, object->RawField(index.offset()));
@@ -2809,6 +2831,7 @@ void MigrateFastToFast(Isolate* isolate, Handle<JSObject> object,
object->SetProperties(*array);
// Create filler object past the new instance size.
+ int old_instance_size = old_map->instance_size();
int new_instance_size = new_map->instance_size();
int instance_size_delta = old_instance_size - new_instance_size;
DCHECK_GE(instance_size_delta, 0);
@@ -2851,7 +2874,7 @@ void MigrateFastToSlow(Isolate* isolate, Handle<JSObject> object,
NameDictionary::New(isolate, property_count);
Handle<DescriptorArray> descs(map->instance_descriptors(isolate), isolate);
- for (int i = 0; i < real_size; i++) {
+ for (InternalIndex i : InternalIndex::Range(real_size)) {
PropertyDetails details = descs->GetDetails(i);
Handle<Name> key(descs->GetKey(isolate, i), isolate);
Handle<Object> value;
@@ -2891,10 +2914,15 @@ void MigrateFastToSlow(Isolate* isolate, Handle<JSObject> object,
DisallowHeapAllocation no_allocation;
Heap* heap = isolate->heap();
- int old_instance_size = map->instance_size();
- heap->NotifyObjectLayoutChange(*object, old_instance_size, no_allocation);
+
+ // Invalidate slots manually later in case the new map has in-object
+ // properties. If not, it is not possible to store an untagged value
+ // in a recorded slot.
+ heap->NotifyObjectLayoutChange(*object, no_allocation,
+ InvalidateRecordedSlots::kNo);
// Resize the object in the heap if necessary.
+ int old_instance_size = map->instance_size();
int new_instance_size = new_map->instance_size();
int instance_size_delta = old_instance_size - new_instance_size;
DCHECK_GE(instance_size_delta, 0);
@@ -2914,10 +2942,8 @@ void MigrateFastToSlow(Isolate* isolate, Handle<JSObject> object,
// garbage.
int inobject_properties = new_map->GetInObjectProperties();
if (inobject_properties) {
- Heap* heap = isolate->heap();
- heap->ClearRecordedSlotRange(
- object->address() + map->GetInObjectPropertyOffset(0),
- object->address() + new_instance_size);
+ MemoryChunk* chunk = MemoryChunk::FromHeapObject(*object);
+ chunk->InvalidateRecordedSlots(*object);
for (int i = 0; i < inobject_properties; i++) {
FieldIndex index = FieldIndex::ForPropertyIndex(*new_map, i);
@@ -3047,7 +3073,7 @@ void JSObject::AllocateStorageForMap(Handle<JSObject> object, Handle<Map> map) {
Handle<PropertyArray> array =
isolate->factory()->NewPropertyArray(external);
- for (int i = 0; i < map->NumberOfOwnDescriptors(); i++) {
+ for (InternalIndex i : map->IterateOwnDescriptors()) {
PropertyDetails details = descriptors->GetDetails(i);
Representation representation = details.representation();
if (!representation.IsDouble()) continue;
@@ -3344,8 +3370,8 @@ void JSObject::MigrateSlowToFast(Handle<JSObject> object,
}
// Allocate the instance descriptor.
- Handle<DescriptorArray> descriptors = DescriptorArray::Allocate(
- isolate, instance_descriptor_length, 0, AllocationType::kOld);
+ Handle<DescriptorArray> descriptors =
+ DescriptorArray::Allocate(isolate, instance_descriptor_length, 0);
int number_of_allocated_fields =
number_of_fields + unused_property_fields - inobject_props;
@@ -3410,7 +3436,7 @@ void JSObject::MigrateSlowToFast(Handle<JSObject> object,
}
current_offset += details.field_width_in_words();
}
- descriptors->Set(i, &d);
+ descriptors->Set(InternalIndex(i), &d);
}
DCHECK(current_offset == number_of_fields);
@@ -3441,6 +3467,8 @@ void JSObject::MigrateSlowToFast(Handle<JSObject> object,
}
void JSObject::RequireSlowElements(NumberDictionary dictionary) {
+ DCHECK_NE(dictionary,
+ ReadOnlyRoots(GetIsolate()).empty_slow_element_dictionary());
if (dictionary.requires_slow_elements()) return;
dictionary.set_requires_slow_elements();
if (map().is_prototype_map()) {
@@ -3603,8 +3631,7 @@ bool TestFastPropertiesIntegrityLevel(Map map, PropertyAttributes level) {
DCHECK(!map.is_dictionary_map());
DescriptorArray descriptors = map.instance_descriptors();
- int number_of_own_descriptors = map.NumberOfOwnDescriptors();
- for (int i = 0; i < number_of_own_descriptors; i++) {
+ for (InternalIndex i : map.IterateOwnDescriptors()) {
if (descriptors.GetKey(i).IsPrivate()) continue;
PropertyDetails details = descriptors.GetDetails(i);
if (details.IsConfigurable()) return false;
@@ -3709,7 +3736,9 @@ Maybe<bool> JSObject::PreventExtensions(Handle<JSObject> object,
object->HasSlowArgumentsElements());
// Make sure that we never go back to fast case.
- object->RequireSlowElements(*dictionary);
+ if (*dictionary != ReadOnlyRoots(isolate).empty_slow_element_dictionary()) {
+ object->RequireSlowElements(*dictionary);
+ }
}
// Do a map transition, other objects with this map may still
@@ -4136,10 +4165,9 @@ MaybeHandle<Object> JSObject::SetAccessor(Handle<JSObject> object,
Object JSObject::SlowReverseLookup(Object value) {
if (HasFastProperties()) {
- int number_of_own_descriptors = map().NumberOfOwnDescriptors();
DescriptorArray descs = map().instance_descriptors();
bool value_is_number = value.IsNumber();
- for (int i = 0; i < number_of_own_descriptors; i++) {
+ for (InternalIndex i : map().IterateOwnDescriptors()) {
PropertyDetails details = descs.GetDetails(i);
if (details.location() == kField) {
DCHECK_EQ(kData, details.kind());
@@ -5187,16 +5215,16 @@ bool CanSubclassHaveInobjectProperties(InstanceType instance_type) {
case JS_FUNCTION_TYPE:
case JS_GENERATOR_OBJECT_TYPE:
#ifdef V8_INTL_SUPPORT
- case JS_INTL_COLLATOR_TYPE:
- case JS_INTL_DATE_TIME_FORMAT_TYPE:
- case JS_INTL_LIST_FORMAT_TYPE:
- case JS_INTL_LOCALE_TYPE:
- case JS_INTL_NUMBER_FORMAT_TYPE:
- case JS_INTL_PLURAL_RULES_TYPE:
- case JS_INTL_RELATIVE_TIME_FORMAT_TYPE:
- case JS_INTL_SEGMENT_ITERATOR_TYPE:
- case JS_INTL_SEGMENTER_TYPE:
- case JS_INTL_V8_BREAK_ITERATOR_TYPE:
+ case JS_COLLATOR_TYPE:
+ case JS_DATE_TIME_FORMAT_TYPE:
+ case JS_LIST_FORMAT_TYPE:
+ case JS_LOCALE_TYPE:
+ case JS_NUMBER_FORMAT_TYPE:
+ case JS_PLURAL_RULES_TYPE:
+ case JS_RELATIVE_TIME_FORMAT_TYPE:
+ case JS_SEGMENT_ITERATOR_TYPE:
+ case JS_SEGMENTER_TYPE:
+ case JS_V8_BREAK_ITERATOR_TYPE:
#endif
case JS_ASYNC_FUNCTION_OBJECT_TYPE:
case JS_ASYNC_GENERATOR_OBJECT_TYPE:
@@ -5205,9 +5233,9 @@ bool CanSubclassHaveInobjectProperties(InstanceType instance_type) {
case JS_OBJECT_TYPE:
case JS_ERROR_TYPE:
case JS_FINALIZATION_GROUP_TYPE:
- case JS_ARGUMENTS_TYPE:
+ case JS_ARGUMENTS_OBJECT_TYPE:
case JS_PROMISE_TYPE:
- case JS_REGEXP_TYPE:
+ case JS_REG_EXP_TYPE:
case JS_SET_TYPE:
case JS_SPECIAL_API_OBJECT_TYPE:
case JS_TYPED_ARRAY_TYPE:
@@ -5215,11 +5243,11 @@ bool CanSubclassHaveInobjectProperties(InstanceType instance_type) {
case JS_WEAK_MAP_TYPE:
case JS_WEAK_REF_TYPE:
case JS_WEAK_SET_TYPE:
- case WASM_GLOBAL_TYPE:
- case WASM_INSTANCE_TYPE:
- case WASM_MEMORY_TYPE:
- case WASM_MODULE_TYPE:
- case WASM_TABLE_TYPE:
+ case WASM_GLOBAL_OBJECT_TYPE:
+ case WASM_INSTANCE_OBJECT_TYPE:
+ case WASM_MEMORY_OBJECT_TYPE:
+ case WASM_MODULE_OBJECT_TYPE:
+ case WASM_TABLE_OBJECT_TYPE:
return true;
case BIGINT_TYPE:
diff --git a/deps/v8/src/objects/js-objects.h b/deps/v8/src/objects/js-objects.h
index a9510642f1..f38cbe16e6 100644
--- a/deps/v8/src/objects/js-objects.h
+++ b/deps/v8/src/objects/js-objects.h
@@ -6,6 +6,8 @@
#define V8_OBJECTS_JS_OBJECTS_H_
#include "src/objects/embedder-data-slot.h"
+// TODO(jkummerow): Consider forward-declaring instead.
+#include "src/objects/internal-index.h"
#include "src/objects/objects.h"
#include "src/objects/property-array.h"
#include "torque-generated/class-definitions-tq.h"
@@ -264,7 +266,7 @@ class JSReceiver : public HeapObject {
static const int kHashMask = PropertyArray::HashField::kMask;
DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize,
- TORQUE_GENERATED_JSRECEIVER_FIELDS)
+ TORQUE_GENERATED_JS_RECEIVER_FIELDS)
bool HasProxyInPrototype(Isolate* isolate);
V8_WARN_UNUSED_RESULT static MaybeHandle<FixedArray> GetPrivateEntries(
@@ -631,7 +633,7 @@ class JSObject : public TorqueGeneratedJSObject<JSObject, JSReceiver> {
FieldIndex index, Object value,
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
inline void RawFastDoublePropertyAsBitsAtPut(FieldIndex index, uint64_t bits);
- inline void WriteToField(int descriptor, PropertyDetails details,
+ inline void WriteToField(InternalIndex descriptor, PropertyDetails details,
Object value);
// Access to in object properties.
@@ -806,6 +808,29 @@ class JSObject : public TorqueGeneratedJSObject<JSObject, JSReceiver> {
TQ_OBJECT_CONSTRUCTORS(JSObject)
};
+// An abstract superclass for JSObjects that may have elements while having an
+// empty fixed array as elements backing store. It doesn't carry any
+// functionality but allows function classes to be identified in the type
+// system.
+class JSCustomElementsObject
+ : public TorqueGeneratedJSCustomElementsObject<JSCustomElementsObject,
+ JSObject> {
+ public:
+ STATIC_ASSERT(kHeaderSize == JSObject::kHeaderSize);
+ TQ_OBJECT_CONSTRUCTORS(JSCustomElementsObject)
+};
+
+// An abstract superclass for JSObjects that require non-standard element
+// access. It doesn't carry any functionality but allows function classes to be
+// identified in the type system.
+class JSSpecialObject
+ : public TorqueGeneratedJSSpecialObject<JSSpecialObject,
+ JSCustomElementsObject> {
+ public:
+ STATIC_ASSERT(kHeaderSize == JSObject::kHeaderSize);
+ TQ_OBJECT_CONSTRUCTORS(JSSpecialObject)
+};
+
// JSAccessorPropertyDescriptor is just a JSObject with a specific initial
// map. This initial map adds in-object properties for "get", "set",
// "enumerable" and "configurable" properties, as assigned by the
@@ -893,9 +918,21 @@ class JSIteratorResult : public JSObject {
OBJECT_CONSTRUCTORS(JSIteratorResult, JSObject);
};
+// An abstract superclass for classes representing JavaScript function values.
+// It doesn't carry any functionality but allows function classes to be
+// identified in the type system.
+class JSFunctionOrBoundFunction
+ : public TorqueGeneratedJSFunctionOrBoundFunction<JSFunctionOrBoundFunction,
+ JSObject> {
+ public:
+ STATIC_ASSERT(kHeaderSize == JSObject::kHeaderSize);
+ TQ_OBJECT_CONSTRUCTORS(JSFunctionOrBoundFunction)
+};
+
// JSBoundFunction describes a bound function exotic object.
class JSBoundFunction
- : public TorqueGeneratedJSBoundFunction<JSBoundFunction, JSObject> {
+ : public TorqueGeneratedJSBoundFunction<JSBoundFunction,
+ JSFunctionOrBoundFunction> {
public:
static MaybeHandle<String> GetName(Isolate* isolate,
Handle<JSBoundFunction> function);
@@ -916,7 +953,7 @@ class JSBoundFunction
};
// JSFunction describes JavaScript functions.
-class JSFunction : public JSObject {
+class JSFunction : public JSFunctionOrBoundFunction {
public:
// [prototype_or_initial_map]:
DECL_ACCESSORS(prototype_or_initial_map, HeapObject)
@@ -1119,13 +1156,13 @@ class JSFunction : public JSObject {
// ES6 section 19.2.3.5 Function.prototype.toString ( ).
static Handle<String> ToString(Handle<JSFunction> function);
- DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
- TORQUE_GENERATED_JSFUNCTION_FIELDS)
+ DEFINE_FIELD_OFFSET_CONSTANTS(JSFunctionOrBoundFunction::kHeaderSize,
+ TORQUE_GENERATED_JS_FUNCTION_FIELDS)
static constexpr int kSizeWithoutPrototype = kPrototypeOrInitialMapOffset;
static constexpr int kSizeWithPrototype = kSize;
- OBJECT_CONSTRUCTORS(JSFunction, JSObject);
+ OBJECT_CONSTRUCTORS(JSFunction, JSFunctionOrBoundFunction);
};
// JSGlobalProxy's prototype must be a JSGlobalObject or null,
@@ -1137,7 +1174,7 @@ class JSFunction : public JSObject {
// Accessing a JSGlobalProxy requires security check.
class JSGlobalProxy
- : public TorqueGeneratedJSGlobalProxy<JSGlobalProxy, JSObject> {
+ : public TorqueGeneratedJSGlobalProxy<JSGlobalProxy, JSSpecialObject> {
public:
inline bool IsDetachedFrom(JSGlobalObject global) const;
@@ -1151,7 +1188,7 @@ class JSGlobalProxy
};
// JavaScript global object.
-class JSGlobalObject : public JSObject {
+class JSGlobalObject : public JSSpecialObject {
public:
// [native context]: the natives corresponding to this global object.
DECL_ACCESSORS(native_context, NativeContext)
@@ -1179,15 +1216,16 @@ class JSGlobalObject : public JSObject {
DECL_VERIFIER(JSGlobalObject)
// Layout description.
- DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
- TORQUE_GENERATED_JSGLOBAL_OBJECT_FIELDS)
+ DEFINE_FIELD_OFFSET_CONSTANTS(JSSpecialObject::kHeaderSize,
+ TORQUE_GENERATED_JS_GLOBAL_OBJECT_FIELDS)
- OBJECT_CONSTRUCTORS(JSGlobalObject, JSObject);
+ OBJECT_CONSTRUCTORS(JSGlobalObject, JSSpecialObject);
};
// Representation for JS Wrapper objects, String, Number, Boolean, etc.
class JSPrimitiveWrapper
- : public TorqueGeneratedJSPrimitiveWrapper<JSPrimitiveWrapper, JSObject> {
+ : public TorqueGeneratedJSPrimitiveWrapper<JSPrimitiveWrapper,
+ JSCustomElementsObject> {
public:
// Dispatched behavior.
DECL_PRINTER(JSPrimitiveWrapper)
@@ -1319,7 +1357,7 @@ class JSMessageObject : public JSObject {
DECL_VERIFIER(JSMessageObject)
DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
- TORQUE_GENERATED_JSMESSAGE_OBJECT_FIELDS)
+ TORQUE_GENERATED_JS_MESSAGE_OBJECT_FIELDS)
// TODO(v8:8989): [torque] Support marker constants.
static const int kPointerFieldsEndOffset = kStartPositionOffset;
@@ -1370,25 +1408,18 @@ class JSAsyncFromSyncIterator
TQ_OBJECT_CONSTRUCTORS(JSAsyncFromSyncIterator)
};
-class JSStringIterator : public JSObject {
+class JSStringIterator
+ : public TorqueGeneratedJSStringIterator<JSStringIterator, JSObject> {
public:
// Dispatched behavior.
DECL_PRINTER(JSStringIterator)
DECL_VERIFIER(JSStringIterator)
- DECL_CAST(JSStringIterator)
-
- // [string]: the [[IteratedString]] inobject property.
- DECL_ACCESSORS(string, String)
-
// [index]: The [[StringIteratorNextIndex]] inobject property.
inline int index() const;
inline void set_index(int value);
- DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
- TORQUE_GENERATED_JSSTRING_ITERATOR_FIELDS)
-
- OBJECT_CONSTRUCTORS(JSStringIterator, JSObject);
+ TQ_OBJECT_CONSTRUCTORS(JSStringIterator)
};
} // namespace internal
diff --git a/deps/v8/src/objects/js-plural-rules.cc b/deps/v8/src/objects/js-plural-rules.cc
index 84fe9b6d52..bf928416f4 100644
--- a/deps/v8/src/objects/js-plural-rules.cc
+++ b/deps/v8/src/objects/js-plural-rules.cc
@@ -241,17 +241,18 @@ Handle<JSObject> JSPluralRules::ResolvedOptions(
JSNumberFormat::MinimumIntegerDigitsFromSkeleton(skeleton),
"minimumIntegerDigits");
int32_t min = 0, max = 0;
- JSNumberFormat::FractionDigitsFromSkeleton(skeleton, &min, &max);
-
- CreateDataPropertyForOptions(isolate, options, min, "minimumFractionDigits");
-
- CreateDataPropertyForOptions(isolate, options, max, "maximumFractionDigits");
if (JSNumberFormat::SignificantDigitsFromSkeleton(skeleton, &min, &max)) {
CreateDataPropertyForOptions(isolate, options, min,
"minimumSignificantDigits");
CreateDataPropertyForOptions(isolate, options, max,
"maximumSignificantDigits");
+ } else {
+ JSNumberFormat::FractionDigitsFromSkeleton(skeleton, &min, &max);
+ CreateDataPropertyForOptions(isolate, options, min,
+ "minimumFractionDigits");
+ CreateDataPropertyForOptions(isolate, options, max,
+ "maximumFractionDigits");
}
// 6. Let pluralCategories be a List of Strings representing the
diff --git a/deps/v8/src/objects/js-plural-rules.h b/deps/v8/src/objects/js-plural-rules.h
index 840efb07ed..0303266894 100644
--- a/deps/v8/src/objects/js-plural-rules.h
+++ b/deps/v8/src/objects/js-plural-rules.h
@@ -68,7 +68,7 @@ class JSPluralRules : public JSObject {
// Layout description.
DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
- TORQUE_GENERATED_JSPLURAL_RULES_FIELDS)
+ TORQUE_GENERATED_JS_PLURAL_RULES_FIELDS)
DECL_ACCESSORS(locale, String)
DECL_INT_ACCESSORS(flags)
diff --git a/deps/v8/src/objects/js-proxy.h b/deps/v8/src/objects/js-proxy.h
index 8e29c08bc1..c6bb844fe5 100644
--- a/deps/v8/src/objects/js-proxy.h
+++ b/deps/v8/src/objects/js-proxy.h
@@ -128,7 +128,7 @@ class JSProxyRevocableResult : public JSObject {
public:
// Layout description.
DEFINE_FIELD_OFFSET_CONSTANTS(
- JSObject::kHeaderSize, TORQUE_GENERATED_JSPROXY_REVOCABLE_RESULT_FIELDS)
+ JSObject::kHeaderSize, TORQUE_GENERATED_JS_PROXY_REVOCABLE_RESULT_FIELDS)
// Indices of in-object properties.
static const int kProxyIndex = 0;
diff --git a/deps/v8/src/objects/js-regexp-inl.h b/deps/v8/src/objects/js-regexp-inl.h
index b69d1cca97..885bc4804d 100644
--- a/deps/v8/src/objects/js-regexp-inl.h
+++ b/deps/v8/src/objects/js-regexp-inl.h
@@ -7,6 +7,7 @@
#include "src/objects/js-regexp.h"
+#include "src/objects/js-array-inl.h"
#include "src/objects/objects-inl.h" // Needed for write barriers
#include "src/objects/smi.h"
#include "src/objects/string.h"
@@ -18,9 +19,18 @@ namespace v8 {
namespace internal {
TQ_OBJECT_CONSTRUCTORS_IMPL(JSRegExp)
+OBJECT_CONSTRUCTORS_IMPL(JSRegExpResult, JSArray)
+OBJECT_CONSTRUCTORS_IMPL(JSRegExpResultIndices, JSArray)
+
+CAST_ACCESSOR(JSRegExpResult)
+CAST_ACCESSOR(JSRegExpResultIndices)
ACCESSORS(JSRegExp, last_index, Object, kLastIndexOffset)
+ACCESSORS(JSRegExpResult, cached_indices_or_match_info, Object,
+ kCachedIndicesOrMatchInfoOffset)
+ACCESSORS(JSRegExpResult, names, Object, kNamesOffset)
+
JSRegExp::Type JSRegExp::TypeTag() const {
Object data = this->data();
if (data.IsUndefined()) return JSRegExp::NOT_COMPILED;
diff --git a/deps/v8/src/objects/js-regexp.cc b/deps/v8/src/objects/js-regexp.cc
new file mode 100644
index 0000000000..c7f96fe278
--- /dev/null
+++ b/deps/v8/src/objects/js-regexp.cc
@@ -0,0 +1,118 @@
+// Copyright 2019 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/objects/js-regexp.h"
+
+#include "src/objects/js-array-inl.h"
+#include "src/objects/js-regexp-inl.h"
+
+namespace v8 {
+namespace internal {
+Handle<JSArray> JSRegExpResult::GetAndCacheIndices(
+ Isolate* isolate, Handle<JSRegExpResult> regexp_result) {
+ // Check for cached indices.
+ Handle<Object> indices_or_match_info(
+ regexp_result->cached_indices_or_match_info(), isolate);
+ if (indices_or_match_info->IsRegExpMatchInfo()) {
+ // Build and cache indices for next lookup.
+ // TODO(joshualitt): Instead of caching the indices, we could call
+ // ReconfigureToDataProperty on 'indices' setting its value to this
+ // newly created array. However, care would have to be taken to ensure
+ // a new map is not created each time.
+ Handle<RegExpMatchInfo> match_info(
+ RegExpMatchInfo::cast(regexp_result->cached_indices_or_match_info()),
+ isolate);
+ Handle<Object> maybe_names(regexp_result->names(), isolate);
+ indices_or_match_info =
+ JSRegExpResultIndices::BuildIndices(isolate, match_info, maybe_names);
+
+ // Cache the result and clear the names array.
+ regexp_result->set_cached_indices_or_match_info(*indices_or_match_info);
+ regexp_result->set_names(ReadOnlyRoots(isolate).undefined_value());
+ }
+ return Handle<JSArray>::cast(indices_or_match_info);
+}
+
+Handle<JSRegExpResultIndices> JSRegExpResultIndices::BuildIndices(
+ Isolate* isolate, Handle<RegExpMatchInfo> match_info,
+ Handle<Object> maybe_names) {
+ Handle<JSRegExpResultIndices> indices(Handle<JSRegExpResultIndices>::cast(
+ isolate->factory()->NewJSObjectFromMap(
+ isolate->regexp_result_indices_map())));
+
+ // Initialize indices length to avoid having a partially initialized object
+ // should GC be triggered by creating a NewFixedArray.
+ indices->set_length(Smi::kZero);
+
+ // Build indices array from RegExpMatchInfo.
+ int num_indices = match_info->NumberOfCaptureRegisters();
+ int num_results = num_indices >> 1;
+ Handle<FixedArray> indices_array =
+ isolate->factory()->NewFixedArray(num_results);
+ JSArray::SetContent(indices, indices_array);
+
+ for (int i = 0; i < num_results; i++) {
+ int base_offset = i * 2;
+ int start_offset = match_info->Capture(base_offset);
+ int end_offset = match_info->Capture(base_offset + 1);
+
+ // Any unmatched captures are set to undefined, otherwise we set them to a
+ // subarray of the indices.
+ if (start_offset == -1) {
+ indices_array->set(i, ReadOnlyRoots(isolate).undefined_value());
+ } else {
+ Handle<FixedArray> indices_sub_array(
+ isolate->factory()->NewFixedArray(2));
+ indices_sub_array->set(0, Smi::FromInt(start_offset));
+ indices_sub_array->set(1, Smi::FromInt(end_offset));
+ Handle<JSArray> indices_sub_jsarray =
+ isolate->factory()->NewJSArrayWithElements(indices_sub_array,
+ PACKED_SMI_ELEMENTS, 2);
+ indices_array->set(i, *indices_sub_jsarray);
+ }
+ }
+
+ // If there are no capture groups, set the groups property to undefined.
+ FieldIndex groups_index = FieldIndex::ForDescriptor(
+ indices->map(), InternalIndex(kGroupsDescriptorIndex));
+ if (maybe_names->IsUndefined(isolate)) {
+ indices->RawFastPropertyAtPut(groups_index,
+ ReadOnlyRoots(isolate).undefined_value());
+ return indices;
+ }
+
+ // Create a groups property which returns a dictionary of named captures to
+ // their corresponding capture indices.
+ Handle<FixedArray> names(Handle<FixedArray>::cast(maybe_names));
+ int num_names = names->length() >> 1;
+ Handle<NameDictionary> group_names = NameDictionary::New(isolate, num_names);
+ for (int i = 0; i < num_names; i++) {
+ int base_offset = i * 2;
+ int name_offset = base_offset;
+ int index_offset = base_offset + 1;
+ Handle<String> name(String::cast(names->get(name_offset)), isolate);
+ Handle<Smi> smi_index(Smi::cast(names->get(index_offset)), isolate);
+ Handle<Object> capture_indices(indices_array->get(smi_index->value()),
+ isolate);
+ if (!capture_indices->IsUndefined(isolate)) {
+ capture_indices = Handle<JSArray>::cast(capture_indices);
+ }
+ group_names = NameDictionary::Add(
+ isolate, group_names, name, capture_indices, PropertyDetails::Empty());
+ }
+
+ // Convert group_names to a JSObject and store at the groups property of the
+ // result indices.
+ Handle<FixedArrayBase> elements = isolate->factory()->empty_fixed_array();
+ Handle<HeapObject> null =
+ Handle<HeapObject>::cast(isolate->factory()->null_value());
+ Handle<JSObject> js_group_names =
+ isolate->factory()->NewSlowJSObjectWithPropertiesAndElements(
+ null, group_names, elements);
+ indices->RawFastPropertyAtPut(groups_index, *js_group_names);
+ return indices;
+}
+
+} // namespace internal
+} // namespace v8
diff --git a/deps/v8/src/objects/js-regexp.h b/deps/v8/src/objects/js-regexp.h
index b3ef06bd5c..03efd4913c 100644
--- a/deps/v8/src/objects/js-regexp.h
+++ b/deps/v8/src/objects/js-regexp.h
@@ -96,7 +96,8 @@ class JSRegExp : public TorqueGeneratedJSRegExp<JSRegExp, JSObject> {
Handle<String> flags_string);
bool MarkedForTierUp();
- void ResetTierUp();
+ void ResetLastTierUpTick();
+ void TierUpTick();
void MarkTierUpForNextExec();
inline Type TypeTag() const;
@@ -176,9 +177,13 @@ class JSRegExp : public TorqueGeneratedJSRegExp<JSRegExp, JSObject> {
// Maps names of named capture groups (at indices 2i) to their corresponding
// (1-based) capture group indices (at indices 2i + 1).
static const int kIrregexpCaptureNameMapIndex = kDataIndex + 6;
- static const int kIrregexpTierUpTicksIndex = kDataIndex + 7;
+ // Tier-up ticks are set to the value of the tier-up ticks flag. The value is
+ // decremented on each execution of the bytecode, so that the tier-up
+ // happens once the ticks reach zero.
+ // This value is ignored if the regexp-tier-up flag isn't turned on.
+ static const int kIrregexpTicksUntilTierUpIndex = kDataIndex + 7;
- static const int kIrregexpDataSize = kIrregexpTierUpTicksIndex + 1;
+ static const int kIrregexpDataSize = kIrregexpTicksUntilTierUpIndex + 1;
// In-object fields.
static const int kLastIndexFieldIndex = 0;
@@ -195,6 +200,10 @@ class JSRegExp : public TorqueGeneratedJSRegExp<JSRegExp, JSObject> {
// The uninitialized value for a regexp code object.
static const int kUninitializedValue = -1;
+ // The heuristic value for the length of the subject string for which we
+ // tier-up to the compiler immediately, instead of using the interpreter.
+ static constexpr int kTierUpForSubjectLengthValue = 1000;
+
TQ_OBJECT_CONSTRUCTORS(JSRegExp)
};
@@ -208,18 +217,63 @@ DEFINE_OPERATORS_FOR_FLAGS(JSRegExp::Flags)
// After creation the result must be treated as a JSArray in all regards.
class JSRegExpResult : public JSArray {
public:
+ DECL_CAST(JSRegExpResult)
+
+ // TODO(joshualitt): We would like to add printers and verifiers to
+ // JSRegExpResult, and maybe JSRegExpResultIndices, but both have the same
+ // instance type as JSArray.
+
+ // cached_indices_or_match_info and names, are used to construct the
+ // JSRegExpResultIndices returned from the indices property lazily.
+ DECL_ACCESSORS(cached_indices_or_match_info, Object)
+ DECL_ACCESSORS(names, Object)
+
// Layout description.
DEFINE_FIELD_OFFSET_CONSTANTS(JSArray::kSize,
- TORQUE_GENERATED_JSREG_EXP_RESULT_FIELDS)
+ TORQUE_GENERATED_JS_REG_EXP_RESULT_FIELDS)
+
+ static Handle<JSArray> GetAndCacheIndices(
+ Isolate* isolate, Handle<JSRegExpResult> regexp_result);
// Indices of in-object properties.
static const int kIndexIndex = 0;
static const int kInputIndex = 1;
static const int kGroupsIndex = 2;
- static const int kInObjectPropertyCount = 3;
- private:
- DISALLOW_IMPLICIT_CONSTRUCTORS(JSRegExpResult);
+ // Private internal only fields.
+ static const int kCachedIndicesOrMatchInfoIndex = 3;
+ static const int kNamesIndex = 4;
+ static const int kInObjectPropertyCount = 5;
+
+ OBJECT_CONSTRUCTORS(JSRegExpResult, JSArray);
+};
+
+// JSRegExpResultIndices is just a JSArray with a specific initial map.
+// This initial map adds in-object properties for "group"
+// properties, as assigned by RegExp.prototype.exec, which allows
+// faster creation of RegExp exec results.
+// This class just holds constants used when creating the result.
+// After creation the result must be treated as a JSArray in all regards.
+class JSRegExpResultIndices : public JSArray {
+ public:
+ DECL_CAST(JSRegExpResultIndices)
+
+ // Layout description.
+ DEFINE_FIELD_OFFSET_CONSTANTS(
+ JSArray::kSize, TORQUE_GENERATED_JS_REG_EXP_RESULT_INDICES_FIELDS)
+
+ static Handle<JSRegExpResultIndices> BuildIndices(
+ Isolate* isolate, Handle<RegExpMatchInfo> match_info,
+ Handle<Object> maybe_names);
+
+ // Indices of in-object properties.
+ static const int kGroupsIndex = 0;
+ static const int kInObjectPropertyCount = 1;
+
+ // Descriptor index of groups.
+ static const int kGroupsDescriptorIndex = 1;
+
+ OBJECT_CONSTRUCTORS(JSRegExpResultIndices, JSArray);
};
} // namespace internal
diff --git a/deps/v8/src/objects/js-relative-time-format.cc b/deps/v8/src/objects/js-relative-time-format.cc
index 28f8c757ee..edf3e26c22 100644
--- a/deps/v8/src/objects/js-relative-time-format.cc
+++ b/deps/v8/src/objects/js-relative-time-format.cc
@@ -112,7 +112,8 @@ MaybeHandle<JSRelativeTimeFormat> JSRelativeTimeFormat::New(
// 14. Let dataLocale be r.[[DataLocale]].
icu::Locale icu_locale = r.icu_locale;
UErrorCode status = U_ZERO_ERROR;
- if (numbering_system_str != nullptr) {
+ if (numbering_system_str != nullptr &&
+ Intl::IsValidNumberingSystem(numbering_system_str.get())) {
icu_locale.setUnicodeKeywordValue("nu", numbering_system_str.get(), status);
CHECK(U_SUCCESS(status));
}
diff --git a/deps/v8/src/objects/js-relative-time-format.h b/deps/v8/src/objects/js-relative-time-format.h
index 6e405e345e..c642367988 100644
--- a/deps/v8/src/objects/js-relative-time-format.h
+++ b/deps/v8/src/objects/js-relative-time-format.h
@@ -107,7 +107,7 @@ class JSRelativeTimeFormat : public JSObject {
// Layout description.
DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
- TORQUE_GENERATED_JSRELATIVE_TIME_FORMAT_FIELDS)
+ TORQUE_GENERATED_JS_RELATIVE_TIME_FORMAT_FIELDS)
private:
static Style getStyle(const char* str);
diff --git a/deps/v8/src/objects/js-segment-iterator.h b/deps/v8/src/objects/js-segment-iterator.h
index cadb99e79d..1c71af8864 100644
--- a/deps/v8/src/objects/js-segment-iterator.h
+++ b/deps/v8/src/objects/js-segment-iterator.h
@@ -91,7 +91,7 @@ class JSSegmentIterator : public JSObject {
// Layout description.
DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
- TORQUE_GENERATED_JSSEGMENT_ITERATOR_FIELDS)
+ TORQUE_GENERATED_JS_SEGMENT_ITERATOR_FIELDS)
OBJECT_CONSTRUCTORS(JSSegmentIterator, JSObject);
};
diff --git a/deps/v8/src/objects/js-segmenter.h b/deps/v8/src/objects/js-segmenter.h
index 641cf106fb..209c4682b3 100644
--- a/deps/v8/src/objects/js-segmenter.h
+++ b/deps/v8/src/objects/js-segmenter.h
@@ -78,7 +78,7 @@ class JSSegmenter : public JSObject {
// Layout description.
DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
- TORQUE_GENERATED_JSSEGMENTER_FIELDS)
+ TORQUE_GENERATED_JS_SEGMENTER_FIELDS)
private:
static Granularity GetGranularity(const char* str);
diff --git a/deps/v8/src/objects/js-weak-refs-inl.h b/deps/v8/src/objects/js-weak-refs-inl.h
index 004ffd6d79..0635153611 100644
--- a/deps/v8/src/objects/js-weak-refs-inl.h
+++ b/deps/v8/src/objects/js-weak-refs-inl.h
@@ -17,38 +17,21 @@
namespace v8 {
namespace internal {
-OBJECT_CONSTRUCTORS_IMPL(WeakCell, HeapObject)
-OBJECT_CONSTRUCTORS_IMPL(JSWeakRef, JSObject)
+TQ_OBJECT_CONSTRUCTORS_IMPL(WeakCell)
+TQ_OBJECT_CONSTRUCTORS_IMPL(JSWeakRef)
OBJECT_CONSTRUCTORS_IMPL(JSFinalizationGroup, JSObject)
-OBJECT_CONSTRUCTORS_IMPL(JSFinalizationGroupCleanupIterator, JSObject)
+TQ_OBJECT_CONSTRUCTORS_IMPL(JSFinalizationGroupCleanupIterator)
ACCESSORS(JSFinalizationGroup, native_context, NativeContext,
kNativeContextOffset)
ACCESSORS(JSFinalizationGroup, cleanup, Object, kCleanupOffset)
-ACCESSORS(JSFinalizationGroup, active_cells, Object, kActiveCellsOffset)
-ACCESSORS(JSFinalizationGroup, cleared_cells, Object, kClearedCellsOffset)
+ACCESSORS(JSFinalizationGroup, active_cells, HeapObject, kActiveCellsOffset)
+ACCESSORS(JSFinalizationGroup, cleared_cells, HeapObject, kClearedCellsOffset)
ACCESSORS(JSFinalizationGroup, key_map, Object, kKeyMapOffset)
SMI_ACCESSORS(JSFinalizationGroup, flags, kFlagsOffset)
ACCESSORS(JSFinalizationGroup, next, Object, kNextOffset)
CAST_ACCESSOR(JSFinalizationGroup)
-ACCESSORS(WeakCell, finalization_group, Object, kFinalizationGroupOffset)
-ACCESSORS(WeakCell, target, HeapObject, kTargetOffset)
-ACCESSORS(WeakCell, holdings, Object, kHoldingsOffset)
-ACCESSORS(WeakCell, next, Object, kNextOffset)
-ACCESSORS(WeakCell, prev, Object, kPrevOffset)
-ACCESSORS(WeakCell, key, Object, kKeyOffset)
-ACCESSORS(WeakCell, key_list_next, Object, kKeyListNextOffset)
-ACCESSORS(WeakCell, key_list_prev, Object, kKeyListPrevOffset)
-CAST_ACCESSOR(WeakCell)
-
-CAST_ACCESSOR(JSWeakRef)
-ACCESSORS(JSWeakRef, target, HeapObject, kTargetOffset)
-
-ACCESSORS(JSFinalizationGroupCleanupIterator, finalization_group,
- JSFinalizationGroup, kFinalizationGroupOffset)
-CAST_ACCESSOR(JSFinalizationGroupCleanupIterator)
-
void JSFinalizationGroup::Register(
Handle<JSFinalizationGroup> finalization_group, Handle<JSReceiver> target,
Handle<Object> holdings, Handle<Object> key, Isolate* isolate) {
@@ -101,7 +84,7 @@ bool JSFinalizationGroup::Unregister(
Handle<ObjectHashTable> key_map =
handle(ObjectHashTable::cast(finalization_group->key_map()), isolate);
Object value = key_map->Lookup(unregister_token);
- Object undefined = ReadOnlyRoots(isolate).undefined_value();
+ HeapObject undefined = ReadOnlyRoots(isolate).undefined_value();
while (value.IsWeakCell()) {
WeakCell weak_cell = WeakCell::cast(value);
weak_cell.RemoveFromFinalizationGroupCells(isolate);
diff --git a/deps/v8/src/objects/js-weak-refs.h b/deps/v8/src/objects/js-weak-refs.h
index 723e0e3135..8d61b125a1 100644
--- a/deps/v8/src/objects/js-weak-refs.h
+++ b/deps/v8/src/objects/js-weak-refs.h
@@ -28,8 +28,8 @@ class JSFinalizationGroup : public JSObject {
DECL_ACCESSORS(native_context, NativeContext)
DECL_ACCESSORS(cleanup, Object)
- DECL_ACCESSORS(active_cells, Object)
- DECL_ACCESSORS(cleared_cells, Object)
+ DECL_ACCESSORS(active_cells, HeapObject)
+ DECL_ACCESSORS(cleared_cells, HeapObject)
DECL_ACCESSORS(key_map, Object)
// For storing a list of JSFinalizationGroup objects in NativeContext.
@@ -66,7 +66,7 @@ class JSFinalizationGroup : public JSObject {
// Layout description.
DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
- TORQUE_GENERATED_JSFINALIZATION_GROUP_FIELDS)
+ TORQUE_GENERATED_JS_FINALIZATION_GROUP_FIELDS)
// Bitfields in flags.
using ScheduledForCleanupField = BitField<bool, 0, 1>;
@@ -75,32 +75,10 @@ class JSFinalizationGroup : public JSObject {
};
// Internal object for storing weak references in JSFinalizationGroup.
-class WeakCell : public HeapObject {
+class WeakCell : public TorqueGeneratedWeakCell<WeakCell, HeapObject> {
public:
DECL_PRINTER(WeakCell)
EXPORT_DECL_VERIFIER(WeakCell)
- DECL_CAST(WeakCell)
-
- DECL_ACCESSORS(finalization_group, Object)
- DECL_ACCESSORS(target, HeapObject)
- DECL_ACCESSORS(holdings, Object)
-
- // For storing doubly linked lists of WeakCells in JSFinalizationGroup's
- // "active_cells" and "cleared_cells" lists.
- DECL_ACCESSORS(prev, Object)
- DECL_ACCESSORS(next, Object)
-
- // For storing doubly linked lists of WeakCells per key in
- // JSFinalizationGroup's key-based hashmap. WeakCell also needs to know its
- // key, so that we can remove the key from the key_map when we remove the last
- // WeakCell associated with it.
- DECL_ACCESSORS(key, Object)
- DECL_ACCESSORS(key_list_prev, Object)
- DECL_ACCESSORS(key_list_next, Object)
-
- // Layout description.
- DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize,
- TORQUE_GENERATED_WEAK_CELL_FIELDS)
class BodyDescriptor;
@@ -115,40 +93,27 @@ class WeakCell : public HeapObject {
inline void RemoveFromFinalizationGroupCells(Isolate* isolate);
- OBJECT_CONSTRUCTORS(WeakCell, HeapObject);
+ TQ_OBJECT_CONSTRUCTORS(WeakCell)
};
-class JSWeakRef : public JSObject {
+class JSWeakRef : public TorqueGeneratedJSWeakRef<JSWeakRef, JSObject> {
public:
DECL_PRINTER(JSWeakRef)
EXPORT_DECL_VERIFIER(JSWeakRef)
- DECL_CAST(JSWeakRef)
-
- DECL_ACCESSORS(target, HeapObject)
-
- // Layout description.
- DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
- TORQUE_GENERATED_JSWEAK_REF_FIELDS)
class BodyDescriptor;
- OBJECT_CONSTRUCTORS(JSWeakRef, JSObject);
+ TQ_OBJECT_CONSTRUCTORS(JSWeakRef)
};
-class JSFinalizationGroupCleanupIterator : public JSObject {
+class JSFinalizationGroupCleanupIterator
+ : public TorqueGeneratedJSFinalizationGroupCleanupIterator<
+ JSFinalizationGroupCleanupIterator, JSObject> {
public:
DECL_PRINTER(JSFinalizationGroupCleanupIterator)
DECL_VERIFIER(JSFinalizationGroupCleanupIterator)
- DECL_CAST(JSFinalizationGroupCleanupIterator)
-
- DECL_ACCESSORS(finalization_group, JSFinalizationGroup)
-
- // Layout description.
- DEFINE_FIELD_OFFSET_CONSTANTS(
- JSObject::kHeaderSize,
- TORQUE_GENERATED_JSFINALIZATION_GROUP_CLEANUP_ITERATOR_FIELDS)
- OBJECT_CONSTRUCTORS(JSFinalizationGroupCleanupIterator, JSObject);
+ TQ_OBJECT_CONSTRUCTORS(JSFinalizationGroupCleanupIterator)
};
} // namespace internal
diff --git a/deps/v8/src/objects/keys.cc b/deps/v8/src/objects/keys.cc
index 7496399cad..0231df18d3 100644
--- a/deps/v8/src/objects/keys.cc
+++ b/deps/v8/src/objects/keys.cc
@@ -279,9 +279,13 @@ void FastKeyAccumulator::Prepare() {
is_receiver_simple_enum_ = false;
has_empty_prototype_ = true;
JSReceiver last_prototype;
+ may_have_elements_ = MayHaveElements(*receiver_);
for (PrototypeIterator iter(isolate_, *receiver_); !iter.IsAtEnd();
iter.Advance()) {
JSReceiver current = iter.GetCurrent<JSReceiver>();
+ if (!may_have_elements_) {
+ may_have_elements_ = MayHaveElements(current);
+ }
bool has_no_properties = CheckAndInitalizeEmptyEnumCache(current);
if (has_no_properties) continue;
last_prototype = current;
@@ -338,13 +342,12 @@ Handle<FixedArray> GetFastEnumPropertyKeys(Isolate* isolate,
Handle<DescriptorArray> descriptors =
Handle<DescriptorArray>(map->instance_descriptors(), isolate);
isolate->counters()->enum_cache_misses()->Increment();
- int nod = map->NumberOfOwnDescriptors();
// Create the keys array.
int index = 0;
bool fields_only = true;
keys = isolate->factory()->NewFixedArray(enum_length);
- for (int i = 0; i < nod; i++) {
+ for (InternalIndex i : map->IterateOwnDescriptors()) {
DisallowHeapAllocation no_gc;
PropertyDetails details = descriptors->GetDetails(i);
if (details.IsDontEnum()) continue;
@@ -361,7 +364,7 @@ Handle<FixedArray> GetFastEnumPropertyKeys(Isolate* isolate,
if (fields_only) {
indices = isolate->factory()->NewFixedArray(enum_length);
index = 0;
- for (int i = 0; i < nod; i++) {
+ for (InternalIndex i : map->IterateOwnDescriptors()) {
DisallowHeapAllocation no_gc;
PropertyDetails details = descriptors->GetDetails(i);
if (details.IsDontEnum()) continue;
@@ -499,12 +502,21 @@ MaybeHandle<FixedArray> FastKeyAccumulator::GetKeysSlow(
accumulator.set_is_for_in(is_for_in_);
accumulator.set_skip_indices(skip_indices_);
accumulator.set_last_non_empty_prototype(last_non_empty_prototype_);
+ accumulator.set_may_have_elements(may_have_elements_);
MAYBE_RETURN(accumulator.CollectKeys(receiver_, receiver_),
MaybeHandle<FixedArray>());
return accumulator.GetKeys(keys_conversion);
}
+bool FastKeyAccumulator::MayHaveElements(JSReceiver receiver) {
+ if (!receiver.IsJSObject()) return true;
+ JSObject object = JSObject::cast(receiver);
+ if (object.HasEnumerableElements()) return true;
+ if (object.HasIndexedInterceptor()) return true;
+ return false;
+}
+
namespace {
enum IndexedOrNamed { kIndexed, kNamed };
@@ -518,13 +530,14 @@ V8_WARN_UNUSED_RESULT ExceptionStatus FilterForEnumerableProperties(
uint32_t length = accessor->GetCapacity(*result, result->elements());
for (uint32_t i = 0; i < length; i++) {
- if (!accessor->HasEntry(*result, i)) continue;
+ InternalIndex entry(i);
+ if (!accessor->HasEntry(*result, entry)) continue;
// args are invalid after args.Call(), create a new one in every iteration.
PropertyCallbackArguments args(accumulator->isolate(), interceptor->data(),
*receiver, *object, Just(kDontThrow));
- Handle<Object> element = accessor->Get(result, i);
+ Handle<Object> element = accessor->Get(result, entry);
Handle<Object> attributes;
if (type == kIndexed) {
uint32_t number;
@@ -624,7 +637,7 @@ base::Optional<int> CollectOwnPropertyNamesInternal(
int first_skipped = -1;
PropertyFilter filter = keys->filter();
KeyCollectionMode mode = keys->mode();
- for (int i = start_index; i < limit; i++) {
+ for (InternalIndex i : InternalIndex::Range(start_index, limit)) {
bool is_shadowing_key = false;
PropertyDetails details = descs->GetDetails(i);
@@ -645,7 +658,7 @@ base::Optional<int> CollectOwnPropertyNamesInternal(
Name key = descs->GetKey(i);
if (skip_symbols == key.IsSymbol()) {
- if (first_skipped == -1) first_skipped = i;
+ if (first_skipped == -1) first_skipped = i.as_int();
continue;
}
if (key.FilterKey(keys->filter())) continue;
@@ -689,13 +702,15 @@ Maybe<bool> KeyAccumulator::CollectOwnPropertyNames(Handle<JSReceiver> receiver,
Map map = object->map();
int nof_descriptors = map.NumberOfOwnDescriptors();
if (enum_keys->length() != nof_descriptors) {
- Handle<DescriptorArray> descs =
- Handle<DescriptorArray>(map.instance_descriptors(), isolate_);
- for (int i = 0; i < nof_descriptors; i++) {
- PropertyDetails details = descs->GetDetails(i);
- if (!details.IsDontEnum()) continue;
- Object key = descs->GetKey(i);
- this->AddShadowingKey(key);
+ if (map.prototype(isolate_) != ReadOnlyRoots(isolate_).null_value()) {
+ Handle<DescriptorArray> descs =
+ Handle<DescriptorArray>(map.instance_descriptors(), isolate_);
+ for (InternalIndex i : InternalIndex::Range(nof_descriptors)) {
+ PropertyDetails details = descs->GetDetails(i);
+ if (!details.IsDontEnum()) continue;
+ Object key = descs->GetKey(i);
+ this->AddShadowingKey(key);
+ }
}
}
} else if (object->IsJSGlobalObject()) {
@@ -823,7 +838,9 @@ Maybe<bool> KeyAccumulator::CollectOwnKeys(Handle<JSReceiver> receiver,
return Just(true);
}
- MAYBE_RETURN(CollectOwnElementIndices(receiver, object), Nothing<bool>());
+ if (may_have_elements_) {
+ MAYBE_RETURN(CollectOwnElementIndices(receiver, object), Nothing<bool>());
+ }
MAYBE_RETURN(CollectOwnPropertyNames(receiver, object), Nothing<bool>());
return Just(true);
}
diff --git a/deps/v8/src/objects/keys.h b/deps/v8/src/objects/keys.h
index 5d8632e2a7..4c2307a20b 100644
--- a/deps/v8/src/objects/keys.h
+++ b/deps/v8/src/objects/keys.h
@@ -93,6 +93,7 @@ class KeyAccumulator final {
void set_last_non_empty_prototype(Handle<JSReceiver> object) {
last_non_empty_prototype_ = object;
}
+ void set_may_have_elements(bool value) { may_have_elements_ = value; }
// Shadowing keys are used to filter keys. This happens when non-enumerable
// keys appear again on the prototype chain.
void AddShadowingKey(Object key);
@@ -125,6 +126,7 @@ class KeyAccumulator final {
// For all the keys on the first receiver adding a shadowing key we can skip
// the shadow check.
bool skip_shadow_check_ = true;
+ bool may_have_elements_ = true;
DISALLOW_COPY_AND_ASSIGN(KeyAccumulator);
};
@@ -149,6 +151,7 @@ class FastKeyAccumulator {
bool is_receiver_simple_enum() { return is_receiver_simple_enum_; }
bool has_empty_prototype() { return has_empty_prototype_; }
+ bool may_have_elements() { return may_have_elements_; }
MaybeHandle<FixedArray> GetKeys(
GetKeysConversion convert = GetKeysConversion::kKeepNumbers);
@@ -160,6 +163,8 @@ class FastKeyAccumulator {
MaybeHandle<FixedArray> GetOwnKeysWithUninitializedEnumCache();
+ bool MayHaveElements(JSReceiver receiver);
+
Isolate* isolate_;
Handle<JSReceiver> receiver_;
Handle<JSReceiver> last_non_empty_prototype_;
@@ -169,6 +174,7 @@ class FastKeyAccumulator {
bool skip_indices_ = false;
bool is_receiver_simple_enum_ = false;
bool has_empty_prototype_ = false;
+ bool may_have_elements_ = true;
DISALLOW_COPY_AND_ASSIGN(FastKeyAccumulator);
};
diff --git a/deps/v8/src/objects/layout-descriptor-inl.h b/deps/v8/src/objects/layout-descriptor-inl.h
index ad0a058a92..30fe132129 100644
--- a/deps/v8/src/objects/layout-descriptor-inl.h
+++ b/deps/v8/src/objects/layout-descriptor-inl.h
@@ -169,7 +169,7 @@ int LayoutDescriptor::CalculateCapacity(Map map, DescriptorArray descriptors,
} else {
layout_descriptor_length = 0;
- for (int i = 0; i < num_descriptors; i++) {
+ for (InternalIndex i : InternalIndex::Range(num_descriptors)) {
PropertyDetails details = descriptors.GetDetails(i);
if (!InobjectUnboxedField(inobject_properties, details)) continue;
int field_index = details.field_index();
@@ -188,7 +188,7 @@ LayoutDescriptor LayoutDescriptor::Initialize(
DisallowHeapAllocation no_allocation;
int inobject_properties = map.GetInObjectProperties();
- for (int i = 0; i < num_descriptors; i++) {
+ for (InternalIndex i : InternalIndex::Range(num_descriptors)) {
PropertyDetails details = descriptors.GetDetails(i);
if (!InobjectUnboxedField(inobject_properties, details)) {
DCHECK(details.location() != kField ||
diff --git a/deps/v8/src/objects/layout-descriptor.cc b/deps/v8/src/objects/layout-descriptor.cc
index 76421aaf4f..2b588a58bf 100644
--- a/deps/v8/src/objects/layout-descriptor.cc
+++ b/deps/v8/src/objects/layout-descriptor.cc
@@ -258,9 +258,8 @@ LayoutDescriptor LayoutDescriptor::Trim(Heap* heap, Map map,
bool LayoutDescriptor::IsConsistentWithMap(Map map, bool check_tail) {
if (FLAG_unbox_double_fields) {
DescriptorArray descriptors = map.instance_descriptors();
- int nof_descriptors = map.NumberOfOwnDescriptors();
int last_field_index = 0;
- for (int i = 0; i < nof_descriptors; i++) {
+ for (InternalIndex i : map.IterateOwnDescriptors()) {
PropertyDetails details = descriptors.GetDetails(i);
if (details.location() != kField) continue;
FieldIndex field_index = FieldIndex::ForDescriptor(map, i);
diff --git a/deps/v8/src/objects/literal-objects.cc b/deps/v8/src/objects/literal-objects.cc
index 95beb6cbdb..98c41cbfb5 100644
--- a/deps/v8/src/objects/literal-objects.cc
+++ b/deps/v8/src/objects/literal-objects.cc
@@ -31,11 +31,11 @@ void AddToDescriptorArrayTemplate(
Isolate* isolate, Handle<DescriptorArray> descriptor_array_template,
Handle<Name> name, ClassBoilerplate::ValueKind value_kind,
Handle<Object> value) {
- int entry = descriptor_array_template->Search(
+ InternalIndex entry = descriptor_array_template->Search(
*name, descriptor_array_template->number_of_descriptors());
// TODO(ishell): deduplicate properties at AST level, this will allow us to
// avoid creation of closures that will be overwritten anyway.
- if (entry == DescriptorArray::kNotFound) {
+ if (entry.is_not_found()) {
// Entry not found, add new one.
Descriptor d;
if (value_kind == ClassBoilerplate::kData) {
@@ -412,8 +412,8 @@ Handle<ClassBoilerplate> ClassBoilerplate::BuildClassBoilerplate(
ObjectDescriptor static_desc(kMinimumClassPropertiesCount);
ObjectDescriptor instance_desc(kMinimumPrototypePropertiesCount);
- for (int i = 0; i < expr->properties()->length(); i++) {
- ClassLiteral::Property* property = expr->properties()->at(i);
+ for (int i = 0; i < expr->public_members()->length(); i++) {
+ ClassLiteral::Property* property = expr->public_members()->at(i);
ObjectDescriptor& desc =
property->is_static() ? static_desc : instance_desc;
if (property->is_computed_name()) {
@@ -477,14 +477,8 @@ Handle<ClassBoilerplate> ClassBoilerplate::BuildClassBoilerplate(
//
int dynamic_argument_index = ClassBoilerplate::kFirstDynamicArgumentIndex;
- for (int i = 0; i < expr->properties()->length(); i++) {
- ClassLiteral::Property* property = expr->properties()->at(i);
-
- // Private members are not processed using the class boilerplate.
- if (property->is_private()) {
- continue;
- }
-
+ for (int i = 0; i < expr->public_members()->length(); i++) {
+ ClassLiteral::Property* property = expr->public_members()->at(i);
ClassBoilerplate::ValueKind value_kind;
switch (property->kind()) {
case ClassLiteral::Property::METHOD:
diff --git a/deps/v8/src/objects/lookup-inl.h b/deps/v8/src/objects/lookup-inl.h
index 648398be5e..49a42e7131 100644
--- a/deps/v8/src/objects/lookup-inl.h
+++ b/deps/v8/src/objects/lookup-inl.h
@@ -10,6 +10,7 @@
#include "src/handles/handles-inl.h"
#include "src/heap/factory-inl.h"
#include "src/objects/api-callbacks.h"
+#include "src/objects/internal-index.h"
#include "src/objects/map-inl.h"
#include "src/objects/name-inl.h"
#include "src/objects/objects-inl.h"
@@ -136,11 +137,11 @@ void LookupIterator::UpdateProtector() {
}
}
-int LookupIterator::descriptor_number() const {
+InternalIndex LookupIterator::descriptor_number() const {
DCHECK(!IsElement());
DCHECK(has_property_);
DCHECK(holder_->HasFastProperties(isolate_));
- return number_;
+ return InternalIndex(number_);
}
int LookupIterator::dictionary_entry() const {
diff --git a/deps/v8/src/objects/lookup.cc b/deps/v8/src/objects/lookup.cc
index 4646b71a9e..7f626cc223 100644
--- a/deps/v8/src/objects/lookup.cc
+++ b/deps/v8/src/objects/lookup.cc
@@ -249,10 +249,10 @@ void LookupIterator::InternalUpdateProtector() {
}
if (!Protectors::IsArraySpeciesLookupChainIntact(isolate_) &&
- !isolate_->IsPromiseSpeciesLookupChainIntact() &&
+ !Protectors::IsPromiseSpeciesLookupChainIntact(isolate_) &&
!Protectors::IsRegExpSpeciesLookupChainProtectorIntact(
native_context) &&
- !isolate_->IsTypedArraySpeciesLookupChainIntact()) {
+ !Protectors::IsTypedArraySpeciesLookupChainIntact(isolate_)) {
return;
}
// Setting the constructor property could change an instance's @@species
@@ -263,8 +263,8 @@ void LookupIterator::InternalUpdateProtector() {
Protectors::InvalidateArraySpeciesLookupChain(isolate_);
return;
} else if (receiver->IsJSPromise(isolate_)) {
- if (!isolate_->IsPromiseSpeciesLookupChainIntact()) return;
- isolate_->InvalidatePromiseSpeciesProtector();
+ if (!Protectors::IsPromiseSpeciesLookupChainIntact(isolate_)) return;
+ Protectors::InvalidatePromiseSpeciesLookupChain(isolate_);
return;
} else if (receiver->IsJSRegExp(isolate_)) {
if (!Protectors::IsRegExpSpeciesLookupChainProtectorIntact(
@@ -275,8 +275,8 @@ void LookupIterator::InternalUpdateProtector() {
native_context);
return;
} else if (receiver->IsJSTypedArray(isolate_)) {
- if (!isolate_->IsTypedArraySpeciesLookupChainIntact()) return;
- isolate_->InvalidateTypedArraySpeciesProtector();
+ if (!Protectors::IsTypedArraySpeciesLookupChainIntact(isolate_)) return;
+ Protectors::InvalidateTypedArraySpeciesLookupChain(isolate_);
return;
}
if (receiver->map(isolate_).is_prototype_map()) {
@@ -294,8 +294,8 @@ void LookupIterator::InternalUpdateProtector() {
Protectors::InvalidateArraySpeciesLookupChain(isolate_);
} else if (isolate_->IsInAnyContext(*receiver,
Context::PROMISE_PROTOTYPE_INDEX)) {
- if (!isolate_->IsPromiseSpeciesLookupChainIntact()) return;
- isolate_->InvalidatePromiseSpeciesProtector();
+ if (!Protectors::IsPromiseSpeciesLookupChainIntact(isolate_)) return;
+ Protectors::InvalidatePromiseSpeciesLookupChain(isolate_);
} else if (isolate_->IsInAnyContext(*receiver,
Context::REGEXP_PROTOTYPE_INDEX)) {
if (!Protectors::IsRegExpSpeciesLookupChainProtectorIntact(
@@ -307,8 +307,8 @@ void LookupIterator::InternalUpdateProtector() {
} else if (isolate_->IsInAnyContext(
receiver->map(isolate_).prototype(isolate_),
Context::TYPED_ARRAY_PROTOTYPE_INDEX)) {
- if (!isolate_->IsTypedArraySpeciesLookupChainIntact()) return;
- isolate_->InvalidateTypedArraySpeciesProtector();
+ if (!Protectors::IsTypedArraySpeciesLookupChainIntact(isolate_)) return;
+ Protectors::InvalidateTypedArraySpeciesLookupChain(isolate_);
}
}
} else if (*name_ == roots.next_string()) {
@@ -317,26 +317,26 @@ void LookupIterator::InternalUpdateProtector() {
*receiver, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX)) {
// Setting the next property of %ArrayIteratorPrototype% also needs to
// invalidate the array iterator protector.
- if (!isolate_->IsArrayIteratorLookupChainIntact()) return;
- isolate_->InvalidateArrayIteratorProtector();
+ if (!Protectors::IsArrayIteratorLookupChainIntact(isolate_)) return;
+ Protectors::InvalidateArrayIteratorLookupChain(isolate_);
} else if (receiver->IsJSMapIterator() ||
isolate_->IsInAnyContext(
*receiver, Context::INITIAL_MAP_ITERATOR_PROTOTYPE_INDEX)) {
- if (!isolate_->IsMapIteratorLookupChainIntact()) return;
- isolate_->InvalidateMapIteratorProtector();
+ if (!Protectors::IsMapIteratorLookupChainIntact(isolate_)) return;
+ Protectors::InvalidateMapIteratorLookupChain(isolate_);
} else if (receiver->IsJSSetIterator() ||
isolate_->IsInAnyContext(
*receiver, Context::INITIAL_SET_ITERATOR_PROTOTYPE_INDEX)) {
- if (!isolate_->IsSetIteratorLookupChainIntact()) return;
- isolate_->InvalidateSetIteratorProtector();
+ if (!Protectors::IsSetIteratorLookupChainIntact(isolate_)) return;
+ Protectors::InvalidateSetIteratorLookupChain(isolate_);
} else if (receiver->IsJSStringIterator() ||
isolate_->IsInAnyContext(
*receiver,
Context::INITIAL_STRING_ITERATOR_PROTOTYPE_INDEX)) {
// Setting the next property of %StringIteratorPrototype% invalidates the
// string iterator protector.
- if (!isolate_->IsStringIteratorLookupChainIntact()) return;
- isolate_->InvalidateStringIteratorProtector();
+ if (!Protectors::IsStringIteratorLookupChainIntact(isolate_)) return;
+ Protectors::InvalidateStringIteratorLookupChain(isolate_);
}
} else if (*name_ == roots.species_symbol()) {
// Fetching the context in here since the operation is rather expensive.
@@ -345,10 +345,10 @@ void LookupIterator::InternalUpdateProtector() {
}
if (!Protectors::IsArraySpeciesLookupChainIntact(isolate_) &&
- !isolate_->IsPromiseSpeciesLookupChainIntact() &&
+ !Protectors::IsPromiseSpeciesLookupChainIntact(isolate_) &&
!Protectors::IsRegExpSpeciesLookupChainProtectorIntact(
native_context) &&
- !isolate_->IsTypedArraySpeciesLookupChainIntact()) {
+ !Protectors::IsTypedArraySpeciesLookupChainIntact(isolate_)) {
return;
}
// Setting the Symbol.species property of any Array, Promise or TypedArray
@@ -360,8 +360,8 @@ void LookupIterator::InternalUpdateProtector() {
Protectors::InvalidateArraySpeciesLookupChain(isolate_);
} else if (isolate_->IsInAnyContext(*receiver,
Context::PROMISE_FUNCTION_INDEX)) {
- if (!isolate_->IsPromiseSpeciesLookupChainIntact()) return;
- isolate_->InvalidatePromiseSpeciesProtector();
+ if (!Protectors::IsPromiseSpeciesLookupChainIntact(isolate_)) return;
+ Protectors::InvalidatePromiseSpeciesLookupChain(isolate_);
} else if (isolate_->IsInAnyContext(*receiver,
Context::REGEXP_FUNCTION_INDEX)) {
if (!Protectors::IsRegExpSpeciesLookupChainProtectorIntact(
@@ -371,37 +371,37 @@ void LookupIterator::InternalUpdateProtector() {
Protectors::InvalidateRegExpSpeciesLookupChainProtector(isolate_,
native_context);
} else if (IsTypedArrayFunctionInAnyContext(isolate_, *receiver)) {
- if (!isolate_->IsTypedArraySpeciesLookupChainIntact()) return;
- isolate_->InvalidateTypedArraySpeciesProtector();
+ if (!Protectors::IsTypedArraySpeciesLookupChainIntact(isolate_)) return;
+ Protectors::InvalidateTypedArraySpeciesLookupChain(isolate_);
}
} else if (*name_ == roots.is_concat_spreadable_symbol()) {
- if (!isolate_->IsIsConcatSpreadableLookupChainIntact()) return;
- isolate_->InvalidateIsConcatSpreadableProtector();
+ if (!Protectors::IsIsConcatSpreadableLookupChainIntact(isolate_)) return;
+ Protectors::InvalidateIsConcatSpreadableLookupChain(isolate_);
} else if (*name_ == roots.iterator_symbol()) {
if (receiver->IsJSArray(isolate_)) {
- if (!isolate_->IsArrayIteratorLookupChainIntact()) return;
- isolate_->InvalidateArrayIteratorProtector();
+ if (!Protectors::IsArrayIteratorLookupChainIntact(isolate_)) return;
+ Protectors::InvalidateArrayIteratorLookupChain(isolate_);
} else if (receiver->IsJSSet(isolate_) || receiver->IsJSSetIterator() ||
isolate_->IsInAnyContext(
*receiver, Context::INITIAL_SET_ITERATOR_PROTOTYPE_INDEX) ||
isolate_->IsInAnyContext(*receiver,
Context::INITIAL_SET_PROTOTYPE_INDEX)) {
- if (isolate_->IsSetIteratorLookupChainIntact()) {
- isolate_->InvalidateSetIteratorProtector();
+ if (Protectors::IsSetIteratorLookupChainIntact(isolate_)) {
+ Protectors::InvalidateSetIteratorLookupChain(isolate_);
}
} else if (receiver->IsJSMapIterator() ||
isolate_->IsInAnyContext(
*receiver, Context::INITIAL_MAP_ITERATOR_PROTOTYPE_INDEX)) {
- if (isolate_->IsMapIteratorLookupChainIntact()) {
- isolate_->InvalidateMapIteratorProtector();
+ if (Protectors::IsMapIteratorLookupChainIntact(isolate_)) {
+ Protectors::InvalidateMapIteratorLookupChain(isolate_);
}
} else if (isolate_->IsInAnyContext(
*receiver, Context::INITIAL_ITERATOR_PROTOTYPE_INDEX)) {
- if (isolate_->IsMapIteratorLookupChainIntact()) {
- isolate_->InvalidateMapIteratorProtector();
+ if (Protectors::IsMapIteratorLookupChainIntact(isolate_)) {
+ Protectors::InvalidateMapIteratorLookupChain(isolate_);
}
- if (isolate_->IsSetIteratorLookupChainIntact()) {
- isolate_->InvalidateSetIteratorProtector();
+ if (Protectors::IsSetIteratorLookupChainIntact(isolate_)) {
+ Protectors::InvalidateSetIteratorLookupChain(isolate_);
}
} else if (isolate_->IsInAnyContext(
*receiver, Context::INITIAL_STRING_PROTOTYPE_INDEX)) {
@@ -409,18 +409,18 @@ void LookupIterator::InternalUpdateProtector() {
// the string iterator protector. Symbol.iterator can also be set on a
// String wrapper, but not on a primitive string. We only support
// protector for primitive strings.
- if (!isolate_->IsStringIteratorLookupChainIntact()) return;
- isolate_->InvalidateStringIteratorProtector();
+ if (!Protectors::IsStringIteratorLookupChainIntact(isolate_)) return;
+ Protectors::InvalidateStringIteratorLookupChain(isolate_);
}
} else if (*name_ == roots.resolve_string()) {
- if (!isolate_->IsPromiseResolveLookupChainIntact()) return;
+ if (!Protectors::IsPromiseResolveLookupChainIntact(isolate_)) return;
// Setting the "resolve" property on any %Promise% intrinsic object
// invalidates the Promise.resolve protector.
if (isolate_->IsInAnyContext(*receiver, Context::PROMISE_FUNCTION_INDEX)) {
- isolate_->InvalidatePromiseResolveProtector();
+ Protectors::InvalidatePromiseResolveLookupChain(isolate_);
}
} else if (*name_ == roots.then_string()) {
- if (!isolate_->IsPromiseThenLookupChainIntact()) return;
+ if (!Protectors::IsPromiseThenLookupChainIntact(isolate_)) return;
// Setting the "then" property on any JSPromise instance or on the
// initial %PromisePrototype% invalidates the Promise#then protector.
// Also setting the "then" property on the initial %ObjectPrototype%
@@ -432,7 +432,7 @@ void LookupIterator::InternalUpdateProtector() {
isolate_->IsInAnyContext(*receiver,
Context::INITIAL_OBJECT_PROTOTYPE_INDEX) ||
isolate_->IsInAnyContext(*receiver, Context::PROMISE_PROTOTYPE_INDEX)) {
- isolate_->InvalidatePromiseThenProtector();
+ Protectors::InvalidatePromiseThenLookupChain(isolate_);
}
}
}
@@ -534,7 +534,7 @@ void LookupIterator::ReconfigureDataProperty(Handle<Object> value,
DCHECK(attributes != NONE || !holder_obj->HasFastElements(isolate_));
Handle<FixedArrayBase> elements(holder_obj->elements(isolate_), isolate());
holder_obj->GetElementsAccessor(isolate_)->Reconfigure(
- holder_obj, elements, number_, value, attributes);
+ holder_obj, elements, InternalIndex(number_), value, attributes);
ReloadPropertyInformation<true>();
} else if (holder_obj->HasFastProperties(isolate_)) {
Handle<Map> old_map(holder_obj->map(isolate_), isolate_);
@@ -699,8 +699,7 @@ void LookupIterator::ApplyTransitionToDataProperty(
}
if (simple_transition) {
- int number = transition->LastAdded();
- number_ = static_cast<uint32_t>(number);
+ number_ = transition->LastAdded().as_uint32();
property_details_ = transition->GetLastDescriptorDetails(isolate_);
state_ = DATA;
} else if (receiver->map(isolate_).is_dictionary_map()) {
@@ -731,7 +730,7 @@ void LookupIterator::Delete() {
if (IsElement()) {
Handle<JSObject> object = Handle<JSObject>::cast(holder);
ElementsAccessor* accessor = object->GetElementsAccessor(isolate_);
- accessor->Delete(object, number_);
+ accessor->Delete(object, InternalIndex(number_));
} else {
DCHECK(!name()->IsPrivateName(isolate_));
bool is_prototype_map = holder->map(isolate_).is_prototype_map();
@@ -777,8 +776,11 @@ void LookupIterator::TransitionToAccessorProperty(
} else if (state_ == INTERCEPTOR) {
LookupInRegularHolder<false>(*old_map, *holder_);
}
- int descriptor =
- IsFound() ? static_cast<int>(number_) : DescriptorArray::kNotFound;
+ // TODO(jkummerow): {IsFound()} should be enough once {number_} has type
+ // {InternalIndex}.
+ InternalIndex descriptor = (IsFound() && number_ != kMaxUInt32)
+ ? InternalIndex(number_)
+ : InternalIndex::NotFound();
Handle<Map> new_map = Map::TransitionToAccessorProperty(
isolate_, old_map, name_, descriptor, getter, setter, attributes);
@@ -787,8 +789,7 @@ void LookupIterator::TransitionToAccessorProperty(
JSObject::MigrateToMap(isolate_, receiver, new_map);
if (simple_transition) {
- int number = new_map->LastAdded();
- number_ = static_cast<uint32_t>(number);
+ number_ = new_map->LastAdded().as_uint32();
property_details_ = new_map->GetLastDescriptorDetails(isolate_);
state_ = ACCESSOR;
return;
@@ -894,23 +895,24 @@ Handle<Object> LookupIterator::FetchValue() const {
if (IsElement()) {
Handle<JSObject> holder = GetHolder<JSObject>();
ElementsAccessor* accessor = holder->GetElementsAccessor(isolate_);
- return accessor->Get(holder, number_);
+ return accessor->Get(holder, InternalIndex(number_));
} else if (holder_->IsJSGlobalObject(isolate_)) {
Handle<JSGlobalObject> holder = GetHolder<JSGlobalObject>();
result = holder->global_dictionary(isolate_).ValueAt(isolate_, number_);
} else if (!holder_->HasFastProperties(isolate_)) {
- result = holder_->property_dictionary(isolate_).ValueAt(isolate_, number_);
+ result = holder_->property_dictionary(isolate_).ValueAt(isolate_,
+ dictionary_entry());
} else if (property_details_.location() == kField) {
DCHECK_EQ(kData, property_details_.kind());
Handle<JSObject> holder = GetHolder<JSObject>();
FieldIndex field_index =
- FieldIndex::ForDescriptor(holder->map(isolate_), number_);
+ FieldIndex::ForDescriptor(holder->map(isolate_), descriptor_number());
return JSObject::FastPropertyAt(holder, property_details_.representation(),
field_index);
} else {
result =
holder_->map(isolate_).instance_descriptors(isolate_).GetStrongValue(
- isolate_, number_);
+ isolate_, descriptor_number());
}
return handle(result, isolate_);
}
@@ -922,7 +924,7 @@ bool LookupIterator::IsConstFieldValueEqualTo(Object value) const {
DCHECK_EQ(PropertyConstness::kConst, property_details_.constness());
Handle<JSObject> holder = GetHolder<JSObject>();
FieldIndex field_index =
- FieldIndex::ForDescriptor(holder->map(isolate_), number_);
+ FieldIndex::ForDescriptor(holder->map(isolate_), descriptor_number());
if (property_details_.representation().IsDouble()) {
if (!value.IsNumber(isolate_)) return false;
uint64_t bits;
@@ -958,7 +960,8 @@ int LookupIterator::GetFieldDescriptorIndex() const {
DCHECK(holder_->HasFastProperties());
DCHECK_EQ(kField, property_details_.location());
DCHECK_EQ(kData, property_details_.kind());
- return descriptor_number();
+ // TODO(jkummerow): Propagate InternalIndex further.
+ return descriptor_number().as_int();
}
int LookupIterator::GetAccessorIndex() const {
@@ -966,7 +969,7 @@ int LookupIterator::GetAccessorIndex() const {
DCHECK(holder_->HasFastProperties(isolate_));
DCHECK_EQ(kDescriptor, property_details_.location());
DCHECK_EQ(kAccessor, property_details_.kind());
- return descriptor_number();
+ return descriptor_number().as_int();
}
Handle<Map> LookupIterator::GetFieldOwnerMap() const {
@@ -1028,7 +1031,7 @@ void LookupIterator::WriteDataValue(Handle<Object> value,
if (IsElement()) {
Handle<JSObject> object = Handle<JSObject>::cast(holder);
ElementsAccessor* accessor = object->GetElementsAccessor(isolate_);
- accessor->Set(object, number_, *value);
+ accessor->Set(object, InternalIndex(number_), *value);
} else if (holder->HasFastProperties(isolate_)) {
if (property_details_.location() == kField) {
// Check that in case of VariableMode::kConst field the existing value is
@@ -1164,13 +1167,15 @@ LookupIterator::State LookupIterator::LookupInRegularHolder(
JSObject js_object = JSObject::cast(holder);
ElementsAccessor* accessor = js_object.GetElementsAccessor(isolate_);
FixedArrayBase backing_store = js_object.elements(isolate_);
- number_ =
+ // TODO(jkummerow): {number_} should have type InternalIndex.
+ InternalIndex entry =
accessor->GetEntryForIndex(isolate_, js_object, backing_store, index_);
+ number_ = entry.is_found() ? entry.as_uint32() : kMaxUInt32;
if (number_ == kMaxUInt32) {
return holder.IsJSTypedArray(isolate_) ? INTEGER_INDEXED_EXOTIC
: NOT_FOUND;
}
- property_details_ = accessor->GetDetails(js_object, number_);
+ property_details_ = accessor->GetDetails(js_object, InternalIndex(number_));
if (map.has_frozen_elements()) {
property_details_ = property_details_.CopyAddAttributes(FROZEN);
} else if (map.has_sealed_elements()) {
@@ -1178,10 +1183,10 @@ LookupIterator::State LookupIterator::LookupInRegularHolder(
}
} else if (!map.is_dictionary_map()) {
DescriptorArray descriptors = map.instance_descriptors(isolate_);
- int number = descriptors.SearchWithCache(isolate_, *name_, map);
- if (number == DescriptorArray::kNotFound) return NotFound(holder);
- number_ = static_cast<uint32_t>(number);
- property_details_ = descriptors.GetDetails(number_);
+ InternalIndex number = descriptors.SearchWithCache(isolate_, *name_, map);
+ if (number.is_not_found()) return NotFound(holder);
+ number_ = number.as_uint32();
+ property_details_ = descriptors.GetDetails(InternalIndex(number_));
} else {
DCHECK_IMPLIES(holder.IsJSProxy(isolate_), name()->IsPrivate(isolate_));
NameDictionary dict = holder.property_dictionary(isolate_);
diff --git a/deps/v8/src/objects/lookup.h b/deps/v8/src/objects/lookup.h
index 565ea4bb75..2a1f0e2f1b 100644
--- a/deps/v8/src/objects/lookup.h
+++ b/deps/v8/src/objects/lookup.h
@@ -241,7 +241,7 @@ class V8_EXPORT_PRIVATE LookupIterator final {
bool check_interceptor() const {
return (configuration_ & kInterceptor) != 0;
}
- inline int descriptor_number() const;
+ inline InternalIndex descriptor_number() const;
inline int dictionary_entry() const;
static inline Configuration ComputeConfiguration(Isolate* isolate,
diff --git a/deps/v8/src/objects/map-inl.h b/deps/v8/src/objects/map-inl.h
index 48bb86e2da..557c004401 100644
--- a/deps/v8/src/objects/map-inl.h
+++ b/deps/v8/src/objects/map-inl.h
@@ -112,7 +112,7 @@ bool Map::IsMostGeneralFieldType(Representation representation,
bool Map::CanHaveFastTransitionableElementsKind(InstanceType instance_type) {
return instance_type == JS_ARRAY_TYPE ||
instance_type == JS_PRIMITIVE_WRAPPER_TYPE ||
- instance_type == JS_ARGUMENTS_TYPE;
+ instance_type == JS_ARGUMENTS_OBJECT_TYPE;
}
bool Map::CanHaveFastTransitionableElementsKind() const {
@@ -177,10 +177,10 @@ PropertyDetails Map::GetLastDescriptorDetails(Isolate* isolate) const {
return instance_descriptors(isolate).GetDetails(LastAdded());
}
-int Map::LastAdded() const {
+InternalIndex Map::LastAdded() const {
int number_of_own_descriptors = NumberOfOwnDescriptors();
DCHECK_GT(number_of_own_descriptors, 0);
- return number_of_own_descriptors - 1;
+ return InternalIndex(number_of_own_descriptors - 1);
}
int Map::NumberOfOwnDescriptors() const {
@@ -194,6 +194,10 @@ void Map::SetNumberOfOwnDescriptors(int number) {
set_bit_field3(NumberOfOwnDescriptorsBits::update(bit_field3(), number));
}
+InternalIndex::Range Map::IterateOwnDescriptors() const {
+ return InternalIndex::Range(NumberOfOwnDescriptors());
+}
+
int Map::EnumLength() const { return EnumLengthBits::decode(bit_field3()); }
void Map::SetEnumLength(int length) {
@@ -207,7 +211,8 @@ void Map::SetEnumLength(int length) {
FixedArrayBase Map::GetInitialElements() const {
FixedArrayBase result;
- if (has_fast_elements() || has_fast_string_wrapper_elements()) {
+ if (has_fast_elements() || has_fast_string_wrapper_elements() ||
+ has_any_nonextensible_elements()) {
result = GetReadOnlyRoots().empty_fixed_array();
} else if (has_fast_sloppy_arguments_elements()) {
result = GetReadOnlyRoots().empty_sloppy_arguments_elements();
@@ -540,12 +545,12 @@ void Map::mark_unstable() {
bool Map::is_stable() const { return !IsUnstableBit::decode(bit_field3()); }
bool Map::CanBeDeprecated() const {
- int descriptor = LastAdded();
- for (int i = 0; i <= descriptor; i++) {
+ for (InternalIndex i : IterateOwnDescriptors()) {
PropertyDetails details = instance_descriptors().GetDetails(i);
if (details.representation().IsNone()) return true;
if (details.representation().IsSmi()) return true;
- if (details.representation().IsDouble()) return true;
+ if (details.representation().IsDouble() && FLAG_unbox_double_fields)
+ return true;
if (details.representation().IsHeapObject()) return true;
if (details.kind() == kData && details.location() == kDescriptor) {
return true;
@@ -584,7 +589,7 @@ bool Map::IsNullOrUndefinedMap() const {
}
bool Map::IsPrimitiveMap() const {
- return instance_type() <= LAST_PRIMITIVE_TYPE;
+ return instance_type() <= LAST_PRIMITIVE_HEAP_OBJECT_TYPE;
}
LayoutDescriptor Map::layout_descriptor_gc_safe() const {
@@ -675,8 +680,10 @@ void Map::AppendDescriptor(Isolate* isolate, Descriptor* desc) {
// barrier.
descriptors.Append(desc);
SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
+#ifndef V8_DISABLE_WRITE_BARRIERS
MarkingBarrierForDescriptorArray(isolate->heap(), *this, descriptors,
number_of_own_descriptors + 1);
+#endif
}
// Properly mark the map if the {desc} is an "interesting symbol".
if (desc->GetKey()->IsInterestingSymbol()) {
diff --git a/deps/v8/src/objects/map-updater.cc b/deps/v8/src/objects/map-updater.cc
index 49b9ccea91..8c9b94014f 100644
--- a/deps/v8/src/objects/map-updater.cc
+++ b/deps/v8/src/objects/map-updater.cc
@@ -38,12 +38,12 @@ MapUpdater::MapUpdater(Isolate* isolate, Handle<Map> old_map)
!old_map->FindRootMap(isolate).GetConstructor().IsFunctionTemplateInfo());
}
-Name MapUpdater::GetKey(int descriptor) const {
+Name MapUpdater::GetKey(InternalIndex descriptor) const {
return old_descriptors_->GetKey(descriptor);
}
-PropertyDetails MapUpdater::GetDetails(int descriptor) const {
- DCHECK_LE(0, descriptor);
+PropertyDetails MapUpdater::GetDetails(InternalIndex descriptor) const {
+ DCHECK(descriptor.is_found());
if (descriptor == modified_descriptor_) {
PropertyAttributes attributes = new_attributes_;
// If the original map was sealed or frozen, let us used the old
@@ -59,8 +59,8 @@ PropertyDetails MapUpdater::GetDetails(int descriptor) const {
return old_descriptors_->GetDetails(descriptor);
}
-Object MapUpdater::GetValue(int descriptor) const {
- DCHECK_LE(0, descriptor);
+Object MapUpdater::GetValue(InternalIndex descriptor) const {
+ DCHECK(descriptor.is_found());
if (descriptor == modified_descriptor_) {
DCHECK_EQ(kDescriptor, new_location_);
return *new_value_;
@@ -69,8 +69,8 @@ Object MapUpdater::GetValue(int descriptor) const {
return old_descriptors_->GetStrongValue(descriptor);
}
-FieldType MapUpdater::GetFieldType(int descriptor) const {
- DCHECK_LE(0, descriptor);
+FieldType MapUpdater::GetFieldType(InternalIndex descriptor) const {
+ DCHECK(descriptor.is_found());
if (descriptor == modified_descriptor_) {
DCHECK_EQ(kField, new_location_);
return *new_field_type_;
@@ -80,9 +80,9 @@ FieldType MapUpdater::GetFieldType(int descriptor) const {
}
Handle<FieldType> MapUpdater::GetOrComputeFieldType(
- int descriptor, PropertyLocation location,
+ InternalIndex descriptor, PropertyLocation location,
Representation representation) const {
- DCHECK_LE(0, descriptor);
+ DCHECK(descriptor.is_found());
// |location| is just a pre-fetched GetDetails(descriptor).location().
DCHECK_EQ(location, GetDetails(descriptor).location());
if (location == kField) {
@@ -93,7 +93,7 @@ Handle<FieldType> MapUpdater::GetOrComputeFieldType(
}
Handle<FieldType> MapUpdater::GetOrComputeFieldType(
- Handle<DescriptorArray> descriptors, int descriptor,
+ Handle<DescriptorArray> descriptors, InternalIndex descriptor,
PropertyLocation location, Representation representation) {
// |location| is just a pre-fetched GetDetails(descriptor).location().
DCHECK_EQ(descriptors->GetDetails(descriptor).location(), location);
@@ -105,13 +105,13 @@ Handle<FieldType> MapUpdater::GetOrComputeFieldType(
}
}
-Handle<Map> MapUpdater::ReconfigureToDataField(int descriptor,
+Handle<Map> MapUpdater::ReconfigureToDataField(InternalIndex descriptor,
PropertyAttributes attributes,
PropertyConstness constness,
Representation representation,
Handle<FieldType> field_type) {
DCHECK_EQ(kInitialized, state_);
- DCHECK_LE(0, descriptor);
+ DCHECK(descriptor.is_found());
DCHECK(!old_map_->is_dictionary_map());
modified_descriptor_ = descriptor;
new_kind_ = kData;
@@ -190,7 +190,7 @@ Handle<Map> MapUpdater::Update() {
return result_map_;
}
-void MapUpdater::GeneralizeField(Handle<Map> map, int modify_index,
+void MapUpdater::GeneralizeField(Handle<Map> map, InternalIndex modify_index,
PropertyConstness new_constness,
Representation new_representation,
Handle<FieldType> new_field_type) {
@@ -338,7 +338,8 @@ MapUpdater::State MapUpdater::FindRootMap() {
}
int root_nof = root_map_->NumberOfOwnDescriptors();
- if (modified_descriptor_ >= 0 && modified_descriptor_ < root_nof) {
+ if (modified_descriptor_.is_found() &&
+ modified_descriptor_.as_int() < root_nof) {
PropertyDetails old_details =
old_descriptors_->GetDetails(modified_descriptor_);
if (old_details.kind() != new_kind_ ||
@@ -374,7 +375,7 @@ MapUpdater::State MapUpdater::FindTargetMap() {
target_map_ = root_map_;
int root_nof = root_map_->NumberOfOwnDescriptors();
- for (int i = root_nof; i < old_nof_; ++i) {
+ for (InternalIndex i : InternalIndex::Range(root_nof, old_nof_)) {
PropertyDetails old_details = GetDetails(i);
Map transition = TransitionsAccessor(isolate_, target_map_)
.SearchTransition(GetKey(i), old_details.kind(),
@@ -423,7 +424,7 @@ MapUpdater::State MapUpdater::FindTargetMap() {
int target_nof = target_map_->NumberOfOwnDescriptors();
if (target_nof == old_nof_) {
#ifdef DEBUG
- if (modified_descriptor_ >= 0) {
+ if (modified_descriptor_.is_found()) {
DescriptorArray target_descriptors = target_map_->instance_descriptors();
PropertyDetails details =
target_descriptors.GetDetails(modified_descriptor_);
@@ -465,7 +466,7 @@ MapUpdater::State MapUpdater::FindTargetMap() {
}
// Find the last compatible target map in the transition tree.
- for (int i = target_nof; i < old_nof_; ++i) {
+ for (InternalIndex i : InternalIndex::Range(target_nof, old_nof_)) {
PropertyDetails old_details = GetDetails(i);
Map transition = TransitionsAccessor(isolate_, target_map_)
.SearchTransition(GetKey(i), old_details.kind(),
@@ -521,7 +522,7 @@ Handle<DescriptorArray> MapUpdater::BuildDescriptorArray() {
// general than we requested. Take |root_nof| entries as is.
// 0 -> |root_nof|
int current_offset = 0;
- for (int i = 0; i < root_nof; ++i) {
+ for (InternalIndex i : InternalIndex::Range(root_nof)) {
PropertyDetails old_details = old_descriptors_->GetDetails(i);
if (old_details.location() == kField) {
current_offset += old_details.field_width_in_words();
@@ -534,7 +535,7 @@ Handle<DescriptorArray> MapUpdater::BuildDescriptorArray() {
// Merge "updated" old_descriptor entries with target_descriptor entries.
// |root_nof| -> |target_nof|
- for (int i = root_nof; i < target_nof; ++i) {
+ for (InternalIndex i : InternalIndex::Range(root_nof, target_nof)) {
Handle<Name> key(GetKey(i), isolate_);
PropertyDetails old_details = GetDetails(i);
PropertyDetails target_details = target_descriptors->GetDetails(i);
@@ -606,7 +607,7 @@ Handle<DescriptorArray> MapUpdater::BuildDescriptorArray() {
// Take "updated" old_descriptor entries.
// |target_nof| -> |old_nof|
- for (int i = target_nof; i < old_nof_; ++i) {
+ for (InternalIndex i : InternalIndex::Range(target_nof, old_nof_)) {
PropertyDetails old_details = GetDetails(i);
Handle<Name> key(GetKey(i), isolate_);
@@ -665,7 +666,7 @@ Handle<Map> MapUpdater::FindSplitMap(Handle<DescriptorArray> descriptors) {
int root_nof = root_map_->NumberOfOwnDescriptors();
Map current = *root_map_;
- for (int i = root_nof; i < old_nof_; i++) {
+ for (InternalIndex i : InternalIndex::Range(root_nof, old_nof_)) {
Name name = descriptors->GetKey(i);
PropertyDetails details = descriptors->GetDetails(i);
Map next =
@@ -707,13 +708,13 @@ MapUpdater::State MapUpdater::ConstructNewMap() {
state_ = kAtIntegrityLevelSource;
return state_;
}
-
- PropertyDetails split_details = GetDetails(split_nof);
+ InternalIndex split_index(split_nof);
+ PropertyDetails split_details = GetDetails(split_index);
TransitionsAccessor transitions(isolate_, split_map);
// Invalidate a transition target at |key|.
Map maybe_transition = transitions.SearchTransition(
- GetKey(split_nof), split_details.kind(), split_details.attributes());
+ GetKey(split_index), split_details.kind(), split_details.attributes());
if (!maybe_transition.is_null()) {
maybe_transition.DeprecateTransitionTree(isolate_);
}
@@ -727,7 +728,7 @@ MapUpdater::State MapUpdater::ConstructNewMap() {
old_map_->NotifyLeafMapLayoutChange(isolate_);
- if (FLAG_trace_generalization && modified_descriptor_ >= 0) {
+ if (FLAG_trace_generalization && modified_descriptor_.is_found()) {
PropertyDetails old_details =
old_descriptors_->GetDetails(modified_descriptor_);
PropertyDetails new_details =
diff --git a/deps/v8/src/objects/map-updater.h b/deps/v8/src/objects/map-updater.h
index 6ee373cbdf..11bdd0859f 100644
--- a/deps/v8/src/objects/map-updater.h
+++ b/deps/v8/src/objects/map-updater.h
@@ -54,7 +54,7 @@ class MapUpdater {
// Prepares for reconfiguring of a property at |descriptor| to data field
// with given |attributes| and |representation|/|field_type| and
// performs the steps 1-5.
- Handle<Map> ReconfigureToDataField(int descriptor,
+ Handle<Map> ReconfigureToDataField(InternalIndex descriptor,
PropertyAttributes attributes,
PropertyConstness constness,
Representation representation,
@@ -127,26 +127,26 @@ class MapUpdater {
State Normalize(const char* reason);
// Returns name of a |descriptor| property.
- inline Name GetKey(int descriptor) const;
+ inline Name GetKey(InternalIndex descriptor) const;
// Returns property details of a |descriptor| in "updated" |old_descrtiptors_|
// array.
- inline PropertyDetails GetDetails(int descriptor) const;
+ inline PropertyDetails GetDetails(InternalIndex descriptor) const;
// Returns value of a |descriptor| with kDescriptor location in "updated"
// |old_descrtiptors_| array.
- inline Object GetValue(int descriptor) const;
+ inline Object GetValue(InternalIndex descriptor) const;
// Returns field type for a |descriptor| with kField location in "updated"
// |old_descrtiptors_| array.
- inline FieldType GetFieldType(int descriptor) const;
+ inline FieldType GetFieldType(InternalIndex descriptor) const;
// If a |descriptor| property in "updated" |old_descriptors_| has kField
// location then returns it's field type otherwise computes optimal field
// type for the descriptor's value and |representation|. The |location|
// value must be a pre-fetched location for |descriptor|.
inline Handle<FieldType> GetOrComputeFieldType(
- int descriptor, PropertyLocation location,
+ InternalIndex descriptor, PropertyLocation location,
Representation representation) const;
// If a |descriptor| property in given |descriptors| array has kField
@@ -154,10 +154,10 @@ class MapUpdater {
// type for the descriptor's value and |representation|.
// The |location| value must be a pre-fetched location for |descriptor|.
inline Handle<FieldType> GetOrComputeFieldType(
- Handle<DescriptorArray> descriptors, int descriptor,
+ Handle<DescriptorArray> descriptors, InternalIndex descriptor,
PropertyLocation location, Representation representation);
- void GeneralizeField(Handle<Map> map, int modify_index,
+ void GeneralizeField(Handle<Map> map, InternalIndex modify_index,
PropertyConstness new_constness,
Representation new_representation,
Handle<FieldType> new_field_type);
@@ -182,9 +182,9 @@ class MapUpdater {
ElementsKind new_elements_kind_;
bool is_transitionable_fast_elements_kind_;
- // If |modified_descriptor_| is not equal to -1 then the fields below form
+ // If |modified_descriptor_.is_found()|, then the fields below form
// an "update" of the |old_map_|'s descriptors.
- int modified_descriptor_ = -1;
+ InternalIndex modified_descriptor_ = InternalIndex::NotFound();
PropertyKind new_kind_ = kData;
PropertyAttributes new_attributes_ = NONE;
PropertyConstness new_constness_ = PropertyConstness::kMutable;
diff --git a/deps/v8/src/objects/map.cc b/deps/v8/src/objects/map.cc
index a672d6580a..0f448922eb 100644
--- a/deps/v8/src/objects/map.cc
+++ b/deps/v8/src/objects/map.cc
@@ -56,20 +56,8 @@ MaybeHandle<JSFunction> Map::GetConstructorFunction(
return MaybeHandle<JSFunction>();
}
-bool Map::IsMapOfGlobalProxy(Handle<NativeContext> native_context) const {
- DisallowHeapAllocation no_gc;
- if (IsJSGlobalProxyMap()) {
- Object maybe_constructor = GetConstructor();
- // Detached global proxies have |null| as their constructor.
- return maybe_constructor.IsJSFunction() &&
- JSFunction::cast(maybe_constructor).native_context() ==
- *native_context;
- }
- return false;
-}
-
-void Map::PrintReconfiguration(Isolate* isolate, FILE* file, int modify_index,
- PropertyKind kind,
+void Map::PrintReconfiguration(Isolate* isolate, FILE* file,
+ InternalIndex modify_index, PropertyKind kind,
PropertyAttributes attributes) {
OFStream os(file);
os << "[reconfiguring]";
@@ -256,7 +244,7 @@ VisitorId Map::GetVisitorId(Map map) {
case CODE_DATA_CONTAINER_TYPE:
return kVisitCodeDataContainer;
- case WASM_INSTANCE_TYPE:
+ case WASM_INSTANCE_OBJECT_TYPE:
return kVisitWasmInstanceObject;
case PREPARSE_DATA_TYPE:
@@ -270,7 +258,7 @@ VisitorId Map::GetVisitorId(Map map) {
case JS_OBJECT_TYPE:
case JS_ERROR_TYPE:
- case JS_ARGUMENTS_TYPE:
+ case JS_ARGUMENTS_OBJECT_TYPE:
case JS_ASYNC_FROM_SYNC_ITERATOR_TYPE:
case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
case JS_GENERATOR_OBJECT_TYPE:
@@ -291,27 +279,27 @@ VisitorId Map::GetVisitorId(Map map) {
case JS_MAP_VALUE_ITERATOR_TYPE:
case JS_STRING_ITERATOR_TYPE:
case JS_PROMISE_TYPE:
- case JS_REGEXP_TYPE:
- case JS_REGEXP_STRING_ITERATOR_TYPE:
+ case JS_REG_EXP_TYPE:
+ case JS_REG_EXP_STRING_ITERATOR_TYPE:
case JS_FINALIZATION_GROUP_CLEANUP_ITERATOR_TYPE:
case JS_FINALIZATION_GROUP_TYPE:
#ifdef V8_INTL_SUPPORT
- case JS_INTL_V8_BREAK_ITERATOR_TYPE:
- case JS_INTL_COLLATOR_TYPE:
- case JS_INTL_DATE_TIME_FORMAT_TYPE:
- case JS_INTL_LIST_FORMAT_TYPE:
- case JS_INTL_LOCALE_TYPE:
- case JS_INTL_NUMBER_FORMAT_TYPE:
- case JS_INTL_PLURAL_RULES_TYPE:
- case JS_INTL_RELATIVE_TIME_FORMAT_TYPE:
- case JS_INTL_SEGMENT_ITERATOR_TYPE:
- case JS_INTL_SEGMENTER_TYPE:
+ case JS_V8_BREAK_ITERATOR_TYPE:
+ case JS_COLLATOR_TYPE:
+ case JS_DATE_TIME_FORMAT_TYPE:
+ case JS_LIST_FORMAT_TYPE:
+ case JS_LOCALE_TYPE:
+ case JS_NUMBER_FORMAT_TYPE:
+ case JS_PLURAL_RULES_TYPE:
+ case JS_RELATIVE_TIME_FORMAT_TYPE:
+ case JS_SEGMENT_ITERATOR_TYPE:
+ case JS_SEGMENTER_TYPE:
#endif // V8_INTL_SUPPORT
- case WASM_EXCEPTION_TYPE:
- case WASM_GLOBAL_TYPE:
- case WASM_MEMORY_TYPE:
- case WASM_MODULE_TYPE:
- case WASM_TABLE_TYPE:
+ case WASM_EXCEPTION_OBJECT_TYPE:
+ case WASM_GLOBAL_OBJECT_TYPE:
+ case WASM_MEMORY_OBJECT_TYPE:
+ case WASM_MODULE_OBJECT_TYPE:
+ case WASM_TABLE_OBJECT_TYPE:
case JS_BOUND_FUNCTION_TYPE: {
const bool has_raw_data_fields =
(FLAG_unbox_double_fields && !map.HasFastPointerLayout()) ||
@@ -371,12 +359,13 @@ VisitorId Map::GetVisitorId(Map map) {
}
void Map::PrintGeneralization(
- Isolate* isolate, FILE* file, const char* reason, int modify_index,
- int split, int descriptors, bool descriptor_to_field,
- Representation old_representation, Representation new_representation,
- PropertyConstness old_constness, PropertyConstness new_constness,
- MaybeHandle<FieldType> old_field_type, MaybeHandle<Object> old_value,
- MaybeHandle<FieldType> new_field_type, MaybeHandle<Object> new_value) {
+ Isolate* isolate, FILE* file, const char* reason,
+ InternalIndex modify_index, int split, int descriptors,
+ bool descriptor_to_field, Representation old_representation,
+ Representation new_representation, PropertyConstness old_constness,
+ PropertyConstness new_constness, MaybeHandle<FieldType> old_field_type,
+ MaybeHandle<Object> old_value, MaybeHandle<FieldType> new_field_type,
+ MaybeHandle<Object> new_value) {
OFStream os(file);
os << "[generalizing]";
Name name = instance_descriptors().GetKey(modify_index);
@@ -440,9 +429,9 @@ MaybeHandle<Map> Map::CopyWithField(Isolate* isolate, Handle<Map> map,
PropertyConstness constness,
Representation representation,
TransitionFlag flag) {
- DCHECK(
- DescriptorArray::kNotFound ==
- map->instance_descriptors().Search(*name, map->NumberOfOwnDescriptors()));
+ DCHECK(map->instance_descriptors()
+ .Search(*name, map->NumberOfOwnDescriptors())
+ .is_not_found());
// Ensure the descriptor array does not get too big.
if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors) {
@@ -536,8 +525,7 @@ bool Map::InstancesNeedRewriting(Map target, int target_number_of_fields,
// If smi descriptors were replaced by double descriptors, rewrite.
DescriptorArray old_desc = instance_descriptors();
DescriptorArray new_desc = target.instance_descriptors();
- int limit = NumberOfOwnDescriptors();
- for (int i = 0; i < limit; i++) {
+ for (InternalIndex i : IterateOwnDescriptors()) {
if (new_desc.GetDetails(i).representation().IsDouble() !=
old_desc.GetDetails(i).representation().IsDouble()) {
return true;
@@ -562,7 +550,7 @@ bool Map::InstancesNeedRewriting(Map target, int target_number_of_fields,
int Map::NumberOfFields() const {
DescriptorArray descriptors = instance_descriptors();
int result = 0;
- for (int i = 0; i < NumberOfOwnDescriptors(); i++) {
+ for (InternalIndex i : IterateOwnDescriptors()) {
if (descriptors.GetDetails(i).location() == kField) result++;
}
return result;
@@ -572,7 +560,7 @@ Map::FieldCounts Map::GetFieldCounts() const {
DescriptorArray descriptors = instance_descriptors();
int mutable_count = 0;
int const_count = 0;
- for (int i = 0; i < NumberOfOwnDescriptors(); i++) {
+ for (InternalIndex i : IterateOwnDescriptors()) {
PropertyDetails details = descriptors.GetDetails(i);
if (details.location() == kField) {
switch (details.constness()) {
@@ -625,8 +613,10 @@ void Map::ReplaceDescriptors(Isolate* isolate, DescriptorArray new_descriptors,
// descriptors will not be trimmed in the mark-compactor, we need to mark
// all its elements.
Map current = *this;
+#ifndef V8_DISABLE_WRITE_BARRIERS
MarkingBarrierForDescriptorArray(isolate->heap(), current, to_replace,
to_replace.number_of_descriptors());
+#endif
while (current.instance_descriptors(isolate) == to_replace) {
Object next = current.GetBackPointer(isolate);
if (next.IsUndefined(isolate)) break; // Stop overwriting at initial map.
@@ -654,7 +644,7 @@ Map Map::FindRootMap(Isolate* isolate) const {
}
}
-Map Map::FindFieldOwner(Isolate* isolate, int descriptor) const {
+Map Map::FindFieldOwner(Isolate* isolate, InternalIndex descriptor) const {
DisallowHeapAllocation no_allocation;
DCHECK_EQ(kField,
instance_descriptors(isolate).GetDetails(descriptor).location());
@@ -663,14 +653,14 @@ Map Map::FindFieldOwner(Isolate* isolate, int descriptor) const {
Object back = result.GetBackPointer(isolate);
if (back.IsUndefined(isolate)) break;
const Map parent = Map::cast(back);
- if (parent.NumberOfOwnDescriptors() <= descriptor) break;
+ if (parent.NumberOfOwnDescriptors() <= descriptor.as_int()) break;
result = parent;
}
return result;
}
-void Map::UpdateFieldType(Isolate* isolate, int descriptor, Handle<Name> name,
- PropertyConstness new_constness,
+void Map::UpdateFieldType(Isolate* isolate, InternalIndex descriptor,
+ Handle<Name> name, PropertyConstness new_constness,
Representation new_representation,
const MaybeObjectHandle& new_wrapped_type) {
DCHECK(new_wrapped_type->IsSmi() || new_wrapped_type->IsWeak());
@@ -740,7 +730,8 @@ Handle<FieldType> Map::GeneralizeFieldType(Representation rep1,
}
// static
-void Map::GeneralizeField(Isolate* isolate, Handle<Map> map, int modify_index,
+void Map::GeneralizeField(Isolate* isolate, Handle<Map> map,
+ InternalIndex modify_index,
PropertyConstness new_constness,
Representation new_representation,
Handle<FieldType> new_field_type) {
@@ -791,7 +782,8 @@ void Map::GeneralizeField(Isolate* isolate, Handle<Map> map, int modify_index,
map->PrintGeneralization(
isolate, stdout, "field type generalization", modify_index,
map->NumberOfOwnDescriptors(), map->NumberOfOwnDescriptors(), false,
- details.representation(), details.representation(), old_constness,
+ details.representation(),
+ descriptors->GetDetails(modify_index).representation(), old_constness,
new_constness, old_field_type, MaybeHandle<Object>(), new_field_type,
MaybeHandle<Object>());
}
@@ -800,7 +792,8 @@ void Map::GeneralizeField(Isolate* isolate, Handle<Map> map, int modify_index,
// TODO(ishell): remove.
// static
Handle<Map> Map::ReconfigureProperty(Isolate* isolate, Handle<Map> map,
- int modify_index, PropertyKind new_kind,
+ InternalIndex modify_index,
+ PropertyKind new_kind,
PropertyAttributes new_attributes,
Representation new_representation,
Handle<FieldType> new_field_type) {
@@ -840,9 +833,8 @@ Map SearchMigrationTarget(Isolate* isolate, Map old_map) {
// types instead of old_map's types.
// Go to slow map updating if the old_map has fast properties with cleared
// field types.
- int old_nof = old_map.NumberOfOwnDescriptors();
DescriptorArray old_descriptors = old_map.instance_descriptors();
- for (int i = 0; i < old_nof; i++) {
+ for (InternalIndex i : old_map.IterateOwnDescriptors()) {
PropertyDetails old_details = old_descriptors.GetDetails(i);
if (old_details.location() == kField && old_details.kind() == kData) {
FieldType old_type = old_descriptors.GetFieldType(i);
@@ -1007,7 +999,7 @@ Map Map::TryReplayPropertyTransitions(Isolate* isolate, Map old_map) {
DescriptorArray old_descriptors = old_map.instance_descriptors();
Map new_map = *this;
- for (int i = root_nof; i < old_nof; ++i) {
+ for (InternalIndex i : InternalIndex::Range(root_nof, old_nof)) {
PropertyDetails old_details = old_descriptors.GetDetails(i);
Map transition =
TransitionsAccessor(isolate, new_map, &no_allocation)
@@ -1107,8 +1099,10 @@ void Map::EnsureDescriptorSlack(Isolate* isolate, Handle<Map> map, int slack) {
// Replace descriptors by new_descriptors in all maps that share it. The old
// descriptors will not be trimmed in the mark-compactor, we need to mark
// all its elements.
+#ifndef V8_DISABLE_WRITE_BARRIERS
MarkingBarrierForDescriptorArray(isolate->heap(), *map, *descriptors,
descriptors->number_of_descriptors());
+#endif
Map current = *map;
while (current.instance_descriptors() == *descriptors) {
@@ -1363,8 +1357,7 @@ Handle<Map> Map::AsElementsKind(Isolate* isolate, Handle<Map> map,
int Map::NumberOfEnumerableProperties() const {
int result = 0;
DescriptorArray descs = instance_descriptors();
- int limit = NumberOfOwnDescriptors();
- for (int i = 0; i < limit; i++) {
+ for (InternalIndex i : IterateOwnDescriptors()) {
if ((descs.GetDetails(i).attributes() & ONLY_ENUMERABLE) == 0 &&
!descs.GetKey(i).FilterKey(ENUMERABLE_STRINGS)) {
result++;
@@ -1378,7 +1371,7 @@ int Map::NextFreePropertyIndex() const {
DescriptorArray descs = instance_descriptors();
// Search properties backwards to find the last field.
for (int i = number_of_own_descriptors - 1; i >= 0; --i) {
- PropertyDetails details = descs.GetDetails(i);
+ PropertyDetails details = descs.GetDetails(InternalIndex(i));
if (details.location() == kField) {
return details.field_index() + details.field_width_in_words();
}
@@ -1788,7 +1781,7 @@ Handle<Map> Map::AddMissingTransitions(
// if there are no dead transitions from that map and this is exactly the
// case for all the intermediate maps we create here.
Handle<Map> map = split_map;
- for (int i = split_nof; i < nof_descriptors - 1; ++i) {
+ for (InternalIndex i : InternalIndex::Range(split_nof, nof_descriptors - 1)) {
Handle<Map> new_map = CopyDropDescriptors(isolate, map);
InstallDescriptors(isolate, map, new_map, i, descriptors,
full_layout_descriptor);
@@ -1797,20 +1790,21 @@ Handle<Map> Map::AddMissingTransitions(
}
map->NotifyLeafMapLayoutChange(isolate);
last_map->set_may_have_interesting_symbols(false);
- InstallDescriptors(isolate, map, last_map, nof_descriptors - 1, descriptors,
- full_layout_descriptor);
+ InstallDescriptors(isolate, map, last_map, InternalIndex(nof_descriptors - 1),
+ descriptors, full_layout_descriptor);
return last_map;
}
// Since this method is used to rewrite an existing transition tree, it can
// always insert transitions without checking.
void Map::InstallDescriptors(Isolate* isolate, Handle<Map> parent,
- Handle<Map> child, int new_descriptor,
+ Handle<Map> child, InternalIndex new_descriptor,
Handle<DescriptorArray> descriptors,
Handle<LayoutDescriptor> full_layout_descriptor) {
DCHECK(descriptors->IsSortedNoDuplicates());
- child->SetInstanceDescriptors(isolate, *descriptors, new_descriptor + 1);
+ child->SetInstanceDescriptors(isolate, *descriptors,
+ new_descriptor.as_int() + 1);
child->CopyUnusedPropertyFields(*parent);
PropertyDetails details = descriptors->GetDetails(new_descriptor);
if (details.location() == kField) {
@@ -2063,7 +2057,7 @@ Handle<Map> Map::CopyForPreventExtensions(
namespace {
-bool CanHoldValue(DescriptorArray descriptors, int descriptor,
+bool CanHoldValue(DescriptorArray descriptors, InternalIndex descriptor,
PropertyConstness constness, Object value) {
PropertyDetails details = descriptors.GetDetails(descriptor);
if (details.location() == kField) {
@@ -2086,7 +2080,7 @@ bool CanHoldValue(DescriptorArray descriptors, int descriptor,
}
Handle<Map> UpdateDescriptorForValue(Isolate* isolate, Handle<Map> map,
- int descriptor,
+ InternalIndex descriptor,
PropertyConstness constness,
Handle<Object> value) {
if (CanHoldValue(map->instance_descriptors(), descriptor, constness,
@@ -2108,7 +2102,7 @@ Handle<Map> UpdateDescriptorForValue(Isolate* isolate, Handle<Map> map,
// static
Handle<Map> Map::PrepareForDataProperty(Isolate* isolate, Handle<Map> map,
- int descriptor,
+ InternalIndex descriptor,
PropertyConstness constness,
Handle<Object> value) {
// Update to the newest map before storing the property.
@@ -2140,7 +2134,7 @@ Handle<Map> Map::TransitionToDataProperty(Isolate* isolate, Handle<Map> map,
.SearchTransition(*name, kData, attributes);
if (!maybe_transition.is_null()) {
Handle<Map> transition(maybe_transition, isolate);
- int descriptor = transition->LastAdded();
+ InternalIndex descriptor = transition->LastAdded();
DCHECK_EQ(
attributes,
@@ -2206,7 +2200,8 @@ Handle<Map> Map::TransitionToDataProperty(Isolate* isolate, Handle<Map> map,
}
Handle<Map> Map::ReconfigureExistingProperty(Isolate* isolate, Handle<Map> map,
- int descriptor, PropertyKind kind,
+ InternalIndex descriptor,
+ PropertyKind kind,
PropertyAttributes attributes,
PropertyConstness constness) {
// Dictionaries have to be reconfigured in-place.
@@ -2232,7 +2227,8 @@ Handle<Map> Map::ReconfigureExistingProperty(Isolate* isolate, Handle<Map> map,
}
Handle<Map> Map::TransitionToAccessorProperty(Isolate* isolate, Handle<Map> map,
- Handle<Name> name, int descriptor,
+ Handle<Name> name,
+ InternalIndex descriptor,
Handle<Object> getter,
Handle<Object> setter,
PropertyAttributes attributes) {
@@ -2261,7 +2257,7 @@ Handle<Map> Map::TransitionToAccessorProperty(Isolate* isolate, Handle<Map> map,
if (!maybe_transition.is_null()) {
Handle<Map> transition(maybe_transition, isolate);
DescriptorArray descriptors = transition->instance_descriptors();
- int descriptor = transition->LastAdded();
+ InternalIndex descriptor = transition->LastAdded();
DCHECK(descriptors.GetKey(descriptor).Equals(*name));
DCHECK_EQ(kAccessor, descriptors.GetDetails(descriptor).kind());
@@ -2284,7 +2280,7 @@ Handle<Map> Map::TransitionToAccessorProperty(Isolate* isolate, Handle<Map> map,
Handle<AccessorPair> pair;
DescriptorArray old_descriptors = map->instance_descriptors();
- if (descriptor != DescriptorArray::kNotFound) {
+ if (descriptor.is_found()) {
if (descriptor != map->LastAdded()) {
return Map::Normalize(isolate, map, mode, "AccessorsOverwritingNonLast");
}
@@ -2374,9 +2370,9 @@ Handle<Map> Map::CopyInsertDescriptor(Isolate* isolate, Handle<Map> map,
Handle<DescriptorArray> old_descriptors(map->instance_descriptors(), isolate);
// We replace the key if it is already present.
- int index =
+ InternalIndex index =
old_descriptors->SearchWithCache(isolate, *descriptor->GetKey(), *map);
- if (index != DescriptorArray::kNotFound) {
+ if (index.is_found()) {
return CopyReplaceDescriptor(isolate, map, old_descriptors, descriptor,
index, flag);
}
@@ -2386,7 +2382,7 @@ Handle<Map> Map::CopyInsertDescriptor(Isolate* isolate, Handle<Map> map,
Handle<Map> Map::CopyReplaceDescriptor(Isolate* isolate, Handle<Map> map,
Handle<DescriptorArray> descriptors,
Descriptor* descriptor,
- int insertion_index,
+ InternalIndex insertion_index,
TransitionFlag flag) {
Handle<Name> key = descriptor->GetKey();
DCHECK_EQ(*key, descriptors->GetKey(insertion_index));
@@ -2403,7 +2399,7 @@ Handle<Map> Map::CopyReplaceDescriptor(Isolate* isolate, Handle<Map> map,
isolate, map, new_descriptors, new_descriptors->number_of_descriptors());
SimpleTransitionFlag simple_flag =
- (insertion_index == descriptors->number_of_descriptors() - 1)
+ (insertion_index.as_int() == descriptors->number_of_descriptors() - 1)
? SIMPLE_PROPERTY_TRANSITION
: PROPERTY_TRANSITION;
return CopyReplaceDescriptors(isolate, map, new_descriptors,
@@ -2465,8 +2461,7 @@ bool Map::EquivalentToForElementsKindTransition(const Map other) const {
// with fields that may be generalized in-place. This must already be handled
// during addition of a new field.
DescriptorArray descriptors = instance_descriptors();
- int nof = NumberOfOwnDescriptors();
- for (int i = 0; i < nof; i++) {
+ for (InternalIndex i : IterateOwnDescriptors()) {
PropertyDetails details = descriptors.GetDetails(i);
if (details.location() == kField) {
DCHECK(IsMostGeneralFieldType(details.representation(),
@@ -2547,8 +2542,10 @@ void Map::SetInstanceDescriptors(Isolate* isolate, DescriptorArray descriptors,
int number_of_own_descriptors) {
set_synchronized_instance_descriptors(descriptors);
SetNumberOfOwnDescriptors(number_of_own_descriptors);
+#ifndef V8_DISABLE_WRITE_BARRIERS
MarkingBarrierForDescriptorArray(isolate->heap(), *this, descriptors,
number_of_own_descriptors);
+#endif
}
// static
diff --git a/deps/v8/src/objects/map.h b/deps/v8/src/objects/map.h
index ef16019685..0daadbee08 100644
--- a/deps/v8/src/objects/map.h
+++ b/deps/v8/src/objects/map.h
@@ -8,6 +8,7 @@
#include "src/common/globals.h"
#include "src/objects/code.h"
#include "src/objects/heap-object.h"
+#include "src/objects/internal-index.h"
#include "src/objects/objects.h"
#include "torque-generated/field-offsets-tq.h"
@@ -470,7 +471,8 @@ class Map : public HeapObject {
Map GetPrototypeChainRootMap(Isolate* isolate) const;
V8_EXPORT_PRIVATE Map FindRootMap(Isolate* isolate) const;
- V8_EXPORT_PRIVATE Map FindFieldOwner(Isolate* isolate, int descriptor) const;
+ V8_EXPORT_PRIVATE Map FindFieldOwner(Isolate* isolate,
+ InternalIndex descriptor) const;
inline int GetInObjectPropertyOffset(int index) const;
@@ -513,7 +515,8 @@ class Map : public HeapObject {
Representation rep1, Handle<FieldType> type1, Representation rep2,
Handle<FieldType> type2, Isolate* isolate);
static void GeneralizeField(Isolate* isolate, Handle<Map> map,
- int modify_index, PropertyConstness new_constness,
+ InternalIndex modify_index,
+ PropertyConstness new_constness,
Representation new_representation,
Handle<FieldType> new_field_type);
// Returns true if the |field_type| is the most general one for
@@ -533,7 +536,7 @@ class Map : public HeapObject {
Representation* representation, Handle<FieldType>* field_type);
V8_EXPORT_PRIVATE static Handle<Map> ReconfigureProperty(
- Isolate* isolate, Handle<Map> map, int modify_index,
+ Isolate* isolate, Handle<Map> map, InternalIndex modify_index,
PropertyKind new_kind, PropertyAttributes new_attributes,
Representation new_representation, Handle<FieldType> new_field_type);
@@ -541,7 +544,7 @@ class Map : public HeapObject {
Isolate* isolate, Handle<Map> map, ElementsKind new_elements_kind);
V8_EXPORT_PRIVATE static Handle<Map> PrepareForDataProperty(
- Isolate* isolate, Handle<Map> old_map, int descriptor_number,
+ Isolate* isolate, Handle<Map> old_map, InternalIndex descriptor_number,
PropertyConstness constness, Handle<Object> value);
V8_EXPORT_PRIVATE static Handle<Map> Normalize(Isolate* isolate,
@@ -636,10 +639,11 @@ class Map : public HeapObject {
inline PropertyDetails GetLastDescriptorDetails(Isolate* isolate) const;
- inline int LastAdded() const;
+ inline InternalIndex LastAdded() const;
inline int NumberOfOwnDescriptors() const;
inline void SetNumberOfOwnDescriptors(int number);
+ inline InternalIndex::Range IterateOwnDescriptors() const;
inline Cell RetrieveDescriptorsPointer();
@@ -742,12 +746,13 @@ class Map : public HeapObject {
Handle<Object> value, PropertyAttributes attributes,
PropertyConstness constness, StoreOrigin store_origin);
V8_EXPORT_PRIVATE static Handle<Map> TransitionToAccessorProperty(
- Isolate* isolate, Handle<Map> map, Handle<Name> name, int descriptor,
- Handle<Object> getter, Handle<Object> setter,
+ Isolate* isolate, Handle<Map> map, Handle<Name> name,
+ InternalIndex descriptor, Handle<Object> getter, Handle<Object> setter,
PropertyAttributes attributes);
V8_EXPORT_PRIVATE static Handle<Map> ReconfigureExistingProperty(
- Isolate* isolate, Handle<Map> map, int descriptor, PropertyKind kind,
- PropertyAttributes attributes, PropertyConstness constness);
+ Isolate* isolate, Handle<Map> map, InternalIndex descriptor,
+ PropertyKind kind, PropertyAttributes attributes,
+ PropertyConstness constness);
inline void AppendDescriptor(Isolate* isolate, Descriptor* desc);
@@ -881,9 +886,6 @@ class Map : public HeapObject {
InstanceType instance_type);
inline bool CanHaveFastTransitionableElementsKind() const;
- // Whether this is the map of the given native context's global proxy.
- bool IsMapOfGlobalProxy(Handle<NativeContext> native_context) const;
-
private:
// This byte encodes either the instance size without the in-object slack or
// the slack size in properties backing store.
@@ -925,7 +927,7 @@ class Map : public HeapObject {
Handle<LayoutDescriptor> full_layout_descriptor);
static void InstallDescriptors(
Isolate* isolate, Handle<Map> parent_map, Handle<Map> child_map,
- int new_descriptor, Handle<DescriptorArray> descriptors,
+ InternalIndex new_descriptor, Handle<DescriptorArray> descriptors,
Handle<LayoutDescriptor> full_layout_descriptor);
static Handle<Map> CopyAddDescriptor(Isolate* isolate, Handle<Map> map,
Descriptor* descriptor,
@@ -938,7 +940,8 @@ class Map : public HeapObject {
static Handle<Map> CopyReplaceDescriptor(Isolate* isolate, Handle<Map> map,
Handle<DescriptorArray> descriptors,
- Descriptor* descriptor, int index,
+ Descriptor* descriptor,
+ InternalIndex index,
TransitionFlag flag);
static Handle<Map> CopyNormalized(Isolate* isolate, Handle<Map> map,
PropertyNormalizationMode mode);
@@ -951,22 +954,24 @@ class Map : public HeapObject {
// Update field type of the given descriptor to new representation and new
// type. The type must be prepared for storing in descriptor array:
// it must be either a simple type or a map wrapped in a weak cell.
- void UpdateFieldType(Isolate* isolate, int descriptor_number,
+ void UpdateFieldType(Isolate* isolate, InternalIndex descriptor_number,
Handle<Name> name, PropertyConstness new_constness,
Representation new_representation,
const MaybeObjectHandle& new_wrapped_type);
// TODO(ishell): Move to MapUpdater.
- void PrintReconfiguration(Isolate* isolate, FILE* file, int modify_index,
- PropertyKind kind, PropertyAttributes attributes);
+ void PrintReconfiguration(Isolate* isolate, FILE* file,
+ InternalIndex modify_index, PropertyKind kind,
+ PropertyAttributes attributes);
// TODO(ishell): Move to MapUpdater.
void PrintGeneralization(
- Isolate* isolate, FILE* file, const char* reason, int modify_index,
- int split, int descriptors, bool constant_to_field,
- Representation old_representation, Representation new_representation,
- PropertyConstness old_constness, PropertyConstness new_constness,
- MaybeHandle<FieldType> old_field_type, MaybeHandle<Object> old_value,
- MaybeHandle<FieldType> new_field_type, MaybeHandle<Object> new_value);
+ Isolate* isolate, FILE* file, const char* reason,
+ InternalIndex modify_index, int split, int descriptors,
+ bool constant_to_field, Representation old_representation,
+ Representation new_representation, PropertyConstness old_constness,
+ PropertyConstness new_constness, MaybeHandle<FieldType> old_field_type,
+ MaybeHandle<Object> old_value, MaybeHandle<FieldType> new_field_type,
+ MaybeHandle<Object> new_value);
// Use the high-level instance_descriptors/SetInstanceDescriptors instead.
DECL_ACCESSORS(synchronized_instance_descriptors, DescriptorArray)
diff --git a/deps/v8/src/objects/module-inl.h b/deps/v8/src/objects/module-inl.h
index ac54516376..aaf790cc8a 100644
--- a/deps/v8/src/objects/module-inl.h
+++ b/deps/v8/src/objects/module-inl.h
@@ -38,9 +38,17 @@ SMI_ACCESSORS(Module, hash, kHashOffset)
TQ_SMI_ACCESSORS(SourceTextModule, dfs_index)
TQ_SMI_ACCESSORS(SourceTextModule, dfs_ancestor_index)
+TQ_SMI_ACCESSORS(SourceTextModule, flags)
+BOOL_ACCESSORS(SourceTextModule, flags, async, kAsyncBit)
+BOOL_ACCESSORS(SourceTextModule, flags, async_evaluating, kAsyncEvaluatingBit)
+TQ_SMI_ACCESSORS(SourceTextModule, pending_async_dependencies)
+ACCESSORS(SourceTextModule, async_parent_modules, ArrayList,
+ kAsyncParentModulesOffset)
+ACCESSORS(SourceTextModule, top_level_capability, HeapObject,
+ kTopLevelCapabilityOffset)
SourceTextModuleInfo SourceTextModule::info() const {
- return (status() >= kEvaluating)
+ return status() == kErrored
? SourceTextModuleInfo::cast(code())
: GetSharedFunctionInfo().scope_info().ModuleDescriptorInfo();
}
@@ -112,6 +120,37 @@ class UnorderedModuleSet
ZoneAllocator<Handle<Module>>(zone)) {}
};
+void SourceTextModule::AddAsyncParentModule(Isolate* isolate,
+ Handle<SourceTextModule> module) {
+ Handle<ArrayList> new_array_list =
+ ArrayList::Add(isolate, handle(async_parent_modules(), isolate), module);
+ set_async_parent_modules(*new_array_list);
+}
+
+Handle<SourceTextModule> SourceTextModule::GetAsyncParentModule(
+ Isolate* isolate, int index) {
+ Handle<SourceTextModule> module(
+ SourceTextModule::cast(async_parent_modules().Get(index)), isolate);
+ return module;
+}
+
+int SourceTextModule::AsyncParentModuleCount() {
+ return async_parent_modules().Length();
+}
+
+bool SourceTextModule::HasPendingAsyncDependencies() {
+ DCHECK_GE(pending_async_dependencies(), 0);
+ return pending_async_dependencies() > 0;
+}
+
+void SourceTextModule::IncrementPendingAsyncDependencies() {
+ set_pending_async_dependencies(pending_async_dependencies() + 1);
+}
+
+void SourceTextModule::DecrementPendingAsyncDependencies() {
+ set_pending_async_dependencies(pending_async_dependencies() - 1);
+}
+
} // namespace internal
} // namespace v8
diff --git a/deps/v8/src/objects/module.cc b/deps/v8/src/objects/module.cc
index 60b9145d10..9c37de0c85 100644
--- a/deps/v8/src/objects/module.cc
+++ b/deps/v8/src/objects/module.cc
@@ -10,6 +10,7 @@
#include "src/api/api-inl.h"
#include "src/ast/modules.h"
#include "src/builtins/accessors.h"
+#include "src/heap/heap-inl.h"
#include "src/objects/cell-inl.h"
#include "src/objects/hash-table-inl.h"
#include "src/objects/js-generator-inl.h"
@@ -50,12 +51,14 @@ void Module::SetStatus(Status new_status) {
set_status(new_status);
}
-void Module::RecordError(Isolate* isolate) {
- DisallowHeapAllocation no_alloc;
- DCHECK(exception().IsTheHole(isolate));
- Object the_exception = isolate->pending_exception();
- DCHECK(!the_exception.IsTheHole(isolate));
+void Module::RecordErrorUsingPendingException(Isolate* isolate) {
+ Handle<Object> the_exception(isolate->pending_exception(), isolate);
+ RecordError(isolate, the_exception);
+}
+void Module::RecordError(Isolate* isolate, Handle<Object> error) {
+ DCHECK(exception().IsTheHole(isolate));
+ DCHECK(!error->IsTheHole(isolate));
if (this->IsSourceTextModule()) {
Handle<SourceTextModule> self(SourceTextModule::cast(*this), GetIsolate());
self->set_code(self->info());
@@ -64,7 +67,7 @@ void Module::RecordError(Isolate* isolate) {
PrintStatusTransition(Module::kErrored);
#endif // DEBUG
set_status(Module::kErrored);
- set_exception(the_exception);
+ set_exception(*error);
}
void Module::ResetGraph(Isolate* isolate, Handle<Module> module) {
@@ -244,46 +247,35 @@ MaybeHandle<Object> Module::Evaluate(Isolate* isolate, Handle<Module> module) {
#endif // OBJECT_PRINT
}
#endif // DEBUG
- if (module->status() == kErrored) {
- isolate->Throw(module->GetException());
- return MaybeHandle<Object>();
- }
- DCHECK_NE(module->status(), kEvaluating);
- DCHECK_GE(module->status(), kInstantiated);
- Zone zone(isolate->allocator(), ZONE_NAME);
-
- ZoneForwardList<Handle<SourceTextModule>> stack(&zone);
- unsigned dfs_index = 0;
- Handle<Object> result;
- if (!Evaluate(isolate, module, &stack, &dfs_index).ToHandle(&result)) {
- for (auto& descendant : stack) {
- DCHECK_EQ(descendant->status(), kEvaluating);
- descendant->RecordError(isolate);
- }
- DCHECK_EQ(module->GetException(), isolate->pending_exception());
- return MaybeHandle<Object>();
+ STACK_CHECK(isolate, MaybeHandle<Object>());
+ if (FLAG_harmony_top_level_await && module->IsSourceTextModule()) {
+ return SourceTextModule::EvaluateMaybeAsync(
+ isolate, Handle<SourceTextModule>::cast(module));
+ } else {
+ return Module::InnerEvaluate(isolate, module);
}
- DCHECK_EQ(module->status(), kEvaluated);
- DCHECK(stack.empty());
- return result;
}
-MaybeHandle<Object> Module::Evaluate(
- Isolate* isolate, Handle<Module> module,
- ZoneForwardList<Handle<SourceTextModule>>* stack, unsigned* dfs_index) {
+MaybeHandle<Object> Module::InnerEvaluate(Isolate* isolate,
+ Handle<Module> module) {
if (module->status() == kErrored) {
isolate->Throw(module->GetException());
return MaybeHandle<Object>();
- }
- if (module->status() >= kEvaluating) {
+ } else if (module->status() == kEvaluated) {
return isolate->factory()->undefined_value();
}
- DCHECK_EQ(module->status(), kInstantiated);
- STACK_CHECK(isolate, MaybeHandle<Object>());
+
+ // InnerEvaluate can be called both to evaluate top level modules without
+ // the harmony_top_level_await flag and recursively to evaluate
+ // SyntheticModules in the dependency graphs of SourceTextModules.
+ //
+ // However, SyntheticModules transition directly to 'Evaluated,' so we should
+ // never see an 'Evaluating' module at this point.
+ CHECK_EQ(module->status(), kInstantiated);
if (module->IsSourceTextModule()) {
- return SourceTextModule::Evaluate(
- isolate, Handle<SourceTextModule>::cast(module), stack, dfs_index);
+ return SourceTextModule::Evaluate(isolate,
+ Handle<SourceTextModule>::cast(module));
} else {
return SyntheticModule::Evaluate(isolate,
Handle<SyntheticModule>::cast(module));
diff --git a/deps/v8/src/objects/module.h b/deps/v8/src/objects/module.h
index 08badf0357..d0ea22e6e5 100644
--- a/deps/v8/src/objects/module.h
+++ b/deps/v8/src/objects/module.h
@@ -112,18 +112,19 @@ class Module : public HeapObject {
ZoneForwardList<Handle<SourceTextModule>>* stack, unsigned* dfs_index,
Zone* zone);
- static V8_WARN_UNUSED_RESULT MaybeHandle<Object> Evaluate(
- Isolate* isolate, Handle<Module> module,
- ZoneForwardList<Handle<SourceTextModule>>* stack, unsigned* dfs_index);
+ static V8_WARN_UNUSED_RESULT MaybeHandle<Object> InnerEvaluate(
+ Isolate* isolate, Handle<Module> module);
// Set module's status back to kUninstantiated and reset other internal state.
// This is used when instantiation fails.
static void Reset(Isolate* isolate, Handle<Module> module);
static void ResetGraph(Isolate* isolate, Handle<Module> module);
- // To set status to kErrored, RecordError should be used.
+ // To set status to kErrored, RecordError or RecordErrorUsingPendingException
+ // should be used.
void SetStatus(Status status);
- void RecordError(Isolate* isolate);
+ void RecordErrorUsingPendingException(Isolate* isolate);
+ void RecordError(Isolate* isolate, Handle<Object> error);
#ifdef DEBUG
// For --trace-module-status.
@@ -137,7 +138,8 @@ class Module : public HeapObject {
// JSModuleNamespace object (representing module "bar") is created and bound to
// the declared variable (foo). A module can have at most one namespace object.
class JSModuleNamespace
- : public TorqueGeneratedJSModuleNamespace<JSModuleNamespace, JSObject> {
+ : public TorqueGeneratedJSModuleNamespace<JSModuleNamespace,
+ JSSpecialObject> {
public:
DECL_PRINTER(JSModuleNamespace)
diff --git a/deps/v8/src/objects/name-inl.h b/deps/v8/src/objects/name-inl.h
index b76ae245a2..88ae2feea5 100644
--- a/deps/v8/src/objects/name-inl.h
+++ b/deps/v8/src/objects/name-inl.h
@@ -9,6 +9,7 @@
#include "src/heap/heap-write-barrier-inl.h"
#include "src/objects/map-inl.h"
+#include "src/objects/primitive-heap-object-inl.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
@@ -100,6 +101,10 @@ bool Name::AsArrayIndex(uint32_t* index) {
return IsString() && String::cast(*this).AsArrayIndex(index);
}
+bool Name::AsIntegerIndex(size_t* index) {
+ return IsString() && String::cast(*this).AsIntegerIndex(index);
+}
+
// static
bool Name::ContainsCachedArrayIndex(uint32_t hash) {
return (hash & Name::kDoesNotContainCachedArrayIndexMask) == 0;
diff --git a/deps/v8/src/objects/name.h b/deps/v8/src/objects/name.h
index a02bb3d794..386b9ec055 100644
--- a/deps/v8/src/objects/name.h
+++ b/deps/v8/src/objects/name.h
@@ -5,9 +5,8 @@
#ifndef V8_OBJECTS_NAME_H_
#define V8_OBJECTS_NAME_H_
-#include "src/objects/heap-object.h"
#include "src/objects/objects.h"
-#include "torque-generated/class-definitions-tq.h"
+#include "src/objects/primitive-heap-object.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
@@ -17,7 +16,7 @@ namespace internal {
// The Name abstract class captures anything that can be used as a property
// name, i.e., strings and symbols. All names store a hash value.
-class Name : public TorqueGeneratedName<Name, HeapObject> {
+class Name : public TorqueGeneratedName<Name, PrimitiveHeapObject> {
public:
// Tells whether the hash code has been computed.
inline bool HasHashCode();
@@ -32,6 +31,7 @@ class Name : public TorqueGeneratedName<Name, HeapObject> {
// Conversion.
inline bool AsArrayIndex(uint32_t* index);
+ inline bool AsIntegerIndex(size_t* index);
// An "interesting symbol" is a well-known symbol, like @@toStringTag,
// that's often looked up on random objects but is usually not present.
@@ -73,7 +73,8 @@ class Name : public TorqueGeneratedName<Name, HeapObject> {
// array index.
static const int kHashNotComputedMask = 1;
static const int kIsNotArrayIndexMask = 1 << 1;
- static const int kNofHashBitFields = 2;
+ static const int kIsNotIntegerIndexMask = 1 << 2;
+ static const int kNofHashBitFields = 3;
// Shift constant retrieving hash code from hash field.
static const int kHashShift = kNofHashBitFields;
@@ -88,6 +89,14 @@ class Name : public TorqueGeneratedName<Name, HeapObject> {
// Maximum number of characters to consider when trying to convert a string
// value into an array index.
static const int kMaxArrayIndexSize = 10;
+ // Maximum number of characters that might be parsed into a size_t:
+ // 10 characters per 32 bits of size_t width.
+ // We choose this as large as possible (rather than MAX_SAFE_INTEGER range)
+ // because TypedArray accesses will treat all string keys that are
+ // canonical representations of numbers in the range [MAX_SAFE_INTEGER ..
+ // size_t::max] as out-of-bounds accesses, and we can handle those in the
+ // fast path if we tag them as such (see kIsNotIntegerIndexMask).
+ static const int kMaxIntegerIndexSize = 10 * (sizeof(size_t) / 4);
// For strings which are array indexes the hash value has the string length
// mixed into the hash, mainly to avoid a hash value of zero which would be
@@ -120,7 +129,7 @@ class Name : public TorqueGeneratedName<Name, HeapObject> {
// Value of empty hash field indicating that the hash is not computed.
static const int kEmptyHashField =
- kIsNotArrayIndexMask | kHashNotComputedMask;
+ kIsNotIntegerIndexMask | kIsNotArrayIndexMask | kHashNotComputedMask;
protected:
static inline bool IsHashFieldComputed(uint32_t field);
diff --git a/deps/v8/src/objects/object-list-macros.h b/deps/v8/src/objects/object-list-macros.h
index d5bce62d43..09b1bdc5f0 100644
--- a/deps/v8/src/objects/object-list-macros.h
+++ b/deps/v8/src/objects/object-list-macros.h
@@ -41,6 +41,7 @@ class HeapNumber;
class ObjectHashTable;
class ObjectTemplateInfo;
class ObjectVisitor;
+class OSROptimizedCodeCache;
class PreparseData;
class PropertyArray;
class PropertyCell;
@@ -138,12 +139,14 @@ class ZoneForwardList;
V(JSCollection) \
V(JSCollectionIterator) \
V(JSContextExtensionObject) \
+ V(JSCustomElementsObject) \
V(JSDataView) \
V(JSDate) \
V(JSError) \
V(JSFinalizationGroup) \
V(JSFinalizationGroupCleanupIterator) \
V(JSFunction) \
+ V(JSFunctionOrBoundFunction) \
V(JSGeneratorObject) \
V(JSGlobalObject) \
V(JSGlobalProxy) \
@@ -158,10 +161,12 @@ class ZoneForwardList;
V(JSReceiver) \
V(JSRegExp) \
V(JSRegExpResult) \
+ V(JSRegExpResultIndices) \
V(JSRegExpStringIterator) \
V(JSSet) \
V(JSSetIterator) \
V(JSSloppyArgumentsObject) \
+ V(JSSpecialObject) \
V(JSStringIterator) \
V(JSTypedArray) \
V(JSWeakCollection) \
@@ -185,7 +190,9 @@ class ZoneForwardList;
V(OrderedHashMap) \
V(OrderedHashSet) \
V(OrderedNameDictionary) \
+ V(OSROptimizedCodeCache) \
V(PreparseData) \
+ V(PrimitiveHeapObject) \
V(PromiseReactionJobTask) \
V(PropertyArray) \
V(PropertyCell) \
@@ -225,6 +232,7 @@ class ZoneForwardList;
V(Undetectable) \
V(UniqueName) \
V(WasmExceptionObject) \
+ V(WasmExceptionPackage) \
V(WasmGlobalObject) \
V(WasmInstanceObject) \
V(WasmMemoryObject) \
diff --git a/deps/v8/src/objects/objects-body-descriptors-inl.h b/deps/v8/src/objects/objects-body-descriptors-inl.h
index 4c980b2697..68164fdce6 100644
--- a/deps/v8/src/objects/objects-body-descriptors-inl.h
+++ b/deps/v8/src/objects/objects-body-descriptors-inl.h
@@ -913,7 +913,7 @@ ReturnType BodyDescriptorApply(InstanceType type, T1 p1, T2 p2, T3 p3, T4 p4) {
return Op::template apply<FeedbackVector::BodyDescriptor>(p1, p2, p3, p4);
case JS_OBJECT_TYPE:
case JS_ERROR_TYPE:
- case JS_ARGUMENTS_TYPE:
+ case JS_ARGUMENTS_OBJECT_TYPE:
case JS_ASYNC_FROM_SYNC_ITERATOR_TYPE:
case JS_PROMISE_TYPE:
case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
@@ -933,8 +933,8 @@ ReturnType BodyDescriptorApply(InstanceType type, T1 p1, T2 p2, T3 p3, T4 p4) {
case JS_MAP_KEY_VALUE_ITERATOR_TYPE:
case JS_MAP_VALUE_ITERATOR_TYPE:
case JS_STRING_ITERATOR_TYPE:
- case JS_REGEXP_STRING_ITERATOR_TYPE:
- case JS_REGEXP_TYPE:
+ case JS_REG_EXP_STRING_ITERATOR_TYPE:
+ case JS_REG_EXP_TYPE:
case JS_GLOBAL_PROXY_TYPE:
case JS_GLOBAL_OBJECT_TYPE:
case JS_API_OBJECT_TYPE:
@@ -944,24 +944,24 @@ ReturnType BodyDescriptorApply(InstanceType type, T1 p1, T2 p2, T3 p3, T4 p4) {
case JS_FINALIZATION_GROUP_CLEANUP_ITERATOR_TYPE:
case JS_FINALIZATION_GROUP_TYPE:
#ifdef V8_INTL_SUPPORT
- case JS_INTL_V8_BREAK_ITERATOR_TYPE:
- case JS_INTL_COLLATOR_TYPE:
- case JS_INTL_DATE_TIME_FORMAT_TYPE:
- case JS_INTL_LIST_FORMAT_TYPE:
- case JS_INTL_LOCALE_TYPE:
- case JS_INTL_NUMBER_FORMAT_TYPE:
- case JS_INTL_PLURAL_RULES_TYPE:
- case JS_INTL_RELATIVE_TIME_FORMAT_TYPE:
- case JS_INTL_SEGMENT_ITERATOR_TYPE:
- case JS_INTL_SEGMENTER_TYPE:
+ case JS_V8_BREAK_ITERATOR_TYPE:
+ case JS_COLLATOR_TYPE:
+ case JS_DATE_TIME_FORMAT_TYPE:
+ case JS_LIST_FORMAT_TYPE:
+ case JS_LOCALE_TYPE:
+ case JS_NUMBER_FORMAT_TYPE:
+ case JS_PLURAL_RULES_TYPE:
+ case JS_RELATIVE_TIME_FORMAT_TYPE:
+ case JS_SEGMENT_ITERATOR_TYPE:
+ case JS_SEGMENTER_TYPE:
#endif // V8_INTL_SUPPORT
- case WASM_EXCEPTION_TYPE:
- case WASM_GLOBAL_TYPE:
- case WASM_MEMORY_TYPE:
- case WASM_MODULE_TYPE:
- case WASM_TABLE_TYPE:
+ case WASM_EXCEPTION_OBJECT_TYPE:
+ case WASM_GLOBAL_OBJECT_TYPE:
+ case WASM_MEMORY_OBJECT_TYPE:
+ case WASM_MODULE_OBJECT_TYPE:
+ case WASM_TABLE_OBJECT_TYPE:
return Op::template apply<JSObject::BodyDescriptor>(p1, p2, p3, p4);
- case WASM_INSTANCE_TYPE:
+ case WASM_INSTANCE_OBJECT_TYPE:
return Op::template apply<WasmInstanceObject::BodyDescriptor>(p1, p2, p3,
p4);
case JS_WEAK_MAP_TYPE:
diff --git a/deps/v8/src/objects/objects-definitions.h b/deps/v8/src/objects/objects-definitions.h
index b346b5b7d1..53354014e9 100644
--- a/deps/v8/src/objects/objects-definitions.h
+++ b/deps/v8/src/objects/objects-definitions.h
@@ -32,15 +32,7 @@ namespace internal {
// instance_types that are less than those of all other types:
// HeapObject::Size, HeapObject::IterateBody, the typeof operator, and
// Object::IsString.
-//
-// NOTE: Everything following JS_PRIMITIVE_WRAPPER_TYPE is considered a
-// JSObject for GC purposes. The first four entries here have typeof
-// 'object', whereas JS_FUNCTION_TYPE has typeof 'function'.
-//
-// NOTE: List had to be split into two, because of conditional item(s) from
-// INTL namespace. They can't just be appended to the end, because of the
-// checks we do in tests (expecting JS_FUNCTION_TYPE to be last).
-#define INSTANCE_TYPE_LIST_BEFORE_INTL(V) \
+#define INSTANCE_TYPE_LIST_BASE(V) \
V(INTERNALIZED_STRING_TYPE) \
V(EXTERNAL_INTERNALIZED_STRING_TYPE) \
V(ONE_BYTE_INTERNALIZED_STRING_TYPE) \
@@ -58,191 +50,11 @@ namespace internal {
V(SLICED_ONE_BYTE_STRING_TYPE) \
V(THIN_ONE_BYTE_STRING_TYPE) \
V(UNCACHED_EXTERNAL_STRING_TYPE) \
- V(UNCACHED_EXTERNAL_ONE_BYTE_STRING_TYPE) \
- \
- V(SYMBOL_TYPE) \
- V(HEAP_NUMBER_TYPE) \
- V(BIGINT_TYPE) \
- V(ODDBALL_TYPE) \
- \
- V(MAP_TYPE) \
- V(CODE_TYPE) \
- V(FOREIGN_TYPE) \
- V(BYTE_ARRAY_TYPE) \
- V(BYTECODE_ARRAY_TYPE) \
- V(FREE_SPACE_TYPE) \
- \
- V(FIXED_DOUBLE_ARRAY_TYPE) \
- V(FEEDBACK_METADATA_TYPE) \
- V(FILLER_TYPE) \
- \
- V(ACCESS_CHECK_INFO_TYPE) \
- V(ACCESSOR_INFO_TYPE) \
- V(ACCESSOR_PAIR_TYPE) \
- V(ALIASED_ARGUMENTS_ENTRY_TYPE) \
- V(ALLOCATION_MEMENTO_TYPE) \
- V(ARRAY_BOILERPLATE_DESCRIPTION_TYPE) \
- V(ASM_WASM_DATA_TYPE) \
- V(ASYNC_GENERATOR_REQUEST_TYPE) \
- V(CLASS_POSITIONS_TYPE) \
- V(DEBUG_INFO_TYPE) \
- V(ENUM_CACHE_TYPE) \
- V(FUNCTION_TEMPLATE_INFO_TYPE) \
- V(FUNCTION_TEMPLATE_RARE_DATA_TYPE) \
- V(INTERCEPTOR_INFO_TYPE) \
- V(INTERPRETER_DATA_TYPE) \
- V(OBJECT_TEMPLATE_INFO_TYPE) \
- V(PROMISE_CAPABILITY_TYPE) \
- V(PROMISE_REACTION_TYPE) \
- V(PROTOTYPE_INFO_TYPE) \
- V(SCRIPT_TYPE) \
- V(SOURCE_POSITION_TABLE_WITH_FRAME_CACHE_TYPE) \
- V(SOURCE_TEXT_MODULE_INFO_ENTRY_TYPE) \
- V(STACK_FRAME_INFO_TYPE) \
- V(STACK_TRACE_FRAME_TYPE) \
- V(TEMPLATE_OBJECT_DESCRIPTION_TYPE) \
- V(TUPLE2_TYPE) \
- V(TUPLE3_TYPE) \
- V(WASM_CAPI_FUNCTION_DATA_TYPE) \
- V(WASM_DEBUG_INFO_TYPE) \
- V(WASM_EXCEPTION_TAG_TYPE) \
- V(WASM_EXPORTED_FUNCTION_DATA_TYPE) \
- V(WASM_INDIRECT_FUNCTION_TABLE_TYPE) \
- V(WASM_JS_FUNCTION_DATA_TYPE) \
- \
- V(CALLABLE_TASK_TYPE) \
- V(CALLBACK_TASK_TYPE) \
- V(PROMISE_FULFILL_REACTION_JOB_TASK_TYPE) \
- V(PROMISE_REJECT_REACTION_JOB_TASK_TYPE) \
- V(PROMISE_RESOLVE_THENABLE_JOB_TASK_TYPE) \
- \
- TORQUE_DEFINED_INSTANCE_TYPES(V) \
- \
- V(SOURCE_TEXT_MODULE_TYPE) \
- V(SYNTHETIC_MODULE_TYPE) \
- \
- V(ALLOCATION_SITE_TYPE) \
- V(EMBEDDER_DATA_ARRAY_TYPE) \
- \
- V(FIXED_ARRAY_TYPE) \
- V(OBJECT_BOILERPLATE_DESCRIPTION_TYPE) \
- V(CLOSURE_FEEDBACK_CELL_ARRAY_TYPE) \
- V(HASH_TABLE_TYPE) \
- V(ORDERED_HASH_MAP_TYPE) \
- V(ORDERED_HASH_SET_TYPE) \
- V(ORDERED_NAME_DICTIONARY_TYPE) \
- V(NAME_DICTIONARY_TYPE) \
- V(GLOBAL_DICTIONARY_TYPE) \
- V(NUMBER_DICTIONARY_TYPE) \
- V(SIMPLE_NUMBER_DICTIONARY_TYPE) \
- V(STRING_TABLE_TYPE) \
- V(EPHEMERON_HASH_TABLE_TYPE) \
- V(SCOPE_INFO_TYPE) \
- V(SCRIPT_CONTEXT_TABLE_TYPE) \
- \
- V(AWAIT_CONTEXT_TYPE) \
- V(BLOCK_CONTEXT_TYPE) \
- V(CATCH_CONTEXT_TYPE) \
- V(DEBUG_EVALUATE_CONTEXT_TYPE) \
- V(EVAL_CONTEXT_TYPE) \
- V(FUNCTION_CONTEXT_TYPE) \
- V(MODULE_CONTEXT_TYPE) \
- V(NATIVE_CONTEXT_TYPE) \
- V(SCRIPT_CONTEXT_TYPE) \
- V(WITH_CONTEXT_TYPE) \
- \
- V(WEAK_FIXED_ARRAY_TYPE) \
- V(TRANSITION_ARRAY_TYPE) \
- \
- V(CALL_HANDLER_INFO_TYPE) \
- V(CELL_TYPE) \
- V(CODE_DATA_CONTAINER_TYPE) \
- V(DESCRIPTOR_ARRAY_TYPE) \
- V(FEEDBACK_CELL_TYPE) \
- V(FEEDBACK_VECTOR_TYPE) \
- V(LOAD_HANDLER_TYPE) \
- V(PREPARSE_DATA_TYPE) \
- V(PROPERTY_ARRAY_TYPE) \
- V(PROPERTY_CELL_TYPE) \
- V(SHARED_FUNCTION_INFO_TYPE) \
- V(SMALL_ORDERED_HASH_MAP_TYPE) \
- V(SMALL_ORDERED_HASH_SET_TYPE) \
- V(SMALL_ORDERED_NAME_DICTIONARY_TYPE) \
- V(STORE_HANDLER_TYPE) \
- V(UNCOMPILED_DATA_WITHOUT_PREPARSE_DATA_TYPE) \
- V(UNCOMPILED_DATA_WITH_PREPARSE_DATA_TYPE) \
- V(WEAK_ARRAY_LIST_TYPE) \
- V(WEAK_CELL_TYPE) \
- \
- V(JS_PROXY_TYPE) \
- V(JS_GLOBAL_OBJECT_TYPE) \
- V(JS_GLOBAL_PROXY_TYPE) \
- V(JS_MODULE_NAMESPACE_TYPE) \
- V(JS_SPECIAL_API_OBJECT_TYPE) \
- V(JS_PRIMITIVE_WRAPPER_TYPE) \
- V(JS_API_OBJECT_TYPE) \
- V(JS_OBJECT_TYPE) \
- \
- V(JS_ARGUMENTS_TYPE) \
- V(JS_ARRAY_BUFFER_TYPE) \
- V(JS_ARRAY_ITERATOR_TYPE) \
- V(JS_ARRAY_TYPE) \
- V(JS_ASYNC_FROM_SYNC_ITERATOR_TYPE) \
- V(JS_ASYNC_FUNCTION_OBJECT_TYPE) \
- V(JS_ASYNC_GENERATOR_OBJECT_TYPE) \
- V(JS_CONTEXT_EXTENSION_OBJECT_TYPE) \
- V(JS_DATE_TYPE) \
- V(JS_ERROR_TYPE) \
- V(JS_GENERATOR_OBJECT_TYPE) \
- V(JS_MAP_TYPE) \
- V(JS_MAP_KEY_ITERATOR_TYPE) \
- V(JS_MAP_KEY_VALUE_ITERATOR_TYPE) \
- V(JS_MAP_VALUE_ITERATOR_TYPE) \
- V(JS_MESSAGE_OBJECT_TYPE) \
- V(JS_PROMISE_TYPE) \
- V(JS_REGEXP_TYPE) \
- V(JS_REGEXP_STRING_ITERATOR_TYPE) \
- V(JS_SET_TYPE) \
- V(JS_SET_KEY_VALUE_ITERATOR_TYPE) \
- V(JS_SET_VALUE_ITERATOR_TYPE) \
- V(JS_STRING_ITERATOR_TYPE) \
- V(JS_WEAK_REF_TYPE) \
- V(JS_FINALIZATION_GROUP_CLEANUP_ITERATOR_TYPE) \
- V(JS_FINALIZATION_GROUP_TYPE) \
- V(JS_WEAK_MAP_TYPE) \
- V(JS_WEAK_SET_TYPE) \
- V(JS_TYPED_ARRAY_TYPE) \
- V(JS_DATA_VIEW_TYPE)
+ V(UNCACHED_EXTERNAL_ONE_BYTE_STRING_TYPE)
-#define INSTANCE_TYPE_LIST_AFTER_INTL(V) \
- V(WASM_EXCEPTION_TYPE) \
- V(WASM_GLOBAL_TYPE) \
- V(WASM_INSTANCE_TYPE) \
- V(WASM_MEMORY_TYPE) \
- V(WASM_MODULE_TYPE) \
- V(WASM_TABLE_TYPE) \
- V(JS_BOUND_FUNCTION_TYPE) \
- V(JS_FUNCTION_TYPE)
-
-#ifdef V8_INTL_SUPPORT
-#define INSTANCE_TYPE_LIST(V) \
- INSTANCE_TYPE_LIST_BEFORE_INTL(V) \
- V(JS_INTL_V8_BREAK_ITERATOR_TYPE) \
- V(JS_INTL_COLLATOR_TYPE) \
- V(JS_INTL_DATE_TIME_FORMAT_TYPE) \
- V(JS_INTL_LIST_FORMAT_TYPE) \
- V(JS_INTL_LOCALE_TYPE) \
- V(JS_INTL_NUMBER_FORMAT_TYPE) \
- V(JS_INTL_PLURAL_RULES_TYPE) \
- V(JS_INTL_RELATIVE_TIME_FORMAT_TYPE) \
- V(JS_INTL_SEGMENT_ITERATOR_TYPE) \
- V(JS_INTL_SEGMENTER_TYPE) \
- INSTANCE_TYPE_LIST_AFTER_INTL(V)
-#else
-#define INSTANCE_TYPE_LIST(V) \
- INSTANCE_TYPE_LIST_BEFORE_INTL(V) \
- INSTANCE_TYPE_LIST_AFTER_INTL(V)
-#endif // V8_INTL_SUPPORT
+#define INSTANCE_TYPE_LIST(V) \
+ INSTANCE_TYPE_LIST_BASE(V) \
+ TORQUE_ASSIGNED_INSTANCE_TYPE_LIST(V)
// Since string types are not consecutive, this macro is used to
// iterate over them.
@@ -290,11 +102,20 @@ namespace internal {
// code for the class including allocation and garbage collection routines,
// casts and predicates. All you need to define is the class, methods and
// object verification routines. Easy, no?
-//
-// Note that for subtle reasons related to the ordering or numerical values of
-// type tags, elements in this list have to be added to the INSTANCE_TYPE_LIST
-// manually.
-#define STRUCT_LIST_GENERATOR(V, _) \
+#define STRUCT_LIST_GENERATOR_BASE(V, _) \
+ V(_, PROMISE_FULFILL_REACTION_JOB_TASK_TYPE, PromiseFulfillReactionJobTask, \
+ promise_fulfill_reaction_job_task) \
+ V(_, PROMISE_REJECT_REACTION_JOB_TASK_TYPE, PromiseRejectReactionJobTask, \
+ promise_reject_reaction_job_task) \
+ V(_, CALLABLE_TASK_TYPE, CallableTask, callable_task) \
+ V(_, CALLBACK_TASK_TYPE, CallbackTask, callback_task) \
+ V(_, PROMISE_RESOLVE_THENABLE_JOB_TASK_TYPE, PromiseResolveThenableJobTask, \
+ promise_resolve_thenable_job_task) \
+ V(_, FUNCTION_TEMPLATE_INFO_TYPE, FunctionTemplateInfo, \
+ function_template_info) \
+ V(_, OBJECT_TEMPLATE_INFO_TYPE, ObjectTemplateInfo, object_template_info) \
+ V(_, TUPLE2_TYPE, Tuple2, tuple2) \
+ V(_, TUPLE3_TYPE, Tuple3, tuple3) \
V(_, ACCESS_CHECK_INFO_TYPE, AccessCheckInfo, access_check_info) \
V(_, ACCESSOR_INFO_TYPE, AccessorInfo, accessor_info) \
V(_, ACCESSOR_PAIR_TYPE, AccessorPair, accessor_pair) \
@@ -309,13 +130,10 @@ namespace internal {
V(_, CLASS_POSITIONS_TYPE, ClassPositions, class_positions) \
V(_, DEBUG_INFO_TYPE, DebugInfo, debug_info) \
V(_, ENUM_CACHE_TYPE, EnumCache, enum_cache) \
- V(_, FUNCTION_TEMPLATE_INFO_TYPE, FunctionTemplateInfo, \
- function_template_info) \
V(_, FUNCTION_TEMPLATE_RARE_DATA_TYPE, FunctionTemplateRareData, \
function_template_rare_data) \
V(_, INTERCEPTOR_INFO_TYPE, InterceptorInfo, interceptor_info) \
V(_, INTERPRETER_DATA_TYPE, InterpreterData, interpreter_data) \
- V(_, OBJECT_TEMPLATE_INFO_TYPE, ObjectTemplateInfo, object_template_info) \
V(_, PROMISE_CAPABILITY_TYPE, PromiseCapability, promise_capability) \
V(_, PROMISE_REACTION_TYPE, PromiseReaction, promise_reaction) \
V(_, PROTOTYPE_INFO_TYPE, PrototypeInfo, prototype_info) \
@@ -328,8 +146,6 @@ namespace internal {
V(_, STACK_TRACE_FRAME_TYPE, StackTraceFrame, stack_trace_frame) \
V(_, TEMPLATE_OBJECT_DESCRIPTION_TYPE, TemplateObjectDescription, \
template_object_description) \
- V(_, TUPLE2_TYPE, Tuple2, tuple2) \
- V(_, TUPLE3_TYPE, Tuple3, tuple3) \
V(_, WASM_CAPI_FUNCTION_DATA_TYPE, WasmCapiFunctionData, \
wasm_capi_function_data) \
V(_, WASM_DEBUG_INFO_TYPE, WasmDebugInfo, wasm_debug_info) \
@@ -338,32 +154,24 @@ namespace internal {
wasm_exported_function_data) \
V(_, WASM_INDIRECT_FUNCTION_TABLE_TYPE, WasmIndirectFunctionTable, \
wasm_indirect_function_table) \
- V(_, WASM_JS_FUNCTION_DATA_TYPE, WasmJSFunctionData, wasm_js_function_data) \
- V(_, CALLABLE_TASK_TYPE, CallableTask, callable_task) \
- V(_, CALLBACK_TASK_TYPE, CallbackTask, callback_task) \
- V(_, PROMISE_FULFILL_REACTION_JOB_TASK_TYPE, PromiseFulfillReactionJobTask, \
- promise_fulfill_reaction_job_task) \
- V(_, PROMISE_REJECT_REACTION_JOB_TASK_TYPE, PromiseRejectReactionJobTask, \
- promise_reject_reaction_job_task) \
- V(_, PROMISE_RESOLVE_THENABLE_JOB_TASK_TYPE, PromiseResolveThenableJobTask, \
- promise_resolve_thenable_job_task)
+ V(_, WASM_JS_FUNCTION_DATA_TYPE, WasmJSFunctionData, wasm_js_function_data)
+
+#define STRUCT_LIST_GENERATOR(V, _) \
+ STRUCT_LIST_GENERATOR_BASE(V, _) \
+ TORQUE_STRUCT_LIST_GENERATOR(V, _)
// Adapts one STRUCT_LIST_GENERATOR entry to the STRUCT_LIST entry
#define STRUCT_LIST_ADAPTER(V, NAME, Name, name) V(NAME, Name, name)
// Produces (NAME, Name, name) entries.
-#define STRUCT_LIST(V) \
- STRUCT_LIST_GENERATOR(STRUCT_LIST_ADAPTER, V) \
- TORQUE_STRUCT_LIST_GENERATOR(STRUCT_LIST_ADAPTER, V)
+#define STRUCT_LIST(V) STRUCT_LIST_GENERATOR(STRUCT_LIST_ADAPTER, V)
// Adapts one STRUCT_LIST_GENERATOR entry to the STRUCT_MAPS_LIST entry
#define STRUCT_MAPS_LIST_ADAPTER(V, NAME, Name, name) \
V(Map, name##_map, Name##Map)
// Produces (Map, struct_name_map, StructNameMap) entries
-#define STRUCT_MAPS_LIST(V) \
- STRUCT_LIST_GENERATOR(STRUCT_MAPS_LIST_ADAPTER, V) \
- TORQUE_STRUCT_LIST_GENERATOR(STRUCT_MAPS_LIST_ADAPTER, V)
+#define STRUCT_MAPS_LIST(V) STRUCT_LIST_GENERATOR(STRUCT_MAPS_LIST_ADAPTER, V)
//
// The following macros define list of allocation size objects and list of
diff --git a/deps/v8/src/objects/objects-inl.h b/deps/v8/src/objects/objects-inl.h
index cf8c3ffad2..08f4a2b6f0 100644
--- a/deps/v8/src/objects/objects-inl.h
+++ b/deps/v8/src/objects/objects-inl.h
@@ -350,6 +350,13 @@ DEF_GETTER(HeapObject, IsDependentCode, bool) {
return true;
}
+DEF_GETTER(HeapObject, IsOSROptimizedCodeCache, bool) {
+ if (!IsWeakFixedArray(isolate)) return false;
+ // There's actually no way to see the difference between a weak fixed array
+ // and a osr optimized code cache.
+ return true;
+}
+
DEF_GETTER(HeapObject, IsAbstractCode, bool) {
return IsBytecodeArray(isolate) || IsCode(isolate);
}
@@ -411,6 +418,12 @@ DEF_GETTER(HeapObject, IsSmallOrderedHashTable, bool) {
IsSmallOrderedNameDictionary(isolate);
}
+DEF_GETTER(HeapObject, IsWasmExceptionPackage, bool) {
+ // It is not possible to check for the existence of certain properties on the
+ // underlying {JSReceiver} here because that requires calling handlified code.
+ return IsJSReceiver(isolate);
+}
+
bool Object::IsPrimitive() const {
if (IsSmi()) return true;
HeapObject this_heap_object = HeapObject::cast(*this);
@@ -506,7 +519,7 @@ bool Object::IsMinusZero() const {
OBJECT_CONSTRUCTORS_IMPL(RegExpMatchInfo, FixedArray)
OBJECT_CONSTRUCTORS_IMPL(ScopeInfo, FixedArray)
-OBJECT_CONSTRUCTORS_IMPL(BigIntBase, HeapObject)
+OBJECT_CONSTRUCTORS_IMPL(BigIntBase, PrimitiveHeapObject)
OBJECT_CONSTRUCTORS_IMPL(BigInt, BigIntBase)
OBJECT_CONSTRUCTORS_IMPL(FreshlyAllocatedBigInt, BigIntBase)
@@ -756,11 +769,13 @@ void HeapObject::set_map(Map value) {
#endif
}
set_map_word(MapWord::FromMap(value));
+#ifndef V8_DISABLE_WRITE_BARRIERS
if (!value.is_null()) {
// TODO(1600) We are passing kNullAddress as a slot because maps can never
// be on an evacuation candidate.
MarkingBarrier(*this, ObjectSlot(kNullAddress), value);
}
+#endif
}
DEF_GETTER(HeapObject, synchronized_map, Map) {
@@ -774,11 +789,13 @@ void HeapObject::synchronized_set_map(Map value) {
#endif
}
synchronized_set_map_word(MapWord::FromMap(value));
+#ifndef V8_DISABLE_WRITE_BARRIERS
if (!value.is_null()) {
// TODO(1600) We are passing kNullAddress as a slot because maps can never
// be on an evacuation candidate.
MarkingBarrier(*this, ObjectSlot(kNullAddress), value);
}
+#endif
}
// Unsafe accessor omitting write barrier.
@@ -793,12 +810,14 @@ void HeapObject::set_map_no_write_barrier(Map value) {
void HeapObject::set_map_after_allocation(Map value, WriteBarrierMode mode) {
set_map_word(MapWord::FromMap(value));
+#ifndef V8_DISABLE_WRITE_BARRIERS
if (mode != SKIP_WRITE_BARRIER) {
DCHECK(!value.is_null());
// TODO(1600) We are passing kNullAddress as a slot because maps can never
// be on an evacuation candidate.
MarkingBarrier(*this, ObjectSlot(kNullAddress), value);
}
+#endif
}
ObjectSlot HeapObject::map_slot() const {
diff --git a/deps/v8/src/objects/objects.cc b/deps/v8/src/objects/objects.cc
index 134cb3998a..ec4a8594f6 100644
--- a/deps/v8/src/objects/objects.cc
+++ b/deps/v8/src/objects/objects.cc
@@ -65,6 +65,7 @@
#include "src/objects/lookup-inl.h"
#include "src/objects/map-updater.h"
#include "src/objects/objects-body-descriptors-inl.h"
+#include "src/objects/property-details.h"
#include "src/utils/identity-map.h"
#ifdef V8_INTL_SUPPORT
#include "src/objects/js-break-iterator.h"
@@ -1770,7 +1771,7 @@ bool Object::IterationHasObservableEffects() {
// Check that the ArrayPrototype hasn't been modified in a way that would
// affect iteration.
- if (!isolate->IsArrayIteratorLookupChainIntact()) return true;
+ if (!Protectors::IsArrayIteratorLookupChainIntact(isolate)) return true;
// For FastPacked kinds, iteration will have the same effect as simply
// accessing each property in order.
@@ -1781,7 +1782,7 @@ bool Object::IterationHasObservableEffects() {
// the prototype. This could have different results if the prototype has been
// changed.
if (IsHoleyElementsKind(array_kind) &&
- isolate->IsNoElementsProtectorIntact()) {
+ Protectors::IsNoElementsIntact(isolate)) {
return false;
}
return true;
@@ -2188,7 +2189,8 @@ int HeapObject::SizeFromMap(Map map) const {
}
if (IsInRange(instance_type, FIRST_CONTEXT_TYPE, LAST_CONTEXT_TYPE)) {
if (instance_type == NATIVE_CONTEXT_TYPE) return NativeContext::kSize;
- return Context::SizeFor(Context::unchecked_cast(*this).length());
+ return Context::SizeFor(
+ Context::unchecked_cast(*this).synchronized_length());
}
if (instance_type == ONE_BYTE_STRING_TYPE ||
instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) {
@@ -2378,7 +2380,7 @@ bool HeapObject::IsExternal(Isolate* isolate) const {
void DescriptorArray::GeneralizeAllFields() {
int length = number_of_descriptors();
- for (int i = 0; i < length; i++) {
+ for (InternalIndex i : InternalIndex::Range(length)) {
PropertyDetails details = GetDetails(i);
details = details.CopyWithRepresentation(Representation::Tagged());
if (details.location() == kField) {
@@ -3717,7 +3719,7 @@ Handle<DescriptorArray> DescriptorArray::CopyUpToAddAttributes(
DescriptorArray::Allocate(isolate, size, slack);
if (attributes != NONE) {
- for (int i = 0; i < size; ++i) {
+ for (InternalIndex i : InternalIndex::Range(size)) {
MaybeObject value_or_field_type = desc->GetValue(i);
Name key = desc->GetKey(i);
PropertyDetails details = desc->GetDetails(i);
@@ -3737,7 +3739,7 @@ Handle<DescriptorArray> DescriptorArray::CopyUpToAddAttributes(
descriptors->Set(i, key, value_or_field_type, details);
}
} else {
- for (int i = 0; i < size; ++i) {
+ for (InternalIndex i : InternalIndex::Range(size)) {
descriptors->CopyFrom(i, *desc);
}
}
@@ -3760,21 +3762,17 @@ Handle<DescriptorArray> DescriptorArray::CopyForFastObjectClone(
Handle<DescriptorArray> descriptors =
DescriptorArray::Allocate(isolate, size, slack);
- for (int i = 0; i < size; ++i) {
+ for (InternalIndex i : InternalIndex::Range(size)) {
Name key = src->GetKey(i);
PropertyDetails details = src->GetDetails(i);
+ Representation new_representation = details.representation();
DCHECK(!key.IsPrivateName());
DCHECK(details.IsEnumerable());
DCHECK_EQ(details.kind(), kData);
-
- // Ensure the ObjectClone property details are NONE, and that all source
- // details did not contain DONT_ENUM.
- PropertyDetails new_details(kData, NONE, details.location(),
- details.constness(), details.representation(),
- details.field_index());
- // Do not propagate the field type of normal object fields from the
- // original descriptors since FieldType changes don't create new maps.
+ // If the new representation is an in-place changeable field, make it
+ // generic as possible (under in-place changes) to avoid type confusion if
+ // the source representation changes after this feedback has been collected.
MaybeObject type = src->GetValue(i);
if (details.location() == PropertyLocation::kField) {
type = MaybeObject::FromObject(FieldType::Any());
@@ -3783,13 +3781,15 @@ Handle<DescriptorArray> DescriptorArray::CopyForFastObjectClone(
// need to generalize the descriptors here. That will also enable
// us to skip the defensive copying of the target map whenever a
// CloneObjectIC misses.
- if (FLAG_modify_field_representation_inplace &&
- (new_details.representation().IsSmi() ||
- new_details.representation().IsHeapObject())) {
- new_details =
- new_details.CopyWithRepresentation(Representation::Tagged());
- }
+ new_representation = new_representation.MostGenericInPlaceChange();
}
+
+ // Ensure the ObjectClone property details are NONE, and that all source
+ // details did not contain DONT_ENUM.
+ PropertyDetails new_details(kData, NONE, details.location(),
+ details.constness(), new_representation,
+ details.field_index());
+
descriptors->Set(i, key, type, new_details);
}
@@ -3799,7 +3799,7 @@ Handle<DescriptorArray> DescriptorArray::CopyForFastObjectClone(
}
bool DescriptorArray::IsEqualUpTo(DescriptorArray desc, int nof_descriptors) {
- for (int i = 0; i < nof_descriptors; i++) {
+ for (InternalIndex i : InternalIndex::Range(nof_descriptors)) {
if (GetKey(i) != desc.GetKey(i) || GetValue(i) != desc.GetValue(i)) {
return false;
}
@@ -3816,8 +3816,7 @@ bool DescriptorArray::IsEqualUpTo(DescriptorArray desc, int nof_descriptors) {
Handle<FixedArray> FixedArray::SetAndGrow(Isolate* isolate,
Handle<FixedArray> array, int index,
- Handle<Object> value,
- AllocationType allocation) {
+ Handle<Object> value) {
if (index < array->length()) {
array->set(index, *value);
return array;
@@ -3827,7 +3826,7 @@ Handle<FixedArray> FixedArray::SetAndGrow(Isolate* isolate,
capacity = JSObject::NewElementsCapacity(capacity);
} while (capacity <= index);
Handle<FixedArray> new_array =
- isolate->factory()->NewUninitializedFixedArray(capacity, allocation);
+ isolate->factory()->NewUninitializedFixedArray(capacity);
array->CopyTo(0, *new_array, 0, array->length());
new_array->FillWithHoles(array->length(), new_array->length());
new_array->set(index, *value);
@@ -4147,12 +4146,10 @@ Handle<FrameArray> FrameArray::EnsureSpace(Isolate* isolate,
Handle<DescriptorArray> DescriptorArray::Allocate(Isolate* isolate,
int nof_descriptors,
- int slack,
- AllocationType allocation) {
+ int slack) {
return nof_descriptors + slack == 0
? isolate->factory()->empty_descriptor_array()
- : isolate->factory()->NewDescriptorArray(nof_descriptors, slack,
- allocation);
+ : isolate->factory()->NewDescriptorArray(nof_descriptors, slack);
}
void DescriptorArray::Initialize(EnumCache enum_cache,
@@ -4174,8 +4171,8 @@ void DescriptorArray::ClearEnumCache() {
set_enum_cache(GetReadOnlyRoots().empty_enum_cache());
}
-void DescriptorArray::Replace(int index, Descriptor* descriptor) {
- descriptor->SetSortedKeyIndex(GetSortedKeyIndex(index));
+void DescriptorArray::Replace(InternalIndex index, Descriptor* descriptor) {
+ descriptor->SetSortedKeyIndex(GetSortedKeyIndex(index.as_int()));
Set(index, descriptor);
}
@@ -4193,7 +4190,7 @@ void DescriptorArray::InitializeOrChangeEnumCache(
}
}
-void DescriptorArray::CopyFrom(int index, DescriptorArray src) {
+void DescriptorArray::CopyFrom(InternalIndex index, DescriptorArray src) {
PropertyDetails details = src.GetDetails(index);
Set(index, src.GetKey(index), src.GetValue(index), details);
}
@@ -4304,7 +4301,7 @@ bool DescriptorArray::IsEqualTo(DescriptorArray other) {
if (number_of_all_descriptors() != other.number_of_all_descriptors()) {
return false;
}
- for (int i = 0; i < number_of_descriptors(); ++i) {
+ for (InternalIndex i : InternalIndex::Range(number_of_descriptors())) {
if (GetKey(i) != other.GetKey(i)) return false;
if (GetDetails(i).AsSmi() != other.GetDetails(i).AsSmi()) return false;
if (GetValue(i) != other.GetValue(i)) return false;
@@ -4507,6 +4504,7 @@ uint32_t StringHasher::MakeArrayIndexHash(uint32_t value, int length) {
value |= length << String::ArrayIndexLengthBits::kShift;
DCHECK_EQ(value & String::kIsNotArrayIndexMask, 0);
+ DCHECK_EQ(value & String::kIsNotIntegerIndexMask, 0);
DCHECK_EQ(length <= String::kMaxCachedArrayIndexLength,
Name::ContainsCachedArrayIndex(value));
return value;
@@ -4659,8 +4657,26 @@ bool Script::GetPositionInfo(int position, PositionInfo* info,
// directly.
if (type() == Script::TYPE_WASM) {
DCHECK_LE(0, position);
- return WasmModuleObject::cast(wasm_module_object())
- .GetPositionInfo(static_cast<uint32_t>(position), info);
+ wasm::NativeModule* native_module = wasm_native_module();
+ const wasm::WasmModule* module = native_module->module();
+ if (source_mapping_url().IsString()) {
+ if (module->functions.size() == 0) return false;
+ info->line = 0;
+ info->column = position;
+ info->line_start = module->functions[0].code.offset();
+ info->line_end = module->functions.back().code.end_offset();
+ return true;
+ }
+ int func_index = GetContainingWasmFunction(module, position);
+ if (func_index < 0) return false;
+
+ const wasm::WasmFunction& function = module->functions[func_index];
+
+ info->line = func_index;
+ info->column = position - function.code.offset();
+ info->line_start = function.code.offset();
+ info->line_end = function.code.end_offset();
+ return true;
}
if (line_ends().IsUndefined()) {
@@ -4972,26 +4988,8 @@ void SharedFunctionInfo::ScriptIterator::Reset(Isolate* isolate,
index_ = 0;
}
-SharedFunctionInfo::GlobalIterator::GlobalIterator(Isolate* isolate)
- : isolate_(isolate),
- script_iterator_(isolate),
- noscript_sfi_iterator_(isolate->heap()->noscript_shared_function_infos()),
- sfi_iterator_(isolate, script_iterator_.Next()) {}
-
-SharedFunctionInfo SharedFunctionInfo::GlobalIterator::Next() {
- HeapObject next = noscript_sfi_iterator_.Next();
- if (!next.is_null()) return SharedFunctionInfo::cast(next);
- for (;;) {
- next = sfi_iterator_.Next();
- if (!next.is_null()) return SharedFunctionInfo::cast(next);
- Script next_script = script_iterator_.Next();
- if (next_script.is_null()) return SharedFunctionInfo();
- sfi_iterator_.Reset(isolate_, next_script);
- }
-}
-
void SharedFunctionInfo::SetScript(Handle<SharedFunctionInfo> shared,
- Handle<Object> script_object,
+ Handle<HeapObject> script_object,
int function_literal_id,
bool reset_preparsed_scope_data) {
if (shared->script() == *script_object) return;
@@ -5020,30 +5018,8 @@ void SharedFunctionInfo::SetScript(Handle<SharedFunctionInfo> shared,
}
#endif
list->Set(function_literal_id, HeapObjectReference::Weak(*shared));
-
- // Remove shared function info from root array.
- WeakArrayList noscript_list =
- isolate->heap()->noscript_shared_function_infos();
- CHECK(noscript_list.RemoveOne(MaybeObjectHandle::Weak(shared)));
} else {
DCHECK(shared->script().IsScript());
- Handle<WeakArrayList> list =
- isolate->factory()->noscript_shared_function_infos();
-
-#ifdef DEBUG
- if (FLAG_enable_slow_asserts) {
- WeakArrayList::Iterator iterator(*list);
- for (HeapObject next = iterator.Next(); !next.is_null();
- next = iterator.Next()) {
- DCHECK_NE(next, *shared);
- }
- }
-#endif // DEBUG
-
- list =
- WeakArrayList::AddToEnd(isolate, list, MaybeObjectHandle::Weak(shared));
-
- isolate->heap()->SetRootNoScriptSharedFunctionInfos(*list);
// Remove shared function info from old script's list.
Script old_script = Script::cast(shared->script());
@@ -5354,6 +5330,8 @@ void SharedFunctionInfo::InitFromFunctionLiteral(
Scope* outer_scope = lit->scope()->GetOuterScopeWithContext();
if (outer_scope) {
shared_info->set_outer_scope_info(*outer_scope->scope_info());
+ shared_info->set_private_name_lookup_skips_outer_class(
+ lit->scope()->private_name_lookup_skips_outer_class());
}
}
@@ -5669,9 +5647,10 @@ bool JSArray::HasReadOnlyLength(Handle<JSArray> array) {
// Fast path: "length" is the first fast property of arrays. Since it's not
// configurable, it's guaranteed to be the first in the descriptor array.
if (!map.is_dictionary_map()) {
- DCHECK(map.instance_descriptors().GetKey(0) ==
+ InternalIndex first(0);
+ DCHECK(map.instance_descriptors().GetKey(first) ==
array->GetReadOnlyRoots().length_string());
- return map.instance_descriptors().GetDetails(0).IsReadOnly();
+ return map.instance_descriptors().GetDetails(first).IsReadOnly();
}
Isolate* isolate = array->GetIsolate();
@@ -5927,17 +5906,25 @@ MaybeHandle<Object> JSPromise::Resolve(Handle<JSPromise> promise,
// 8. Let then be Get(resolution, "then").
MaybeHandle<Object> then;
- if (isolate->IsPromiseThenLookupChainIntact(
- Handle<JSReceiver>::cast(resolution))) {
+ Handle<JSReceiver> receiver(Handle<JSReceiver>::cast(resolution));
+
+ // Make sure a lookup of "then" on any JSPromise whose [[Prototype]] is the
+ // initial %PromisePrototype% yields the initial method. In addition this
+ // protector also guards the negative lookup of "then" on the intrinsic
+ // %ObjectPrototype%, meaning that such lookups are guaranteed to yield
+ // undefined without triggering any side-effects.
+ if (receiver->IsJSPromise() &&
+ isolate->IsInAnyContext(receiver->map().prototype(),
+ Context::PROMISE_PROTOTYPE_INDEX) &&
+ Protectors::IsPromiseThenLookupChainIntact(isolate)) {
// We can skip the "then" lookup on {resolution} if its [[Prototype]]
// is the (initial) Promise.prototype and the Promise#then protector
// is intact, as that guards the lookup path for the "then" property
// on JSPromise instances which have the (initial) %PromisePrototype%.
then = isolate->promise_then();
} else {
- then =
- JSReceiver::GetProperty(isolate, Handle<JSReceiver>::cast(resolution),
- isolate->factory()->then_string());
+ then = JSReceiver::GetProperty(isolate, receiver,
+ isolate->factory()->then_string());
}
// 9. If then is an abrupt completion, then
@@ -6151,27 +6138,40 @@ bool JSRegExp::ShouldProduceBytecode() {
}
// An irregexp is considered to be marked for tier up if the tier-up ticks value
-// is not zero. An atom is not subject to tier-up implementation, so the tier-up
-// ticks value is not set.
+// reaches zero. An atom is not subject to tier-up implementation, so the
+// tier-up ticks value is not set.
bool JSRegExp::MarkedForTierUp() {
DCHECK(data().IsFixedArray());
- if (TypeTag() == JSRegExp::ATOM) {
+ if (TypeTag() == JSRegExp::ATOM || !FLAG_regexp_tier_up) {
return false;
}
- return Smi::ToInt(DataAt(kIrregexpTierUpTicksIndex)) != 0;
+ return Smi::ToInt(DataAt(kIrregexpTicksUntilTierUpIndex)) == 0;
}
-void JSRegExp::ResetTierUp() {
+void JSRegExp::ResetLastTierUpTick() {
DCHECK(FLAG_regexp_tier_up);
DCHECK_EQ(TypeTag(), JSRegExp::IRREGEXP);
- FixedArray::cast(data()).set(JSRegExp::kIrregexpTierUpTicksIndex, Smi::kZero);
+ int tier_up_ticks = Smi::ToInt(DataAt(kIrregexpTicksUntilTierUpIndex)) + 1;
+ FixedArray::cast(data()).set(JSRegExp::kIrregexpTicksUntilTierUpIndex,
+ Smi::FromInt(tier_up_ticks));
+}
+
+void JSRegExp::TierUpTick() {
+ DCHECK(FLAG_regexp_tier_up);
+ DCHECK_EQ(TypeTag(), JSRegExp::IRREGEXP);
+ int tier_up_ticks = Smi::ToInt(DataAt(kIrregexpTicksUntilTierUpIndex));
+ if (tier_up_ticks == 0) {
+ return;
+ }
+ FixedArray::cast(data()).set(JSRegExp::kIrregexpTicksUntilTierUpIndex,
+ Smi::FromInt(tier_up_ticks - 1));
}
void JSRegExp::MarkTierUpForNextExec() {
DCHECK(FLAG_regexp_tier_up);
DCHECK_EQ(TypeTag(), JSRegExp::IRREGEXP);
- FixedArray::cast(data()).set(JSRegExp::kIrregexpTierUpTicksIndex,
- Smi::FromInt(1));
+ FixedArray::cast(data()).set(JSRegExp::kIrregexpTicksUntilTierUpIndex,
+ Smi::kZero);
}
namespace {
@@ -6938,7 +6938,7 @@ void AddToFeedbackCellsMap(Handle<CompilationCacheTable> cache, int cache_entry,
if (entry < 0) {
// Copy old optimized code map and append one new entry.
new_literals_map = isolate->factory()->CopyWeakFixedArrayAndGrow(
- old_literals_map, kLiteralEntryLength, AllocationType::kOld);
+ old_literals_map, kLiteralEntryLength);
entry = old_literals_map->length();
}
}
@@ -7312,8 +7312,13 @@ Handle<NumberDictionary> NumberDictionary::Set(
Isolate* isolate, Handle<NumberDictionary> dictionary, uint32_t key,
Handle<Object> value, Handle<JSObject> dictionary_holder,
PropertyDetails details) {
- dictionary->UpdateMaxNumberKey(key, dictionary_holder);
- return AtPut(isolate, dictionary, key, value, details);
+ // We could call Set with empty dictionaries. UpdateMaxNumberKey doesn't
+ // expect empty dictionaries so make sure to call AtPut that correctly handles
+ // them by creating new dictionary when required.
+ Handle<NumberDictionary> new_dictionary =
+ AtPut(isolate, dictionary, key, value, details);
+ new_dictionary->UpdateMaxNumberKey(key, dictionary_holder);
+ return new_dictionary;
}
void NumberDictionary::CopyValuesTo(FixedArray elements) {
@@ -7898,9 +7903,6 @@ void PropertyCell::SetValueWithInvalidation(Isolate* isolate,
Handle<PropertyCell> cell,
Handle<Object> new_value) {
if (cell->value() != *new_value) {
- if (FLAG_trace_protector_invalidation) {
- isolate->TraceProtectorInvalidation(cell_name);
- }
cell->set_value(*new_value);
cell->dependent_code().DeoptimizeDependentCodeGroup(
isolate, DependentCode::kPropertyCellChangedGroup);
diff --git a/deps/v8/src/objects/objects.h b/deps/v8/src/objects/objects.h
index b4e78a1937..f66023456c 100644
--- a/deps/v8/src/objects/objects.h
+++ b/deps/v8/src/objects/objects.h
@@ -46,19 +46,22 @@
// - JSArrayBufferView
// - JSTypedArray
// - JSDataView
-// - JSBoundFunction
// - JSCollection
// - JSSet
// - JSMap
+// - JSCustomElementsObject (may have elements despite empty FixedArray)
+// - JSSpecialObject (requires custom property lookup handling)
+// - JSGlobalObject
+// - JSGlobalProxy
+// - JSModuleNamespace
+// - JSPrimitiveWrapper
// - JSDate
-// - JSFunction
+// - JSFunctionOrBoundFunction
+// - JSBoundFunction
+// - JSFunction
// - JSGeneratorObject
-// - JSGlobalObject
-// - JSGlobalProxy
// - JSMapIterator
// - JSMessageObject
-// - JSModuleNamespace
-// - JSPrimitiveWrapper
// - JSRegExp
// - JSSetIterator
// - JSStringIterator
@@ -104,30 +107,32 @@
// - ScriptContextTable
// - ClosureFeedbackCellArray
// - FixedDoubleArray
-// - Name
-// - String
-// - SeqString
-// - SeqOneByteString
-// - SeqTwoByteString
-// - SlicedString
-// - ConsString
-// - ThinString
-// - ExternalString
-// - ExternalOneByteString
-// - ExternalTwoByteString
-// - InternalizedString
-// - SeqInternalizedString
-// - SeqOneByteInternalizedString
-// - SeqTwoByteInternalizedString
-// - ConsInternalizedString
-// - ExternalInternalizedString
-// - ExternalOneByteInternalizedString
-// - ExternalTwoByteInternalizedString
-// - Symbol
+// - PrimitiveHeapObject
+// - BigInt
+// - HeapNumber
+// - Name
+// - String
+// - SeqString
+// - SeqOneByteString
+// - SeqTwoByteString
+// - SlicedString
+// - ConsString
+// - ThinString
+// - ExternalString
+// - ExternalOneByteString
+// - ExternalTwoByteString
+// - InternalizedString
+// - SeqInternalizedString
+// - SeqOneByteInternalizedString
+// - SeqTwoByteInternalizedString
+// - ConsInternalizedString
+// - ExternalInternalizedString
+// - ExternalOneByteInternalizedString
+// - ExternalTwoByteInternalizedString
+// - Symbol
+// - Oddball
// - Context
// - NativeContext
-// - HeapNumber
-// - BigInt
// - Cell
// - DescriptorArray
// - PropertyCell
@@ -135,7 +140,6 @@
// - Code
// - AbstractCode, a wrapper around Code or BytecodeArray
// - Map
-// - Oddball
// - Foreign
// - SmallOrderedHashTable
// - SmallOrderedHashMap
@@ -607,15 +611,13 @@ class Object : public TaggedImpl<HeapObjectReferenceType::STRONG, Address> {
// For use with std::unordered_set.
struct Hasher {
size_t operator()(const Object o) const {
- return std::hash<v8::internal::Address>{}(o.ptr());
+ return std::hash<v8::internal::Address>{}(static_cast<Tagged_t>(o.ptr()));
}
};
// For use with std::map.
struct Comparer {
- bool operator()(const Object a, const Object b) const {
- return a.ptr() < b.ptr();
- }
+ bool operator()(const Object a, const Object b) const { return a < b; }
};
template <class T, typename std::enable_if<std::is_arithmetic<T>::value,
@@ -784,7 +786,8 @@ enum AccessorComponent { ACCESSOR_GETTER, ACCESSOR_SETTER };
enum class GetKeysConversion {
kKeepNumbers = static_cast<int>(v8::KeyConversionMode::kKeepNumbers),
- kConvertToString = static_cast<int>(v8::KeyConversionMode::kConvertToString)
+ kConvertToString = static_cast<int>(v8::KeyConversionMode::kConvertToString),
+ kNoNumbers = static_cast<int>(v8::KeyConversionMode::kNoNumbers)
};
enum class KeyCollectionMode {
diff --git a/deps/v8/src/objects/oddball.h b/deps/v8/src/objects/oddball.h
index 025f9379ba..e88d96624e 100644
--- a/deps/v8/src/objects/oddball.h
+++ b/deps/v8/src/objects/oddball.h
@@ -5,8 +5,7 @@
#ifndef V8_OBJECTS_ODDBALL_H_
#define V8_OBJECTS_ODDBALL_H_
-#include "src/objects/heap-object.h"
-#include "torque-generated/class-definitions-tq.h"
+#include "src/objects/primitive-heap-object.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
@@ -15,7 +14,7 @@ namespace v8 {
namespace internal {
// The Oddball describes objects null, undefined, true, and false.
-class Oddball : public TorqueGeneratedOddball<Oddball, HeapObject> {
+class Oddball : public TorqueGeneratedOddball<Oddball, PrimitiveHeapObject> {
public:
// [to_number_raw]: Cached raw to_number computed at startup.
inline void set_to_number_raw_as_bits(uint64_t bits);
diff --git a/deps/v8/src/objects/ordered-hash-table-inl.h b/deps/v8/src/objects/ordered-hash-table-inl.h
index a2270b0a4a..f6b8f972a9 100644
--- a/deps/v8/src/objects/ordered-hash-table-inl.h
+++ b/deps/v8/src/objects/ordered-hash-table-inl.h
@@ -164,10 +164,18 @@ inline bool OrderedHashMap::Is(Handle<HeapObject> table) {
return table->IsOrderedHashMap();
}
+inline bool OrderedNameDictionary::Is(Handle<HeapObject> table) {
+ return table->IsOrderedNameDictionary();
+}
+
inline bool SmallOrderedHashSet::Is(Handle<HeapObject> table) {
return table->IsSmallOrderedHashSet();
}
+inline bool SmallOrderedNameDictionary::Is(Handle<HeapObject> table) {
+ return table->IsSmallOrderedNameDictionary();
+}
+
inline bool SmallOrderedHashMap::Is(Handle<HeapObject> table) {
return table->IsSmallOrderedHashMap();
}
diff --git a/deps/v8/src/objects/ordered-hash-table.cc b/deps/v8/src/objects/ordered-hash-table.cc
index f0cc7b9df7..962224024e 100644
--- a/deps/v8/src/objects/ordered-hash-table.cc
+++ b/deps/v8/src/objects/ordered-hash-table.cc
@@ -168,8 +168,8 @@ Handle<FixedArray> OrderedHashSet::ConvertToKeysArray(
for (int i = 0; i < length; i++) {
int index = HashTableStartIndex() + nof_buckets + (i * kEntrySize);
Object key = table->get(index);
+ uint32_t index_value;
if (convert == GetKeysConversion::kConvertToString) {
- uint32_t index_value;
if (key.ToArrayIndex(&index_value)) {
// Avoid trashing the Number2String cache if indices get very large.
bool use_cache = i < kMaxStringTableEntries;
@@ -177,6 +177,8 @@ Handle<FixedArray> OrderedHashSet::ConvertToKeysArray(
} else {
CHECK(key.IsName());
}
+ } else if (convert == GetKeysConversion::kNoNumbers) {
+ DCHECK(!key.ToArrayIndex(&index_value));
}
result->set(i, key);
}
@@ -957,22 +959,19 @@ OrderedHashTableHandler<SmallOrderedNameDictionary,
OrderedNameDictionary>::Allocate(Isolate* isolate,
int capacity);
-#if !defined(V8_OS_WIN)
template <class SmallTable, class LargeTable>
bool OrderedHashTableHandler<SmallTable, LargeTable>::Delete(
- Handle<HeapObject> table, Handle<Object> key) {
+ Isolate* isolate, Handle<HeapObject> table, Handle<Object> key) {
if (SmallTable::Is(table)) {
- return SmallTable::Delete(Handle<SmallTable>::cast(table), key);
+ return SmallTable::Delete(isolate, *Handle<SmallTable>::cast(table), *key);
}
DCHECK(LargeTable::Is(table));
// Note: Once we migrate to the a big hash table, we never migrate
// down to a smaller hash table.
- return LargeTable::Delete(Handle<LargeTable>::cast(table), key);
+ return LargeTable::Delete(isolate, *Handle<LargeTable>::cast(table), *key);
}
-#endif
-#if !defined(V8_OS_WIN)
template <class SmallTable, class LargeTable>
bool OrderedHashTableHandler<SmallTable, LargeTable>::HasKey(
Isolate* isolate, Handle<HeapObject> table, Handle<Object> key) {
@@ -983,7 +982,6 @@ bool OrderedHashTableHandler<SmallTable, LargeTable>::HasKey(
DCHECK(LargeTable::Is(table));
return LargeTable::HasKey(isolate, LargeTable::cast(*table), *key);
}
-#endif
template bool
OrderedHashTableHandler<SmallOrderedHashSet, OrderedHashSet>::HasKey(
@@ -992,6 +990,18 @@ template bool
OrderedHashTableHandler<SmallOrderedHashMap, OrderedHashMap>::HasKey(
Isolate* isolate, Handle<HeapObject> table, Handle<Object> key);
+template bool
+OrderedHashTableHandler<SmallOrderedHashSet, OrderedHashSet>::Delete(
+ Isolate* isolate, Handle<HeapObject> table, Handle<Object> key);
+template bool
+OrderedHashTableHandler<SmallOrderedHashMap, OrderedHashMap>::Delete(
+ Isolate* isolate, Handle<HeapObject> table, Handle<Object> key);
+template bool
+OrderedHashTableHandler<SmallOrderedNameDictionary,
+ OrderedNameDictionary>::Delete(Isolate* isolate,
+ Handle<HeapObject> table,
+ Handle<Object> key);
+
MaybeHandle<OrderedHashMap> OrderedHashMapHandler::AdjustRepresentation(
Isolate* isolate, Handle<SmallOrderedHashMap> table) {
MaybeHandle<OrderedHashMap> new_table_candidate =
diff --git a/deps/v8/src/objects/ordered-hash-table.h b/deps/v8/src/objects/ordered-hash-table.h
index 21decaeba7..590846f130 100644
--- a/deps/v8/src/objects/ordered-hash-table.h
+++ b/deps/v8/src/objects/ordered-hash-table.h
@@ -658,7 +658,8 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) OrderedHashTableHandler {
using Entry = int;
static MaybeHandle<HeapObject> Allocate(Isolate* isolate, int capacity);
- static bool Delete(Handle<HeapObject> table, Handle<Object> key);
+ static bool Delete(Isolate* isolate, Handle<HeapObject> table,
+ Handle<Object> key);
static bool HasKey(Isolate* isolate, Handle<HeapObject> table,
Handle<Object> key);
@@ -730,6 +731,7 @@ class OrderedNameDictionary
static HeapObject GetEmpty(ReadOnlyRoots ro_roots);
static inline RootIndex GetMapRootIndex();
+ static inline bool Is(Handle<HeapObject> table);
static const int kValueOffset = 1;
static const int kPropertyDetailsOffset = 2;
@@ -831,6 +833,7 @@ class SmallOrderedNameDictionary
Object value, PropertyDetails details);
static inline RootIndex GetMapRootIndex();
+ static inline bool Is(Handle<HeapObject> table);
OBJECT_CONSTRUCTORS(SmallOrderedNameDictionary,
SmallOrderedHashTable<SmallOrderedNameDictionary>);
diff --git a/deps/v8/src/objects/osr-optimized-code-cache-inl.h b/deps/v8/src/objects/osr-optimized-code-cache-inl.h
new file mode 100644
index 0000000000..ab7a97b6aa
--- /dev/null
+++ b/deps/v8/src/objects/osr-optimized-code-cache-inl.h
@@ -0,0 +1,25 @@
+// Copyright 2019 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_OBJECTS_OSR_OPTIMIZED_CODE_CACHE_INL_H_
+#define V8_OBJECTS_OSR_OPTIMIZED_CODE_CACHE_INL_H_
+
+#include "src/objects/osr-optimized-code-cache.h"
+
+#include "src/objects/fixed-array-inl.h"
+// Has to be the last include (doesn't have include guards):
+#include "src/objects/object-macros.h"
+
+namespace v8 {
+namespace internal {
+
+OBJECT_CONSTRUCTORS_IMPL(OSROptimizedCodeCache, WeakFixedArray)
+CAST_ACCESSOR(OSROptimizedCodeCache)
+
+} // namespace internal
+} // namespace v8
+
+#include "src/objects/object-macros-undef.h"
+
+#endif // V8_OBJECTS_OSR_OPTIMIZED_CODE_CACHE_INL_H_
diff --git a/deps/v8/src/objects/osr-optimized-code-cache.cc b/deps/v8/src/objects/osr-optimized-code-cache.cc
new file mode 100644
index 0000000000..62190529f1
--- /dev/null
+++ b/deps/v8/src/objects/osr-optimized-code-cache.cc
@@ -0,0 +1,223 @@
+// Copyright 2019 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/execution/isolate-inl.h"
+#include "src/objects/code.h"
+#include "src/objects/maybe-object.h"
+#include "src/objects/shared-function-info.h"
+
+#include "src/objects/osr-optimized-code-cache.h"
+
+namespace v8 {
+namespace internal {
+
+const int OSROptimizedCodeCache::kInitialLength;
+const int OSROptimizedCodeCache::kMaxLength;
+
+void OSROptimizedCodeCache::AddOptimizedCode(
+ Handle<NativeContext> native_context, Handle<SharedFunctionInfo> shared,
+ Handle<Code> code, BailoutId osr_offset) {
+ DCHECK(!osr_offset.IsNone());
+ DCHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION);
+ STATIC_ASSERT(kEntryLength == 3);
+ Isolate* isolate = native_context->GetIsolate();
+ DCHECK(!isolate->serializer_enabled());
+
+ Handle<OSROptimizedCodeCache> osr_cache(
+ native_context->GetOSROptimizedCodeCache(), isolate);
+
+ DCHECK_EQ(osr_cache->FindEntry(shared, osr_offset), -1);
+ int entry = -1;
+ for (int index = 0; index < osr_cache->length(); index += kEntryLength) {
+ if (osr_cache->Get(index + kSharedOffset)->IsCleared() ||
+ osr_cache->Get(index + kCachedCodeOffset)->IsCleared()) {
+ entry = index;
+ break;
+ }
+ }
+
+ if (entry == -1 && osr_cache->length() + kEntryLength <= kMaxLength) {
+ entry = GrowOSRCache(native_context, &osr_cache);
+ } else if (entry == -1) {
+ // We reached max capacity and cannot grow further. Reuse an existing entry.
+ // TODO(mythria): We could use better mechanisms (like lru) to replace
+ // existing entries. Though we don't expect this to be a common case, so
+ // for now choosing to replace the first entry.
+ entry = 0;
+ }
+
+ osr_cache->InitializeEntry(entry, *shared, *code, osr_offset);
+}
+
+void OSROptimizedCodeCache::Clear(NativeContext native_context) {
+ native_context.set_osr_code_cache(
+ *native_context.GetIsolate()->factory()->empty_weak_fixed_array());
+}
+
+void OSROptimizedCodeCache::Compact(Handle<NativeContext> native_context) {
+ Handle<OSROptimizedCodeCache> osr_cache(
+ native_context->GetOSROptimizedCodeCache(), native_context->GetIsolate());
+ Isolate* isolate = native_context->GetIsolate();
+
+ // Re-adjust the cache so all the valid entries are on one side. This will
+ // enable us to compress the cache if needed.
+ int curr_valid_index = 0;
+ for (int curr_index = 0; curr_index < osr_cache->length();
+ curr_index += kEntryLength) {
+ if (osr_cache->Get(curr_index + kSharedOffset)->IsCleared() ||
+ osr_cache->Get(curr_index + kCachedCodeOffset)->IsCleared()) {
+ continue;
+ }
+ if (curr_valid_index != curr_index) {
+ osr_cache->MoveEntry(curr_index, curr_valid_index, isolate);
+ }
+ curr_valid_index += kEntryLength;
+ }
+
+ if (!NeedsTrimming(curr_valid_index, osr_cache->length())) return;
+
+ Handle<OSROptimizedCodeCache> new_osr_cache =
+ Handle<OSROptimizedCodeCache>::cast(isolate->factory()->NewWeakFixedArray(
+ CapacityForLength(curr_valid_index), AllocationType::kOld));
+ DCHECK_LT(new_osr_cache->length(), osr_cache->length());
+ {
+ DisallowHeapAllocation no_gc;
+ new_osr_cache->CopyElements(native_context->GetIsolate(), 0, *osr_cache, 0,
+ new_osr_cache->length(),
+ new_osr_cache->GetWriteBarrierMode(no_gc));
+ }
+ native_context->set_osr_code_cache(*new_osr_cache);
+}
+
+Code OSROptimizedCodeCache::GetOptimizedCode(Handle<SharedFunctionInfo> shared,
+ BailoutId osr_offset,
+ Isolate* isolate) {
+ DisallowHeapAllocation no_gc;
+ int index = FindEntry(shared, osr_offset);
+ if (index == -1) return Code();
+ Code code = GetCodeFromEntry(index);
+ if (code.is_null()) {
+ ClearEntry(index, isolate);
+ return code;
+ }
+ DCHECK(code.is_optimized_code() && !code.marked_for_deoptimization());
+ return code;
+}
+
+void OSROptimizedCodeCache::EvictMarkedCode(Isolate* isolate) {
+ // This is called from DeoptimizeMarkedCodeForContext that uses raw pointers
+ // and hence the DisallowHeapAllocation scope here.
+ DisallowHeapAllocation no_gc;
+ for (int index = 0; index < length(); index += kEntryLength) {
+ MaybeObject code_entry = Get(index + kCachedCodeOffset);
+ HeapObject heap_object;
+ if (!code_entry->GetHeapObject(&heap_object)) continue;
+
+ DCHECK(heap_object.IsCode());
+ DCHECK(Code::cast(heap_object).is_optimized_code());
+ if (!Code::cast(heap_object).marked_for_deoptimization()) continue;
+
+ ClearEntry(index, isolate);
+ }
+}
+
+int OSROptimizedCodeCache::GrowOSRCache(
+ Handle<NativeContext> native_context,
+ Handle<OSROptimizedCodeCache>* osr_cache) {
+ Isolate* isolate = native_context->GetIsolate();
+ int old_length = (*osr_cache)->length();
+ int grow_by = CapacityForLength(old_length) - old_length;
+ DCHECK_GT(grow_by, kEntryLength);
+ *osr_cache = Handle<OSROptimizedCodeCache>::cast(
+ isolate->factory()->CopyWeakFixedArrayAndGrow(*osr_cache, grow_by));
+ for (int i = old_length; i < (*osr_cache)->length(); i++) {
+ (*osr_cache)->Set(i, HeapObjectReference::ClearedValue(isolate));
+ }
+ native_context->set_osr_code_cache(**osr_cache);
+
+ return old_length;
+}
+
+Code OSROptimizedCodeCache::GetCodeFromEntry(int index) {
+ DCHECK_LE(index + OSRCodeCacheConstants::kEntryLength, length());
+ DCHECK_EQ(index % kEntryLength, 0);
+ HeapObject code_entry;
+ Get(index + OSRCodeCacheConstants::kCachedCodeOffset)
+ ->GetHeapObject(&code_entry);
+ return code_entry.is_null() ? Code() : Code::cast(code_entry);
+}
+
+SharedFunctionInfo OSROptimizedCodeCache::GetSFIFromEntry(int index) {
+ DCHECK_LE(index + OSRCodeCacheConstants::kEntryLength, length());
+ DCHECK_EQ(index % kEntryLength, 0);
+ HeapObject sfi_entry;
+ Get(index + OSRCodeCacheConstants::kSharedOffset)->GetHeapObject(&sfi_entry);
+ return sfi_entry.is_null() ? SharedFunctionInfo()
+ : SharedFunctionInfo::cast(sfi_entry);
+}
+
+BailoutId OSROptimizedCodeCache::GetBailoutIdFromEntry(int index) {
+ DCHECK_LE(index + OSRCodeCacheConstants::kEntryLength, length());
+ DCHECK_EQ(index % kEntryLength, 0);
+ Smi osr_offset_entry;
+ Get(index + kOsrIdOffset)->ToSmi(&osr_offset_entry);
+ return BailoutId(osr_offset_entry.value());
+}
+
+int OSROptimizedCodeCache::FindEntry(Handle<SharedFunctionInfo> shared,
+ BailoutId osr_offset) {
+ DisallowHeapAllocation no_gc;
+ DCHECK(!osr_offset.IsNone());
+ for (int index = 0; index < length(); index += kEntryLength) {
+ if (GetSFIFromEntry(index) != *shared) continue;
+ if (GetBailoutIdFromEntry(index) != osr_offset) continue;
+ return index;
+ }
+ return -1;
+}
+
+void OSROptimizedCodeCache::ClearEntry(int index, Isolate* isolate) {
+ Set(index + OSRCodeCacheConstants::kSharedOffset,
+ HeapObjectReference::ClearedValue(isolate));
+ Set(index + OSRCodeCacheConstants::kCachedCodeOffset,
+ HeapObjectReference::ClearedValue(isolate));
+ Set(index + OSRCodeCacheConstants::kOsrIdOffset,
+ HeapObjectReference::ClearedValue(isolate));
+}
+
+void OSROptimizedCodeCache::InitializeEntry(int entry,
+ SharedFunctionInfo shared,
+ Code code, BailoutId osr_offset) {
+ Set(entry + OSRCodeCacheConstants::kSharedOffset,
+ HeapObjectReference::Weak(shared));
+ Set(entry + OSRCodeCacheConstants::kCachedCodeOffset,
+ HeapObjectReference::Weak(code));
+ Set(entry + OSRCodeCacheConstants::kOsrIdOffset,
+ MaybeObject::FromSmi(Smi::FromInt(osr_offset.ToInt())));
+}
+
+void OSROptimizedCodeCache::MoveEntry(int src, int dst, Isolate* isolate) {
+ Set(dst + OSRCodeCacheConstants::kSharedOffset,
+ Get(src + OSRCodeCacheConstants::kSharedOffset));
+ Set(dst + OSRCodeCacheConstants::kCachedCodeOffset,
+ Get(src + OSRCodeCacheConstants::kCachedCodeOffset));
+ Set(dst + OSRCodeCacheConstants::kOsrIdOffset, Get(src + kOsrIdOffset));
+ ClearEntry(src, isolate);
+}
+
+int OSROptimizedCodeCache::CapacityForLength(int curr_length) {
+ // TODO(mythria): This is a randomly chosen heuristic and is not based on any
+ // data. We may have to tune this later.
+ if (curr_length == 0) return kInitialLength;
+ if (curr_length * 2 > kMaxLength) return kMaxLength;
+ return curr_length * 2;
+}
+
+bool OSROptimizedCodeCache::NeedsTrimming(int num_valid_entries,
+ int curr_length) {
+ return curr_length > kInitialLength && curr_length > num_valid_entries * 3;
+}
+
+} // namespace internal
+} // namespace v8
diff --git a/deps/v8/src/objects/osr-optimized-code-cache.h b/deps/v8/src/objects/osr-optimized-code-cache.h
new file mode 100644
index 0000000000..99c148a7e1
--- /dev/null
+++ b/deps/v8/src/objects/osr-optimized-code-cache.h
@@ -0,0 +1,77 @@
+// Copyright 2019 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_OBJECTS_OSR_OPTIMIZED_CODE_CACHE_H_
+#define V8_OBJECTS_OSR_OPTIMIZED_CODE_CACHE_H_
+
+#include "src/objects/fixed-array.h"
+// Has to be the last include (doesn't have include guards):
+#include "src/objects/object-macros.h"
+
+namespace v8 {
+namespace internal {
+
+class V8_EXPORT OSROptimizedCodeCache : public WeakFixedArray {
+ public:
+ DECL_CAST(OSROptimizedCodeCache)
+
+ enum OSRCodeCacheConstants {
+ kSharedOffset,
+ kCachedCodeOffset,
+ kOsrIdOffset,
+ kEntryLength
+ };
+
+ static const int kInitialLength = OSRCodeCacheConstants::kEntryLength * 4;
+ static const int kMaxLength = OSRCodeCacheConstants::kEntryLength * 1024;
+
+ // Caches the optimized code |code| corresponding to the shared function
+ // |shared| and bailout id |osr_offset| in the OSROptimized code cache.
+ // If the OSR code cache wasn't created before it creates a code cache with
+ // kOSRCodeCacheInitialLength entries.
+ static void AddOptimizedCode(Handle<NativeContext> context,
+ Handle<SharedFunctionInfo> shared,
+ Handle<Code> code, BailoutId osr_offset);
+ // Reduces the size of the OSR code cache if the number of valid entries are
+ // less than the current capacity of the cache.
+ static void Compact(Handle<NativeContext> context);
+ // Sets the OSR optimized code cache to an empty array.
+ static void Clear(NativeContext context);
+
+ // Returns the code corresponding to the shared function |shared| and
+ // BailoutId |offset| if an entry exists in the cache. Returns an empty
+ // object otherwise.
+ Code GetOptimizedCode(Handle<SharedFunctionInfo> shared, BailoutId osr_offset,
+ Isolate* isolate);
+
+ // Remove all code objects marked for deoptimization from OSR code cache.
+ void EvictMarkedCode(Isolate* isolate);
+
+ private:
+ // Functions that implement heuristics on when to grow / shrink the cache.
+ static int CapacityForLength(int curr_capacity);
+ static bool NeedsTrimming(int num_valid_entries, int curr_capacity);
+ static int GrowOSRCache(Handle<NativeContext> native_context,
+ Handle<OSROptimizedCodeCache>* osr_cache);
+
+ // Helper functions to get individual items from an entry in the cache.
+ Code GetCodeFromEntry(int index);
+ SharedFunctionInfo GetSFIFromEntry(int index);
+ BailoutId GetBailoutIdFromEntry(int index);
+
+ inline int FindEntry(Handle<SharedFunctionInfo> shared, BailoutId osr_offset);
+ inline void ClearEntry(int src, Isolate* isolate);
+ inline void InitializeEntry(int entry, SharedFunctionInfo shared, Code code,
+ BailoutId osr_offset);
+ inline void MoveEntry(int src, int dst, Isolate* isolate);
+
+ OBJECT_CONSTRUCTORS(OSROptimizedCodeCache, WeakFixedArray);
+};
+
+} // namespace internal
+} // namespace v8
+
+#include "src/objects/object-macros-undef.h"
+
+#endif // V8_OBJECTS_OSR_OPTIMIZED_CODE_CACHE_H_
diff --git a/deps/v8/src/objects/primitive-heap-object-inl.h b/deps/v8/src/objects/primitive-heap-object-inl.h
new file mode 100644
index 0000000000..2c694bd1d6
--- /dev/null
+++ b/deps/v8/src/objects/primitive-heap-object-inl.h
@@ -0,0 +1,26 @@
+// Copyright 2019 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_OBJECTS_PRIMITIVE_HEAP_OBJECT_INL_H_
+#define V8_OBJECTS_PRIMITIVE_HEAP_OBJECT_INL_H_
+
+#include "src/objects/primitive-heap-object.h"
+
+#include "src/objects/heap-object-inl.h"
+#include "torque-generated/class-definitions-tq-inl.h"
+
+// Has to be the last include (doesn't have include guards):
+#include "src/objects/object-macros.h"
+
+namespace v8 {
+namespace internal {
+
+TQ_OBJECT_CONSTRUCTORS_IMPL(PrimitiveHeapObject)
+
+} // namespace internal
+} // namespace v8
+
+#include "src/objects/object-macros-undef.h"
+
+#endif // V8_OBJECTS_PRIMITIVE_HEAP_OBJECT_INL_H_
diff --git a/deps/v8/src/objects/primitive-heap-object.h b/deps/v8/src/objects/primitive-heap-object.h
new file mode 100644
index 0000000000..9bd13cafc9
--- /dev/null
+++ b/deps/v8/src/objects/primitive-heap-object.h
@@ -0,0 +1,33 @@
+// Copyright 2019 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_OBJECTS_PRIMITIVE_HEAP_OBJECT_H_
+#define V8_OBJECTS_PRIMITIVE_HEAP_OBJECT_H_
+
+#include "src/objects/heap-object.h"
+#include "torque-generated/class-definitions-tq.h"
+
+// Has to be the last include (doesn't have include guards):
+#include "src/objects/object-macros.h"
+
+namespace v8 {
+namespace internal {
+
+// An abstract superclass for classes representing JavaScript primitive values
+// other than Smi. It doesn't carry any functionality but allows primitive
+// classes to be identified in the type system.
+class PrimitiveHeapObject
+ : public TorqueGeneratedPrimitiveHeapObject<PrimitiveHeapObject,
+ HeapObject> {
+ public:
+ STATIC_ASSERT(kHeaderSize == HeapObject::kHeaderSize);
+ TQ_OBJECT_CONSTRUCTORS(PrimitiveHeapObject)
+};
+
+} // namespace internal
+} // namespace v8
+
+#include "src/objects/object-macros-undef.h"
+
+#endif // V8_OBJECTS_PRIMITIVE_HEAP_OBJECT_H_
diff --git a/deps/v8/src/objects/property-descriptor.cc b/deps/v8/src/objects/property-descriptor.cc
index b3b05deceb..c5cfe8c9a9 100644
--- a/deps/v8/src/objects/property-descriptor.cc
+++ b/deps/v8/src/objects/property-descriptor.cc
@@ -58,7 +58,7 @@ bool ToPropertyDescriptorFastPath(Isolate* isolate, Handle<JSReceiver> obj,
if (map.is_dictionary_map()) return false;
Handle<DescriptorArray> descs =
Handle<DescriptorArray>(map.instance_descriptors(), isolate);
- for (int i = 0; i < map.NumberOfOwnDescriptors(); i++) {
+ for (InternalIndex i : map.IterateOwnDescriptors()) {
PropertyDetails details = descs->GetDetails(i);
Name key = descs->GetKey(i);
Handle<Object> value;
diff --git a/deps/v8/src/objects/property-details.h b/deps/v8/src/objects/property-details.h
index e350fe2c27..51318f475a 100644
--- a/deps/v8/src/objects/property-details.h
+++ b/deps/v8/src/objects/property-details.h
@@ -112,7 +112,19 @@ class Representation {
// smi and tagged values. Doubles, however, would require a box allocation.
if (IsNone()) return !other.IsDouble();
if (!FLAG_modify_field_representation_inplace) return false;
- return (IsSmi() || IsHeapObject()) && other.IsTagged();
+ return (IsSmi() || (!FLAG_unbox_double_fields && IsDouble()) ||
+ IsHeapObject()) &&
+ other.IsTagged();
+ }
+
+ // Return the most generic representation that this representation can be
+ // changed to in-place. If in-place representation changes are disabled, then
+ // this will return the current representation.
+ Representation MostGenericInPlaceChange() const {
+ if (!FLAG_modify_field_representation_inplace) return *this;
+ // Everything but unboxed doubles can be in-place changed to Tagged.
+ if (FLAG_unbox_double_fields && IsDouble()) return Representation::Double();
+ return Representation::Tagged();
}
bool is_more_general_than(const Representation& other) const {
diff --git a/deps/v8/src/objects/scope-info.cc b/deps/v8/src/objects/scope-info.cc
index c390298b5d..65a26e5d98 100644
--- a/deps/v8/src/objects/scope-info.cc
+++ b/deps/v8/src/objects/scope-info.cc
@@ -138,6 +138,10 @@ Handle<ScopeInfo> ScopeInfo::Create(Isolate* isolate, Zone* zone, Scope* scope,
const bool has_brand = scope->is_class_scope()
? scope->AsClassScope()->brand() != nullptr
: false;
+ const bool should_save_class_variable_index =
+ scope->is_class_scope()
+ ? scope->AsClassScope()->should_save_class_variable_index()
+ : false;
const bool has_function_name = function_name_info != NONE;
const bool has_position_info = NeedsPositionInfo(scope->scope_type());
const bool has_receiver = receiver_info == STACK || receiver_info == CONTEXT;
@@ -146,7 +150,9 @@ Handle<ScopeInfo> ScopeInfo::Create(Isolate* isolate, Zone* zone, Scope* scope,
? scope->AsDeclarationScope()->num_parameters()
: 0;
const bool has_outer_scope_info = !outer_scope.is_null();
+
const int length = kVariablePartIndex + 2 * context_local_count +
+ (should_save_class_variable_index ? 1 : 0) +
(has_receiver ? 1 : 0) +
(has_function_name ? kFunctionNameEntries : 0) +
(has_inferred_function_name ? 1 : 0) +
@@ -187,6 +193,8 @@ Handle<ScopeInfo> ScopeInfo::Create(Isolate* isolate, Zone* zone, Scope* scope,
DeclarationScopeField::encode(scope->is_declaration_scope()) |
ReceiverVariableField::encode(receiver_info) |
HasClassBrandField::encode(has_brand) |
+ HasSavedClassVariableIndexField::encode(
+ should_save_class_variable_index) |
HasNewTargetField::encode(has_new_target) |
FunctionVariableField::encode(function_name_info) |
HasInferredFunctionNameField::encode(has_inferred_function_name) |
@@ -196,7 +204,9 @@ Handle<ScopeInfo> ScopeInfo::Create(Isolate* isolate, Zone* zone, Scope* scope,
HasOuterScopeInfoField::encode(has_outer_scope_info) |
IsDebugEvaluateScopeField::encode(scope->is_debug_evaluate_scope()) |
ForceContextAllocationField::encode(
- scope->ForceContextForLanguageMode());
+ scope->ForceContextForLanguageMode()) |
+ PrivateNameLookupSkipsOuterClassField::encode(
+ scope->private_name_lookup_skips_outer_class());
scope_info.SetFlags(flags);
scope_info.SetParameterCount(parameter_count);
@@ -220,7 +230,8 @@ Handle<ScopeInfo> ScopeInfo::Create(Isolate* isolate, Zone* zone, Scope* scope,
VariableModeField::encode(var->mode()) |
InitFlagField::encode(var->initialization_flag()) |
MaybeAssignedFlagField::encode(var->maybe_assigned()) |
- ParameterNumberField::encode(ParameterNumberField::kMax);
+ ParameterNumberField::encode(ParameterNumberField::kMax) |
+ IsStaticFlagField::encode(var->is_static_flag());
scope_info.set(context_local_base + local_index, *var->name(), mode);
scope_info.set(context_local_info_base + local_index,
Smi::FromInt(info));
@@ -235,7 +246,8 @@ Handle<ScopeInfo> ScopeInfo::Create(Isolate* isolate, Zone* zone, Scope* scope,
VariableModeField::encode(var->mode()) |
InitFlagField::encode(var->initialization_flag()) |
MaybeAssignedFlagField::encode(var->maybe_assigned()) |
- ParameterNumberField::encode(ParameterNumberField::kMax);
+ ParameterNumberField::encode(ParameterNumberField::kMax) |
+ IsStaticFlagField::encode(var->is_static_flag());
scope_info.set(module_var_entry + kModuleVariablePropertiesOffset,
Smi::FromInt(properties));
module_var_entry += kModuleVariableEntryLength;
@@ -273,7 +285,8 @@ Handle<ScopeInfo> ScopeInfo::Create(Isolate* isolate, Zone* zone, Scope* scope,
VariableModeField::encode(var->mode()) |
InitFlagField::encode(var->initialization_flag()) |
MaybeAssignedFlagField::encode(var->maybe_assigned()) |
- ParameterNumberField::encode(ParameterNumberField::kMax);
+ ParameterNumberField::encode(ParameterNumberField::kMax) |
+ IsStaticFlagField::encode(var->is_static_flag());
scope_info.set(context_local_base + local_index, *var->name(), mode);
scope_info.set(context_local_info_base + local_index,
Smi::FromInt(info));
@@ -283,6 +296,16 @@ Handle<ScopeInfo> ScopeInfo::Create(Isolate* isolate, Zone* zone, Scope* scope,
index += 2 * context_local_count;
+ DCHECK_EQ(index, scope_info.SavedClassVariableInfoIndex());
+ // If the scope is a class scope and has used static private methods, save
+ // the context slot index of the class variable.
+ // Store the class variable index.
+ if (should_save_class_variable_index) {
+ Variable* class_variable = scope->AsClassScope()->class_variable();
+ DCHECK_EQ(class_variable->location(), VariableLocation::CONTEXT);
+ scope_info.set(index++, Smi::FromInt(class_variable->index()));
+ }
+
// If the receiver is allocated, add its index.
DCHECK_EQ(index, scope_info.ReceiverInfoIndex());
if (has_receiver) {
@@ -362,11 +385,14 @@ Handle<ScopeInfo> ScopeInfo::CreateForWithScope(
LanguageModeField::encode(LanguageMode::kSloppy) |
DeclarationScopeField::encode(false) |
ReceiverVariableField::encode(NONE) | HasClassBrandField::encode(false) |
+ HasSavedClassVariableIndexField::encode(false) |
HasNewTargetField::encode(false) | FunctionVariableField::encode(NONE) |
IsAsmModuleField::encode(false) | HasSimpleParametersField::encode(true) |
FunctionKindField::encode(kNormalFunction) |
HasOuterScopeInfoField::encode(has_outer_scope_info) |
- IsDebugEvaluateScopeField::encode(false);
+ IsDebugEvaluateScopeField::encode(false) |
+ ForceContextAllocationField::encode(false) |
+ PrivateNameLookupSkipsOuterClassField::encode(false);
scope_info->SetFlags(flags);
scope_info->SetParameterCount(0);
@@ -425,13 +451,17 @@ Handle<ScopeInfo> ScopeInfo::CreateForBootstrapping(Isolate* isolate,
LanguageModeField::encode(LanguageMode::kSloppy) |
DeclarationScopeField::encode(true) |
ReceiverVariableField::encode(is_empty_function ? UNUSED : CONTEXT) |
- HasClassBrandField::encode(false) | HasNewTargetField::encode(false) |
+ HasClassBrandField::encode(false) |
+ HasSavedClassVariableIndexField::encode(false) |
+ HasNewTargetField::encode(false) |
FunctionVariableField::encode(is_empty_function ? UNUSED : NONE) |
HasInferredFunctionNameField::encode(has_inferred_function_name) |
IsAsmModuleField::encode(false) | HasSimpleParametersField::encode(true) |
FunctionKindField::encode(FunctionKind::kNormalFunction) |
HasOuterScopeInfoField::encode(false) |
- IsDebugEvaluateScopeField::encode(false);
+ IsDebugEvaluateScopeField::encode(false) |
+ ForceContextAllocationField::encode(false) |
+ PrivateNameLookupSkipsOuterClassField::encode(false);
scope_info->SetFlags(flags);
scope_info->SetParameterCount(parameter_count);
scope_info->SetContextLocalCount(context_local_count);
@@ -449,7 +479,8 @@ Handle<ScopeInfo> ScopeInfo::CreateForBootstrapping(Isolate* isolate,
VariableModeField::encode(VariableMode::kConst) |
InitFlagField::encode(kCreatedInitialized) |
MaybeAssignedFlagField::encode(kNotAssigned) |
- ParameterNumberField::encode(ParameterNumberField::kMax);
+ ParameterNumberField::encode(ParameterNumberField::kMax) |
+ IsStaticFlagField::encode(IsStaticFlag::kNotStatic);
scope_info->set(index++, Smi::FromInt(value));
}
@@ -549,6 +580,10 @@ bool ScopeInfo::HasClassBrand() const {
return HasClassBrandField::decode(Flags());
}
+bool ScopeInfo::HasSavedClassVariableIndex() const {
+ return HasSavedClassVariableIndexField::decode(Flags());
+}
+
bool ScopeInfo::HasNewTarget() const {
return HasNewTargetField::decode(Flags());
}
@@ -608,6 +643,11 @@ void ScopeInfo::SetIsDebugEvaluateScope() {
}
}
+bool ScopeInfo::PrivateNameLookupSkipsOuterClass() const {
+ if (length() == 0) return false;
+ return PrivateNameLookupSkipsOuterClassField::decode(Flags());
+}
+
bool ScopeInfo::HasContext() const { return ContextLength() > 0; }
Object ScopeInfo::FunctionName() const {
@@ -674,6 +714,14 @@ VariableMode ScopeInfo::ContextLocalMode(int var) const {
return VariableModeField::decode(value);
}
+IsStaticFlag ScopeInfo::ContextLocalIsStaticFlag(int var) const {
+ DCHECK_LE(0, var);
+ DCHECK_LT(var, ContextLocalCount());
+ int info_index = ContextLocalInfosIndex() + var;
+ int value = Smi::ToInt(get(info_index));
+ return IsStaticFlagField::decode(value);
+}
+
InitializationFlag ScopeInfo::ContextLocalInitFlag(int var) const {
DCHECK_LE(0, var);
DCHECK_LT(var, ContextLocalCount());
@@ -744,7 +792,8 @@ int ScopeInfo::ModuleIndex(String name, VariableMode* mode,
int ScopeInfo::ContextSlotIndex(ScopeInfo scope_info, String name,
VariableMode* mode,
InitializationFlag* init_flag,
- MaybeAssignedFlag* maybe_assigned_flag) {
+ MaybeAssignedFlag* maybe_assigned_flag,
+ IsStaticFlag* is_static_flag) {
DisallowHeapAllocation no_gc;
DCHECK(name.IsInternalizedString());
DCHECK_NOT_NULL(mode);
@@ -759,6 +808,7 @@ int ScopeInfo::ContextSlotIndex(ScopeInfo scope_info, String name,
if (name != scope_info.get(i)) continue;
int var = i - start;
*mode = scope_info.ContextLocalMode(var);
+ *is_static_flag = scope_info.ContextLocalIsStaticFlag(var);
*init_flag = scope_info.ContextLocalInitFlag(var);
*maybe_assigned_flag = scope_info.ContextLocalMaybeAssignedFlag(var);
int result = Context::MIN_CONTEXT_SLOTS + var;
@@ -770,6 +820,14 @@ int ScopeInfo::ContextSlotIndex(ScopeInfo scope_info, String name,
return -1;
}
+int ScopeInfo::SavedClassVariableContextLocalIndex() const {
+ if (length() > 0 && HasSavedClassVariableIndexField::decode(Flags())) {
+ int index = Smi::ToInt(get(SavedClassVariableInfoIndex()));
+ return index - Context::MIN_CONTEXT_SLOTS;
+ }
+ return -1;
+}
+
int ScopeInfo::ReceiverContextSlotIndex() const {
if (length() > 0 && ReceiverVariableField::decode(Flags()) == CONTEXT) {
return Smi::ToInt(get(ReceiverInfoIndex()));
@@ -801,10 +859,14 @@ int ScopeInfo::ContextLocalInfosIndex() const {
return ContextLocalNamesIndex() + ContextLocalCount();
}
-int ScopeInfo::ReceiverInfoIndex() const {
+int ScopeInfo::SavedClassVariableInfoIndex() const {
return ContextLocalInfosIndex() + ContextLocalCount();
}
+int ScopeInfo::ReceiverInfoIndex() const {
+ return SavedClassVariableInfoIndex() + (HasSavedClassVariableIndex() ? 1 : 0);
+}
+
int ScopeInfo::FunctionNameInfoIndex() const {
return ReceiverInfoIndex() + (HasAllocatedReceiver() ? 1 : 0);
}
@@ -879,9 +941,10 @@ std::ostream& operator<<(std::ostream& os,
}
Handle<SourceTextModuleInfoEntry> SourceTextModuleInfoEntry::New(
- Isolate* isolate, Handle<HeapObject> export_name,
- Handle<HeapObject> local_name, Handle<HeapObject> import_name,
- int module_request, int cell_index, int beg_pos, int end_pos) {
+ Isolate* isolate, Handle<PrimitiveHeapObject> export_name,
+ Handle<PrimitiveHeapObject> local_name,
+ Handle<PrimitiveHeapObject> import_name, int module_request, int cell_index,
+ int beg_pos, int end_pos) {
Handle<SourceTextModuleInfoEntry> result =
Handle<SourceTextModuleInfoEntry>::cast(isolate->factory()->NewStruct(
SOURCE_TEXT_MODULE_INFO_ENTRY_TYPE, AllocationType::kOld));
diff --git a/deps/v8/src/objects/scope-info.h b/deps/v8/src/objects/scope-info.h
index 123b9b1797..aa63cf2998 100644
--- a/deps/v8/src/objects/scope-info.h
+++ b/deps/v8/src/objects/scope-info.h
@@ -72,6 +72,10 @@ class ScopeInfo : public FixedArray {
// Does this scope has class brand (for private methods)?
bool HasClassBrand() const;
+ // Does this scope contain a saved class variable context local slot index
+ // for checking receivers of static private methods?
+ bool HasSavedClassVariableIndex() const;
+
// Does this scope declare a "new.target" binding?
bool HasNewTarget() const;
@@ -121,6 +125,9 @@ class ScopeInfo : public FixedArray {
// Return the mode of the given context local.
VariableMode ContextLocalMode(int var) const;
+ // Return whether the given context local variable is static.
+ IsStaticFlag ContextLocalIsStaticFlag(int var) const;
+
// Return the initialization flag of the given context local.
InitializationFlag ContextLocalInitFlag(int var) const;
@@ -141,7 +148,8 @@ class ScopeInfo : public FixedArray {
// mode for that variable.
static int ContextSlotIndex(ScopeInfo scope_info, String name,
VariableMode* mode, InitializationFlag* init_flag,
- MaybeAssignedFlag* maybe_assigned_flag);
+ MaybeAssignedFlag* maybe_assigned_flag,
+ IsStaticFlag* is_static_flag);
// Lookup metadata of a MODULE-allocated variable. Return 0 if there is no
// module variable with the given name (the index value of a MODULE variable
@@ -161,6 +169,12 @@ class ScopeInfo : public FixedArray {
// context-allocated. Otherwise returns a value < 0.
int ReceiverContextSlotIndex() const;
+ // Lookup support for serialized scope info. Returns the index of the
+ // saved class variable in context local slots if the scope is a class scope
+ // and it contains static private methods that may be accessed.
+ // Otherwise returns a value < 0.
+ int SavedClassVariableContextLocalIndex() const;
+
FunctionKind function_kind() const;
// Returns true if this ScopeInfo is linked to a outer ScopeInfo.
@@ -176,6 +190,10 @@ class ScopeInfo : public FixedArray {
// Return the outer ScopeInfo if present.
ScopeInfo OuterScopeInfo() const;
+ // Returns true if this ScopeInfo was created for a scope that skips the
+ // closest outer class when resolving private names.
+ bool PrivateNameLookupSkipsOuterClass() const;
+
#ifdef DEBUG
bool Equals(ScopeInfo other) const;
#endif
@@ -228,7 +246,8 @@ class ScopeInfo : public FixedArray {
using ReceiverVariableField =
DeclarationScopeField::Next<VariableAllocationInfo, 2>;
using HasClassBrandField = ReceiverVariableField::Next<bool, 1>;
- using HasNewTargetField = HasClassBrandField::Next<bool, 1>;
+ using HasSavedClassVariableIndexField = HasClassBrandField::Next<bool, 1>;
+ using HasNewTargetField = HasSavedClassVariableIndexField::Next<bool, 1>;
using FunctionVariableField =
HasNewTargetField::Next<VariableAllocationInfo, 2>;
// TODO(cbruni): Combine with function variable field when only storing the
@@ -240,6 +259,8 @@ class ScopeInfo : public FixedArray {
using HasOuterScopeInfoField = FunctionKindField::Next<bool, 1>;
using IsDebugEvaluateScopeField = HasOuterScopeInfoField::Next<bool, 1>;
using ForceContextAllocationField = IsDebugEvaluateScopeField::Next<bool, 1>;
+ using PrivateNameLookupSkipsOuterClassField =
+ ForceContextAllocationField::Next<bool, 1>;
STATIC_ASSERT(kLastFunctionKind <= FunctionKindField::kMax);
@@ -256,27 +277,32 @@ class ScopeInfo : public FixedArray {
// the context locals in ContextLocalNames. One slot is used per
// context local, so in total this part occupies ContextLocalCount()
// slots in the array.
- // 3. ReceiverInfo:
+ // 3. SavedClassVariableInfo:
+ // If the scope is a class scope and it has static private methods that
+ // may be accessed directly or through eval, one slot is reserved to hold
+ // the context slot index for the class variable.
+ // 4. ReceiverInfo:
// If the scope binds a "this" value, one slot is reserved to hold the
// context or stack slot index for the variable.
- // 4. FunctionNameInfo:
+ // 5. FunctionNameInfo:
// If the scope belongs to a named function expression this part contains
// information about the function variable. It always occupies two array
// slots: a. The name of the function variable.
// b. The context or stack slot index for the variable.
- // 5. InferredFunctionName:
+ // 6. InferredFunctionName:
// Contains the function's inferred name.
- // 6. SourcePosition:
+ // 7. SourcePosition:
// Contains two slots with a) the startPosition and b) the endPosition if
// the scope belongs to a function or script.
- // 7. OuterScopeInfoIndex:
+ // 8. OuterScopeInfoIndex:
// The outer scope's ScopeInfo or the hole if there's none.
- // 8. SourceTextModuleInfo, ModuleVariableCount, and ModuleVariables:
+ // 9. SourceTextModuleInfo, ModuleVariableCount, and ModuleVariables:
// For a module scope, this part contains the SourceTextModuleInfo, the
// number of MODULE-allocated variables, and the metadata of those
// variables. For non-module scopes it is empty.
int ContextLocalNamesIndex() const;
int ContextLocalInfosIndex() const;
+ int SavedClassVariableInfoIndex() const;
int ReceiverInfoIndex() const;
int FunctionNameInfoIndex() const;
int InferredFunctionNameIndex() const;
@@ -310,6 +336,7 @@ class ScopeInfo : public FixedArray {
using InitFlagField = VariableModeField::Next<InitializationFlag, 1>;
using MaybeAssignedFlagField = InitFlagField::Next<MaybeAssignedFlag, 1>;
using ParameterNumberField = MaybeAssignedFlagField::Next<uint32_t, 16>;
+ using IsStaticFlagField = ParameterNumberField::Next<IsStaticFlag, 1>;
friend class ScopeIterator;
friend std::ostream& operator<<(std::ostream& os,
diff --git a/deps/v8/src/objects/script-inl.h b/deps/v8/src/objects/script-inl.h
index 07450c73ec..c306c2c092 100644
--- a/deps/v8/src/objects/script-inl.h
+++ b/deps/v8/src/objects/script-inl.h
@@ -36,15 +36,17 @@ ACCESSORS_CHECKED(Script, eval_from_shared_or_wrapped_arguments, Object,
this->type() != TYPE_WASM)
SMI_ACCESSORS_CHECKED(Script, eval_from_position, kEvalFromPositionOffset,
this->type() != TYPE_WASM)
-ACCESSORS(Script, shared_function_infos, WeakFixedArray,
- kSharedFunctionInfosOffset)
SMI_ACCESSORS(Script, flags, kFlagsOffset)
ACCESSORS(Script, source_url, Object, kSourceUrlOffset)
ACCESSORS(Script, source_mapping_url, Object, kSourceMappingUrlOffset)
ACCESSORS(Script, host_defined_options, FixedArray, kHostDefinedOptionsOffset)
-ACCESSORS_CHECKED(Script, wasm_module_object, Object,
+ACCESSORS_CHECKED(Script, wasm_breakpoint_infos, FixedArray,
kEvalFromSharedOrWrappedArgumentsOffset,
this->type() == TYPE_WASM)
+ACCESSORS_CHECKED(Script, wasm_managed_native_module, Object,
+ kEvalFromPositionOffset, this->type() == TYPE_WASM)
+ACCESSORS_CHECKED(Script, wasm_weak_instance_list, WeakArrayList,
+ kSharedFunctionInfosOffset, this->type() == TYPE_WASM)
bool Script::is_wrapped() const {
return eval_from_shared_or_wrapped_arguments().IsFixedArray();
@@ -75,6 +77,28 @@ FixedArray Script::wrapped_arguments() const {
return FixedArray::cast(eval_from_shared_or_wrapped_arguments());
}
+DEF_GETTER(Script, shared_function_infos, WeakFixedArray) {
+ return type() == TYPE_WASM
+ ? ReadOnlyRoots(GetHeap()).empty_weak_fixed_array()
+ : TaggedField<WeakFixedArray, kSharedFunctionInfosOffset>::load(
+ *this);
+}
+
+void Script::set_shared_function_infos(WeakFixedArray value,
+ WriteBarrierMode mode) {
+ DCHECK_NE(TYPE_WASM, type());
+ TaggedField<WeakFixedArray, kSharedFunctionInfosOffset>::store(*this, value);
+ CONDITIONAL_WRITE_BARRIER(*this, kSharedFunctionInfosOffset, value, mode);
+}
+
+bool Script::has_wasm_breakpoint_infos() const {
+ return type() == TYPE_WASM && wasm_breakpoint_infos().length() > 0;
+}
+
+wasm::NativeModule* Script::wasm_native_module() const {
+ return Managed<wasm::NativeModule>::cast(wasm_managed_native_module()).raw();
+}
+
Script::CompilationType Script::compilation_type() {
return BooleanBit::get(flags(), kCompilationTypeBit) ? COMPILATION_TYPE_EVAL
: COMPILATION_TYPE_HOST;
diff --git a/deps/v8/src/objects/script.h b/deps/v8/src/objects/script.h
index 2d9e4bca78..935241a149 100644
--- a/deps/v8/src/objects/script.h
+++ b/deps/v8/src/objects/script.h
@@ -5,6 +5,8 @@
#ifndef V8_OBJECTS_SCRIPT_H_
#define V8_OBJECTS_SCRIPT_H_
+#include <memory>
+
#include "src/objects/fixed-array.h"
#include "src/objects/objects.h"
#include "src/objects/struct.h"
@@ -101,9 +103,21 @@ class Script : public Struct {
// [source_mapping_url]: sourceMappingURL magic comment
DECL_ACCESSORS(source_mapping_url, Object)
- // [wasm_module_object]: the wasm module object this script belongs to.
+ // [wasm_breakpoint_infos]: the list of {BreakPointInfo} objects describing
+ // all WebAssembly breakpoints for modules/instances managed via this script.
+ // This must only be called if the type of this script is TYPE_WASM.
+ DECL_ACCESSORS(wasm_breakpoint_infos, FixedArray)
+ inline bool has_wasm_breakpoint_infos() const;
+
+ // [wasm_native_module]: the wasm {NativeModule} this script belongs to.
+ // This must only be called if the type of this script is TYPE_WASM.
+ DECL_ACCESSORS(wasm_managed_native_module, Object)
+ inline wasm::NativeModule* wasm_native_module() const;
+
+ // [wasm_weak_instance_list]: the list of all {WasmInstanceObject} being
+ // affected by breakpoints that are managed via this script.
// This must only be called if the type of this script is TYPE_WASM.
- DECL_ACCESSORS(wasm_module_object, Object)
+ DECL_ACCESSORS(wasm_weak_instance_list, WeakArrayList)
// [host_defined_options]: Options defined by the embedder.
DECL_ACCESSORS(host_defined_options, FixedArray)
diff --git a/deps/v8/src/objects/shared-function-info-inl.h b/deps/v8/src/objects/shared-function-info-inl.h
index 6023c3b828..4f12bc4324 100644
--- a/deps/v8/src/objects/shared-function-info-inl.h
+++ b/deps/v8/src/objects/shared-function-info-inl.h
@@ -21,11 +21,7 @@
namespace v8 {
namespace internal {
-OBJECT_CONSTRUCTORS_IMPL(PreparseData, HeapObject)
-
-CAST_ACCESSOR(PreparseData)
-INT_ACCESSORS(PreparseData, data_length, kDataLengthOffset)
-INT_ACCESSORS(PreparseData, children_length, kInnerLengthOffset)
+TQ_OBJECT_CONSTRUCTORS_IMPL(PreparseData)
int PreparseData::inner_start_offset() const {
return InnerOffset(data_length());
@@ -84,26 +80,9 @@ void PreparseData::set_child(int index, PreparseData value,
CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode);
}
-OBJECT_CONSTRUCTORS_IMPL(UncompiledData, HeapObject)
-OBJECT_CONSTRUCTORS_IMPL(UncompiledDataWithoutPreparseData, UncompiledData)
-OBJECT_CONSTRUCTORS_IMPL(UncompiledDataWithPreparseData, UncompiledData)
-CAST_ACCESSOR(UncompiledData)
-ACCESSORS(UncompiledData, inferred_name, String, kInferredNameOffset)
-INT32_ACCESSORS(UncompiledData, start_position, kStartPositionOffset)
-INT32_ACCESSORS(UncompiledData, end_position, kEndPositionOffset)
-
-void UncompiledData::clear_padding() {
- if (FIELD_SIZE(kOptionalPaddingOffset) == 0) return;
- DCHECK_EQ(4, FIELD_SIZE(kOptionalPaddingOffset));
- memset(reinterpret_cast<void*>(address() + kOptionalPaddingOffset), 0,
- FIELD_SIZE(kOptionalPaddingOffset));
-}
-
-CAST_ACCESSOR(UncompiledDataWithoutPreparseData)
-
-CAST_ACCESSOR(UncompiledDataWithPreparseData)
-ACCESSORS(UncompiledDataWithPreparseData, preparse_data, PreparseData,
- kPreparseDataOffset)
+TQ_OBJECT_CONSTRUCTORS_IMPL(UncompiledData)
+TQ_OBJECT_CONSTRUCTORS_IMPL(UncompiledDataWithoutPreparseData)
+TQ_OBJECT_CONSTRUCTORS_IMPL(UncompiledDataWithPreparseData)
DEF_GETTER(HeapObject, IsUncompiledData, bool) {
return IsUncompiledDataWithoutPreparseData(isolate) ||
@@ -124,7 +103,7 @@ DEFINE_DEOPT_ELEMENT_ACCESSORS(SharedFunctionInfo, Object)
ACCESSORS(SharedFunctionInfo, name_or_scope_info, Object,
kNameOrScopeInfoOffset)
-ACCESSORS(SharedFunctionInfo, script_or_debug_info, Object,
+ACCESSORS(SharedFunctionInfo, script_or_debug_info, HeapObject,
kScriptOrDebugInfoOffset)
INT32_ACCESSORS(SharedFunctionInfo, function_literal_id,
@@ -229,6 +208,9 @@ BIT_FIELD_ACCESSORS(SharedFunctionInfo, flags,
BIT_FIELD_ACCESSORS(SharedFunctionInfo, flags,
is_safe_to_skip_arguments_adaptor,
SharedFunctionInfo::IsSafeToSkipArgumentsAdaptorBit)
+BIT_FIELD_ACCESSORS(SharedFunctionInfo, flags,
+ private_name_lookup_skips_outer_class,
+ SharedFunctionInfo::PrivateNameLookupSkipsOuterClassBit)
bool SharedFunctionInfo::optimization_disabled() const {
return disable_optimization_reason() != BailoutReason::kNoReason;
@@ -613,12 +595,11 @@ void SharedFunctionInfo::ClearPreparseData() {
Heap* heap = GetHeapFromWritableObject(data);
// Swap the map.
- heap->NotifyObjectLayoutChange(data, UncompiledDataWithPreparseData::kSize,
- no_gc);
+ heap->NotifyObjectLayoutChange(data, no_gc);
STATIC_ASSERT(UncompiledDataWithoutPreparseData::kSize <
UncompiledDataWithPreparseData::kSize);
STATIC_ASSERT(UncompiledDataWithoutPreparseData::kSize ==
- UncompiledData::kSize);
+ UncompiledData::kHeaderSize);
data.synchronized_set_map(
GetReadOnlyRoots().uncompiled_data_without_preparse_data_map());
@@ -644,7 +625,6 @@ void UncompiledData::Initialize(
data, data.RawField(UncompiledData::kInferredNameOffset), inferred_name);
data.set_start_position(start_position);
data.set_end_position(end_position);
- data.clear_padding();
}
void UncompiledDataWithPreparseData::Initialize(
@@ -672,16 +652,16 @@ bool SharedFunctionInfo::HasWasmCapiFunctionData() const {
return function_data().IsWasmCapiFunctionData();
}
-Object SharedFunctionInfo::script() const {
- Object maybe_script = script_or_debug_info();
+HeapObject SharedFunctionInfo::script() const {
+ HeapObject maybe_script = script_or_debug_info();
if (maybe_script.IsDebugInfo()) {
return DebugInfo::cast(maybe_script).script();
}
return maybe_script;
}
-void SharedFunctionInfo::set_script(Object script) {
- Object maybe_debug_info = script_or_debug_info();
+void SharedFunctionInfo::set_script(HeapObject script) {
+ HeapObject maybe_debug_info = script_or_debug_info();
if (maybe_debug_info.IsDebugInfo()) {
DebugInfo::cast(maybe_debug_info).set_script(script);
} else {
diff --git a/deps/v8/src/objects/shared-function-info.h b/deps/v8/src/objects/shared-function-info.h
index dc84653ede..9c57d36697 100644
--- a/deps/v8/src/objects/shared-function-info.h
+++ b/deps/v8/src/objects/shared-function-info.h
@@ -5,6 +5,8 @@
#ifndef V8_OBJECTS_SHARED_FUNCTION_INFO_H_
#define V8_OBJECTS_SHARED_FUNCTION_INFO_H_
+#include <memory>
+
#include "src/codegen/bailout-reason.h"
#include "src/objects/compressed-slots.h"
#include "src/objects/function-kind.h"
@@ -55,11 +57,9 @@ class WasmJSFunctionData;
// +-------------------------------+
// | Inner PreparseData N |
// +-------------------------------+
-class PreparseData : public HeapObject {
+class PreparseData
+ : public TorqueGeneratedPreparseData<PreparseData, HeapObject> {
public:
- DECL_INT_ACCESSORS(data_length)
- DECL_INT_ACCESSORS(children_length)
-
inline int inner_start_offset() const;
inline ObjectSlot inner_data_start() const;
@@ -74,12 +74,9 @@ class PreparseData : public HeapObject {
// Clear uninitialized padding space.
inline void clear_padding();
- DECL_CAST(PreparseData)
DECL_PRINTER(PreparseData)
DECL_VERIFIER(PreparseData)
- DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize,
- TORQUE_GENERATED_PREPARSE_DATA_FIELDS)
static const int kDataStartOffset = kSize;
class BodyDescriptor;
@@ -92,7 +89,7 @@ class PreparseData : public HeapObject {
return InnerOffset(data_length) + children_length * kTaggedSize;
}
- OBJECT_CONSTRUCTORS(PreparseData, HeapObject);
+ TQ_OBJECT_CONSTRUCTORS(PreparseData)
private:
inline Object get_child_raw(int index) const;
@@ -100,14 +97,9 @@ class PreparseData : public HeapObject {
// Abstract class representing extra data for an uncompiled function, which is
// not stored in the SharedFunctionInfo.
-class UncompiledData : public HeapObject {
+class UncompiledData
+ : public TorqueGeneratedUncompiledData<UncompiledData, HeapObject> {
public:
- DECL_ACCESSORS(inferred_name, String)
- DECL_INT32_ACCESSORS(start_position)
- DECL_INT32_ACCESSORS(end_position)
-
- DECL_CAST(UncompiledData)
-
inline static void Initialize(
UncompiledData data, String inferred_name, int start_position,
int end_position,
@@ -115,56 +107,35 @@ class UncompiledData : public HeapObject {
gc_notify_updated_slot =
[](HeapObject object, ObjectSlot slot, HeapObject target) {});
- // Layout description.
-#define UNCOMPILED_DATA_FIELDS(V) \
- V(kStartOfStrongFieldsOffset, 0) \
- V(kInferredNameOffset, kTaggedSize) \
- V(kEndOfStrongFieldsOffset, 0) \
- /* Raw data fields. */ \
- V(kStartPositionOffset, kInt32Size) \
- V(kEndPositionOffset, kInt32Size) \
- V(kOptionalPaddingOffset, POINTER_SIZE_PADDING(kOptionalPaddingOffset)) \
- /* Header size. */ \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, UNCOMPILED_DATA_FIELDS)
-#undef UNCOMPILED_DATA_FIELDS
-
- using BodyDescriptor = FixedBodyDescriptor<kStartOfStrongFieldsOffset,
- kEndOfStrongFieldsOffset, kSize>;
-
- // Clear uninitialized padding space.
- inline void clear_padding();
+ using BodyDescriptor =
+ FixedBodyDescriptor<kStartOfStrongFieldsOffset, kEndOfStrongFieldsOffset,
+ kHeaderSize>;
- OBJECT_CONSTRUCTORS(UncompiledData, HeapObject);
+ TQ_OBJECT_CONSTRUCTORS(UncompiledData)
};
// Class representing data for an uncompiled function that does not have any
// data from the pre-parser, either because it's a leaf function or because the
// pre-parser bailed out.
-class UncompiledDataWithoutPreparseData : public UncompiledData {
+class UncompiledDataWithoutPreparseData
+ : public TorqueGeneratedUncompiledDataWithoutPreparseData<
+ UncompiledDataWithoutPreparseData, UncompiledData> {
public:
- DECL_CAST(UncompiledDataWithoutPreparseData)
DECL_PRINTER(UncompiledDataWithoutPreparseData)
- DECL_VERIFIER(UncompiledDataWithoutPreparseData)
-
- static const int kSize = UncompiledData::kSize;
// No extra fields compared to UncompiledData.
using BodyDescriptor = UncompiledData::BodyDescriptor;
- OBJECT_CONSTRUCTORS(UncompiledDataWithoutPreparseData, UncompiledData);
+ TQ_OBJECT_CONSTRUCTORS(UncompiledDataWithoutPreparseData)
};
// Class representing data for an uncompiled function that has pre-parsed scope
// data.
-class UncompiledDataWithPreparseData : public UncompiledData {
+class UncompiledDataWithPreparseData
+ : public TorqueGeneratedUncompiledDataWithPreparseData<
+ UncompiledDataWithPreparseData, UncompiledData> {
public:
- DECL_ACCESSORS(preparse_data, PreparseData)
-
- DECL_CAST(UncompiledDataWithPreparseData)
DECL_PRINTER(UncompiledDataWithPreparseData)
- DECL_VERIFIER(UncompiledDataWithPreparseData)
inline static void Initialize(
UncompiledDataWithPreparseData data, String inferred_name,
@@ -173,28 +144,12 @@ class UncompiledDataWithPreparseData : public UncompiledData {
gc_notify_updated_slot =
[](HeapObject object, ObjectSlot slot, HeapObject target) {});
- // Layout description.
-
-#define UNCOMPILED_DATA_WITH_PREPARSE_DATA_FIELDS(V) \
- V(kStartOfStrongFieldsOffset, 0) \
- V(kPreparseDataOffset, kTaggedSize) \
- V(kEndOfStrongFieldsOffset, 0) \
- /* Total size. */ \
- V(kSize, 0)
-
- DEFINE_FIELD_OFFSET_CONSTANTS(UncompiledData::kSize,
- UNCOMPILED_DATA_WITH_PREPARSE_DATA_FIELDS)
-#undef UNCOMPILED_DATA_WITH_PREPARSE_DATA_FIELDS
-
- // Make sure the size is aligned
- STATIC_ASSERT(IsAligned(kSize, kTaggedSize));
-
using BodyDescriptor = SubclassBodyDescriptor<
UncompiledData::BodyDescriptor,
FixedBodyDescriptor<kStartOfStrongFieldsOffset, kEndOfStrongFieldsOffset,
kSize>>;
- OBJECT_CONSTRUCTORS(UncompiledDataWithPreparseData, UncompiledData);
+ TQ_OBJECT_CONSTRUCTORS(UncompiledDataWithPreparseData)
};
class InterpreterData : public Struct {
@@ -242,7 +197,7 @@ class SharedFunctionInfo : public HeapObject {
// Set up the link between shared function info and the script. The shared
// function info is added to the list on the script.
V8_EXPORT_PRIVATE static void SetScript(
- Handle<SharedFunctionInfo> shared, Handle<Object> script_object,
+ Handle<SharedFunctionInfo> shared, Handle<HeapObject> script_object,
int function_literal_id, bool reset_preparsed_scope_data = true);
// Layout description of the optimized code map.
@@ -408,10 +363,10 @@ class SharedFunctionInfo : public HeapObject {
// [script_or_debug_info]: One of:
// - Script from which the function originates.
// - a DebugInfo which holds the actual script [HasDebugInfo()].
- DECL_ACCESSORS(script_or_debug_info, Object)
+ DECL_ACCESSORS(script_or_debug_info, HeapObject)
- inline Object script() const;
- inline void set_script(Object script);
+ inline HeapObject script() const;
+ inline void set_script(HeapObject script);
// The function is subject to debugging if a debug info is attached.
inline bool HasDebugInfo() const;
@@ -490,6 +445,10 @@ class SharedFunctionInfo : public HeapObject {
// Indicates that the function has been reported for binary code coverage.
DECL_BOOLEAN_ACCESSORS(has_reported_binary_coverage)
+ // Indicates that the private name lookups inside the function skips the
+ // closest outer class scope.
+ DECL_BOOLEAN_ACCESSORS(private_name_lookup_skips_outer_class)
+
inline FunctionKind kind() const;
// Defines the index in a native context of closure's map instantiated using
@@ -640,21 +599,6 @@ class SharedFunctionInfo : public HeapObject {
DISALLOW_COPY_AND_ASSIGN(ScriptIterator);
};
- // Iterate over all shared function infos on the heap.
- class GlobalIterator {
- public:
- V8_EXPORT_PRIVATE explicit GlobalIterator(Isolate* isolate);
- V8_EXPORT_PRIVATE SharedFunctionInfo Next();
-
- private:
- Isolate* isolate_;
- Script::Iterator script_iterator_;
- WeakArrayList::Iterator noscript_sfi_iterator_;
- SharedFunctionInfo::ScriptIterator sfi_iterator_;
- DISALLOW_HEAP_ALLOCATION(no_gc_)
- DISALLOW_COPY_AND_ASSIGN(GlobalIterator);
- };
-
DECL_CAST(SharedFunctionInfo)
// Constants.
@@ -691,7 +635,8 @@ class SharedFunctionInfo : public HeapObject {
V(HasReportedBinaryCoverageBit, bool, 1, _) \
V(IsTopLevelBit, bool, 1, _) \
V(IsOneshotIIFEOrPropertiesAreFinalBit, bool, 1, _) \
- V(IsSafeToSkipArgumentsAdaptorBit, bool, 1, _)
+ V(IsSafeToSkipArgumentsAdaptorBit, bool, 1, _) \
+ V(PrivateNameLookupSkipsOuterClassBit, bool, 1, _)
DEFINE_BIT_FIELDS(FLAGS_BIT_FIELDS)
#undef FLAGS_BIT_FIELDS
diff --git a/deps/v8/src/objects/slots-inl.h b/deps/v8/src/objects/slots-inl.h
index b240729114..7e692b7948 100644
--- a/deps/v8/src/objects/slots-inl.h
+++ b/deps/v8/src/objects/slots-inl.h
@@ -119,7 +119,7 @@ inline void MemsetTagged(ObjectSlot start, Object value, size_t counter) {
#ifdef V8_COMPRESS_POINTERS
Tagged_t raw_value = CompressTagged(value.ptr());
STATIC_ASSERT(kTaggedSize == kInt32Size);
- MemsetInt32(start.location(), raw_value, counter);
+ MemsetInt32(reinterpret_cast<int32_t*>(start.location()), raw_value, counter);
#else
Address raw_value = value.ptr();
MemsetPointer(start.location(), raw_value, counter);
diff --git a/deps/v8/src/objects/source-text-module.cc b/deps/v8/src/objects/source-text-module.cc
index f17c59de1a..2959e1b854 100644
--- a/deps/v8/src/objects/source-text-module.cc
+++ b/deps/v8/src/objects/source-text-module.cc
@@ -78,8 +78,6 @@ class Module::ResolveSet
SharedFunctionInfo SourceTextModule::GetSharedFunctionInfo() const {
DisallowHeapAllocation no_alloc;
- DCHECK_NE(status(), Module::kEvaluating);
- DCHECK_NE(status(), Module::kEvaluated);
switch (status()) {
case kUninstantiated:
case kPreInstantiating:
@@ -89,10 +87,10 @@ SharedFunctionInfo SourceTextModule::GetSharedFunctionInfo() const {
DCHECK(code().IsJSFunction());
return JSFunction::cast(code()).shared();
case kInstantiated:
- DCHECK(code().IsJSGeneratorObject());
- return JSGeneratorObject::cast(code()).function().shared();
case kEvaluating:
case kEvaluated:
+ DCHECK(code().IsJSGeneratorObject());
+ return JSGeneratorObject::cast(code()).function().shared();
case kErrored:
UNREACHABLE();
}
@@ -580,58 +578,518 @@ Handle<JSModuleNamespace> SourceTextModule::GetModuleNamespace(
return Module::GetModuleNamespace(isolate, requested_module);
}
+MaybeHandle<Object> SourceTextModule::EvaluateMaybeAsync(
+ Isolate* isolate, Handle<SourceTextModule> module) {
+ // In the event of errored evaluation, return a rejected promise.
+ if (module->status() == kErrored) {
+ // If we have a top level capability we assume it has already been
+ // rejected, and return it here. Otherwise create a new promise and
+ // reject it with the module's exception.
+ if (module->top_level_capability().IsJSPromise()) {
+ Handle<JSPromise> top_level_capability(
+ JSPromise::cast(module->top_level_capability()), isolate);
+ DCHECK(top_level_capability->status() == Promise::kRejected &&
+ top_level_capability->result() == module->exception());
+ return top_level_capability;
+ }
+ Handle<JSPromise> capability = isolate->factory()->NewJSPromise();
+ JSPromise::Reject(capability, handle(module->exception(), isolate));
+ return capability;
+ }
+
+ // Start of Evaluate () Concrete Method
+ // 2. Assert: module.[[Status]] is "linked" or "evaluated".
+ CHECK(module->status() == kInstantiated || module->status() == kEvaluated);
+
+ // 3. If module.[[Status]] is "evaluated", set module to
+ // GetAsyncCycleRoot(module).
+ if (module->status() == kEvaluated) {
+ module = GetAsyncCycleRoot(isolate, module);
+ }
+
+ // 4. If module.[[TopLevelCapability]] is not undefined, then
+ // a. Return module.[[TopLevelCapability]].[[Promise]].
+ if (module->top_level_capability().IsJSPromise()) {
+ return handle(JSPromise::cast(module->top_level_capability()), isolate);
+ }
+ DCHECK(module->top_level_capability().IsUndefined());
+
+ // 6. Let capability be ! NewPromiseCapability(%Promise%).
+ Handle<JSPromise> capability = isolate->factory()->NewJSPromise();
+
+ // 7. Set module.[[TopLevelCapability]] to capability.
+ module->set_top_level_capability(*capability);
+ DCHECK(module->top_level_capability().IsJSPromise());
+
+ // 9. If result is an abrupt completion, then
+ Handle<Object> unused_result;
+ if (!Evaluate(isolate, module).ToHandle(&unused_result)) {
+ // d. Perform ! Call(capability.[[Reject]], undefined,
+ // «result.[[Value]]»).
+ isolate->clear_pending_exception();
+ JSPromise::Reject(capability, handle(module->exception(), isolate));
+ } else {
+ // 10. Otherwise,
+ // a. Assert: module.[[Status]] is "evaluated"...
+ CHECK_EQ(module->status(), kEvaluated);
+
+ // b. If module.[[AsyncEvaluating]] is false, then
+ if (!module->async_evaluating()) {
+ // i. Perform ! Call(capability.[[Resolve]], undefined,
+ // «undefined»).
+ JSPromise::Resolve(capability, isolate->factory()->undefined_value())
+ .ToHandleChecked();
+ }
+ }
+
+ // 11. Return capability.[[Promise]].
+ return capability;
+}
+
MaybeHandle<Object> SourceTextModule::Evaluate(
+ Isolate* isolate, Handle<SourceTextModule> module) {
+ // Evaluate () Concrete Method continued from EvaluateMaybeAsync.
+ CHECK(module->status() == kInstantiated || module->status() == kEvaluated);
+
+ // 5. Let stack be a new empty List.
+ Zone zone(isolate->allocator(), ZONE_NAME);
+ ZoneForwardList<Handle<SourceTextModule>> stack(&zone);
+ unsigned dfs_index = 0;
+
+ // 8. Let result be InnerModuleEvaluation(module, stack, 0).
+ // 9. If result is an abrupt completion, then
+ Handle<Object> result;
+ if (!InnerModuleEvaluation(isolate, module, &stack, &dfs_index)
+ .ToHandle(&result)) {
+ // a. For each Cyclic Module Record m in stack, do
+ for (auto& descendant : stack) {
+ // i. Assert: m.[[Status]] is "evaluating".
+ CHECK_EQ(descendant->status(), kEvaluating);
+ // ii. Set m.[[Status]] to "evaluated".
+ // iii. Set m.[[EvaluationError]] to result.
+ descendant->RecordErrorUsingPendingException(isolate);
+ }
+ DCHECK_EQ(module->exception(), isolate->pending_exception());
+ } else {
+ // 10. Otherwise,
+ // c. Assert: stack is empty.
+ DCHECK(stack.empty());
+ }
+ return result;
+}
+
+void SourceTextModule::AsyncModuleExecutionFulfilled(
+ Isolate* isolate, Handle<SourceTextModule> module) {
+ // 1. Assert: module.[[Status]] is "evaluated".
+ CHECK(module->status() == kEvaluated || module->status() == kErrored);
+
+ // 2. If module.[[AsyncEvaluating]] is false,
+ if (!module->async_evaluating()) {
+ // a. Assert: module.[[EvaluationError]] is not undefined.
+ CHECK_EQ(module->status(), kErrored);
+
+ // b. Return undefined.
+ return;
+ }
+
+ // 3. Assert: module.[[EvaluationError]] is undefined.
+ CHECK_EQ(module->status(), kEvaluated);
+
+ // 4. Set module.[[AsyncEvaluating]] to false.
+ module->set_async_evaluating(false);
+
+ // 5. For each Module m of module.[[AsyncParentModules]], do
+ for (int i = 0; i < module->AsyncParentModuleCount(); i++) {
+ Handle<SourceTextModule> m = module->GetAsyncParentModule(isolate, i);
+
+ // a. If module.[[DFSIndex]] is not equal to module.[[DFSAncestorIndex]],
+ // then
+ if (module->dfs_index() != module->dfs_ancestor_index()) {
+ // i. Assert: m.[[DFSAncestorIndex]] is equal to
+ // module.[[DFSAncestorIndex]].
+ DCHECK_LE(m->dfs_ancestor_index(), module->dfs_ancestor_index());
+ }
+ // b. Decrement m.[[PendingAsyncDependencies]] by 1.
+ m->DecrementPendingAsyncDependencies();
+
+ // c. If m.[[PendingAsyncDependencies]] is 0 and m.[[EvaluationError]] is
+ // undefined, then
+ if (!m->HasPendingAsyncDependencies() && m->status() == kEvaluated) {
+ // i. Assert: m.[[AsyncEvaluating]] is true.
+ DCHECK(m->async_evaluating());
+
+ // ii. Let cycleRoot be ! GetAsyncCycleRoot(m).
+ auto cycle_root = GetAsyncCycleRoot(isolate, m);
+
+ // iii. If cycleRoot.[[EvaluationError]] is not undefined,
+ // return undefined.
+ if (cycle_root->status() == kErrored) {
+ return;
+ }
+
+ // iv. If m.[[Async]] is true, then
+ if (m->async()) {
+ // 1. Perform ! ExecuteAsyncModule(m).
+ ExecuteAsyncModule(isolate, m);
+ } else {
+ // v. Otherwise,
+ // 1. Let result be m.ExecuteModule().
+ // 2. If result is a normal completion,
+ Handle<Object> unused_result;
+ if (ExecuteModule(isolate, m).ToHandle(&unused_result)) {
+ // a. Perform ! AsyncModuleExecutionFulfilled(m).
+ AsyncModuleExecutionFulfilled(isolate, m);
+ } else {
+ // 3. Otherwise,
+ // a. Perform ! AsyncModuleExecutionRejected(m,
+ // result.[[Value]]).
+ Handle<Object> exception(isolate->pending_exception(), isolate);
+ isolate->clear_pending_exception();
+ AsyncModuleExecutionRejected(isolate, m, exception);
+ }
+ }
+ }
+ }
+
+ // 6. If module.[[TopLevelCapability]] is not undefined, then
+ if (!module->top_level_capability().IsUndefined(isolate)) {
+ // a. Assert: module.[[DFSIndex]] is equal to module.[[DFSAncestorIndex]].
+ DCHECK_EQ(module->dfs_index(), module->dfs_ancestor_index());
+
+ // b. Perform ! Call(module.[[TopLevelCapability]].[[Resolve]],
+ // undefined, «undefined»).
+ Handle<JSPromise> capability(
+ JSPromise::cast(module->top_level_capability()), isolate);
+ JSPromise::Resolve(capability, isolate->factory()->undefined_value())
+ .ToHandleChecked();
+ }
+
+ // 7. Return undefined.
+}
+
+void SourceTextModule::AsyncModuleExecutionRejected(
Isolate* isolate, Handle<SourceTextModule> module,
- ZoneForwardList<Handle<SourceTextModule>>* stack, unsigned* dfs_index) {
+ Handle<Object> exception) {
+ // 1. Assert: module.[[Status]] is "evaluated".
+ CHECK(module->status() == kEvaluated || module->status() == kErrored);
+
+ // 2. If module.[[AsyncEvaluating]] is false,
+ if (!module->async_evaluating()) {
+ // a. Assert: module.[[EvaluationError]] is not undefined.
+ CHECK_EQ(module->status(), kErrored);
+
+ // b. Return undefined.
+ return;
+ }
+
+ // 4. Set module.[[EvaluationError]] to ThrowCompletion(error).
+ module->RecordError(isolate, exception);
+
+ // 5. Set module.[[AsyncEvaluating]] to false.
+ module->set_async_evaluating(false);
+
+ // 6. For each Module m of module.[[AsyncParentModules]], do
+ for (int i = 0; i < module->AsyncParentModuleCount(); i++) {
+ Handle<SourceTextModule> m = module->GetAsyncParentModule(isolate, i);
+
+ // a. If module.[[DFSIndex]] is not equal to module.[[DFSAncestorIndex]],
+ // then
+ if (module->dfs_index() != module->dfs_ancestor_index()) {
+ // i. Assert: m.[[DFSAncestorIndex]] is equal to
+ // module.[[DFSAncestorIndex]].
+ DCHECK_EQ(m->dfs_ancestor_index(), module->dfs_ancestor_index());
+ }
+ // b. Perform ! AsyncModuleExecutionRejected(m, error).
+ AsyncModuleExecutionRejected(isolate, m, exception);
+ }
+
+ // 7. If module.[[TopLevelCapability]] is not undefined, then
+ if (!module->top_level_capability().IsUndefined(isolate)) {
+ // a. Assert: module.[[DFSIndex]] is equal to module.[[DFSAncestorIndex]].
+ DCHECK(module->dfs_index() == module->dfs_ancestor_index());
+
+ // b. Perform ! Call(module.[[TopLevelCapability]].[[Reject]],
+ // undefined, «error»).
+ Handle<JSPromise> capability(
+ JSPromise::cast(module->top_level_capability()), isolate);
+ JSPromise::Reject(capability, exception);
+ }
+
+ // 8. Return undefined.
+}
+
+void SourceTextModule::ExecuteAsyncModule(Isolate* isolate,
+ Handle<SourceTextModule> module) {
+ // 1. Assert: module.[[Status]] is "evaluating" or "evaluated".
+ CHECK(module->status() == kEvaluating || module->status() == kEvaluated);
+
+ // 2. Assert: module.[[Async]] is true.
+ DCHECK(module->async());
+
+ // 3. Set module.[[AsyncEvaluating]] to true.
+ module->set_async_evaluating(true);
+
+ // 4. Let capability be ! NewPromiseCapability(%Promise%).
+ Handle<JSPromise> capability = isolate->factory()->NewJSPromise();
+
+ // 5. Let stepsFulfilled be the steps of a CallAsyncModuleFulfilled
+ Handle<JSFunction> steps_fulfilled(
+ isolate->native_context()->call_async_module_fulfilled(), isolate);
+
+ ScopedVector<Handle<Object>> empty_argv(0);
+
+ // 6. Let onFulfilled be CreateBuiltinFunction(stepsFulfilled,
+ // «[[Module]]»).
+ // 7. Set onFulfilled.[[Module]] to module.
+ Handle<JSBoundFunction> on_fulfilled =
+ isolate->factory()
+ ->NewJSBoundFunction(steps_fulfilled, module, empty_argv)
+ .ToHandleChecked();
+
+ // 8. Let stepsRejected be the steps of a CallAsyncModuleRejected.
+ Handle<JSFunction> steps_rejected(
+ isolate->native_context()->call_async_module_rejected(), isolate);
+
+ // 9. Let onRejected be CreateBuiltinFunction(stepsRejected, «[[Module]]»).
+ // 10. Set onRejected.[[Module]] to module.
+ Handle<JSBoundFunction> on_rejected =
+ isolate->factory()
+ ->NewJSBoundFunction(steps_rejected, module, empty_argv)
+ .ToHandleChecked();
+
+ // 11. Perform ! PerformPromiseThen(capability.[[Promise]],
+ // onFulfilled, onRejected).
+ Handle<Object> argv[] = {on_fulfilled, on_rejected};
+ Execution::CallBuiltin(isolate, isolate->promise_then(), capability,
+ arraysize(argv), argv)
+ .ToHandleChecked();
+
+ // 12. Perform ! module.ExecuteModule(capability).
+ // Note: In V8 we have broken module.ExecuteModule into
+ // ExecuteModule for synchronous module execution and
+ // InnerExecuteAsyncModule for asynchronous execution.
+ InnerExecuteAsyncModule(isolate, module, capability).ToHandleChecked();
+
+ // 13. Return.
+}
+
+MaybeHandle<Object> SourceTextModule::InnerExecuteAsyncModule(
+ Isolate* isolate, Handle<SourceTextModule> module,
+ Handle<JSPromise> capability) {
+ // If we have an async module, then it has an associated
+ // JSAsyncFunctionObject, which we then evaluate with the passed in promise
+ // capability.
+ Handle<JSAsyncFunctionObject> async_function_object(
+ JSAsyncFunctionObject::cast(module->code()), isolate);
+ async_function_object->set_promise(*capability);
+ Handle<JSFunction> resume(
+ isolate->native_context()->async_module_evaluate_internal(), isolate);
+ Handle<Object> result;
+ ASSIGN_RETURN_ON_EXCEPTION(
+ isolate, result,
+ Execution::Call(isolate, resume, async_function_object, 0, nullptr),
+ Object);
+ return result;
+}
+
+MaybeHandle<Object> SourceTextModule::ExecuteModule(
+ Isolate* isolate, Handle<SourceTextModule> module) {
+ // Synchronous modules have an associated JSGeneratorObject.
Handle<JSGeneratorObject> generator(JSGeneratorObject::cast(module->code()),
isolate);
- module->set_code(
- generator->function().shared().scope_info().ModuleDescriptorInfo());
+ Handle<JSFunction> resume(
+ isolate->native_context()->generator_next_internal(), isolate);
+ Handle<Object> result;
+ ASSIGN_RETURN_ON_EXCEPTION(
+ isolate, result, Execution::Call(isolate, resume, generator, 0, nullptr),
+ Object);
+ DCHECK(JSIteratorResult::cast(*result).done().BooleanValue(isolate));
+ return handle(JSIteratorResult::cast(*result).value(), isolate);
+}
+
+MaybeHandle<Object> SourceTextModule::InnerModuleEvaluation(
+ Isolate* isolate, Handle<SourceTextModule> module,
+ ZoneForwardList<Handle<SourceTextModule>>* stack, unsigned* dfs_index) {
+ STACK_CHECK(isolate, MaybeHandle<Object>());
+
+ // InnerModuleEvaluation(module, stack, index)
+ // 2. If module.[[Status]] is "evaluated", then
+ // a. If module.[[EvaluationError]] is undefined, return index.
+ // (We return undefined instead)
+ if (module->status() == kEvaluated || module->status() == kEvaluating) {
+ return isolate->factory()->undefined_value();
+ }
+
+ // b. Otherwise return module.[[EvaluationError]].
+ // (We throw on isolate and return a MaybeHandle<Object>
+ // instead)
+ if (module->status() == kErrored) {
+ isolate->Throw(module->exception());
+ return MaybeHandle<Object>();
+ }
+
+ // 4. Assert: module.[[Status]] is "linked".
+ CHECK_EQ(module->status(), kInstantiated);
+
+ // 5. Set module.[[Status]] to "evaluating".
module->SetStatus(kEvaluating);
+
+ // 6. Set module.[[DFSIndex]] to index.
module->set_dfs_index(*dfs_index);
+
+ // 7. Set module.[[DFSAncestorIndex]] to index.
module->set_dfs_ancestor_index(*dfs_index);
- stack->push_front(module);
+
+ // 8. Set module.[[PendingAsyncDependencies]] to 0.
+ DCHECK(!module->HasPendingAsyncDependencies());
+
+ // 9. Set module.[[AsyncParentModules]] to a new empty List.
+ Handle<ArrayList> async_parent_modules = ArrayList::New(isolate, 0);
+ module->set_async_parent_modules(*async_parent_modules);
+
+ // 10. Set index to index + 1.
(*dfs_index)++;
+ // 11. Append module to stack.
+ stack->push_front(module);
+
// Recursion.
Handle<FixedArray> requested_modules(module->requested_modules(), isolate);
+
+ // 12. For each String required that is an element of
+ // module.[[RequestedModules]], do
for (int i = 0, length = requested_modules->length(); i < length; ++i) {
Handle<Module> requested_module(Module::cast(requested_modules->get(i)),
isolate);
- RETURN_ON_EXCEPTION(
- isolate, Module::Evaluate(isolate, requested_module, stack, dfs_index),
- Object);
-
- DCHECK_GE(requested_module->status(), kEvaluating);
- DCHECK_NE(requested_module->status(), kErrored);
- SLOW_DCHECK(
- // {requested_module} is evaluating iff it's on the {stack}.
- (requested_module->status() == kEvaluating) ==
- std::count_if(stack->begin(), stack->end(), [&](Handle<Module> m) {
- return *m == *requested_module;
- }));
-
- if (requested_module->status() == kEvaluating) {
- // SyntheticModules go straight to kEvaluated so this must be a
- // SourceTextModule
- module->set_dfs_ancestor_index(
- std::min(module->dfs_ancestor_index(),
- Handle<SourceTextModule>::cast(requested_module)
- ->dfs_ancestor_index()));
+ // d. If requiredModule is a Cyclic Module Record, then
+ if (requested_module->IsSourceTextModule()) {
+ Handle<SourceTextModule> required_module(
+ SourceTextModule::cast(*requested_module), isolate);
+ RETURN_ON_EXCEPTION(
+ isolate,
+ InnerModuleEvaluation(isolate, required_module, stack, dfs_index),
+ Object);
+
+ // i. Assert: requiredModule.[[Status]] is either "evaluating" or
+ // "evaluated".
+ // (We also assert the module cannot be errored, because if it was
+ // we would have already returned from InnerModuleEvaluation)
+ CHECK_GE(required_module->status(), kEvaluating);
+ CHECK_NE(required_module->status(), kErrored);
+
+ // ii. Assert: requiredModule.[[Status]] is "evaluating" if and
+ // only if requiredModule is in stack.
+ SLOW_DCHECK(
+ (requested_module->status() == kEvaluating) ==
+ std::count_if(stack->begin(), stack->end(), [&](Handle<Module> m) {
+ return *m == *requested_module;
+ }));
+
+ // iii. If requiredModule.[[Status]] is "evaluating", then
+ if (required_module->status() == kEvaluating) {
+ // 1. Set module.[[DFSAncestorIndex]] to
+ // min(
+ // module.[[DFSAncestorIndex]],
+ // requiredModule.[[DFSAncestorIndex]]).
+ module->set_dfs_ancestor_index(
+ std::min(module->dfs_ancestor_index(),
+ required_module->dfs_ancestor_index()));
+ } else {
+ // iv. Otherwise,
+ // 1. Set requiredModule to GetAsyncCycleRoot(requiredModule).
+ required_module = GetAsyncCycleRoot(isolate, required_module);
+
+ // 2. Assert: requiredModule.[[Status]] is "evaluated".
+ CHECK_GE(required_module->status(), kEvaluated);
+
+ // 3. If requiredModule.[[EvaluationError]] is not undefined,
+ // return module.[[EvaluationError]].
+ // (If there was an exception on the original required module
+ // we would have already returned. This check handles the case
+ // where the AsyncCycleRoot has an error. Instead of returning
+ // the exception, we throw on isolate and return a
+ // MaybeHandle<Object>)
+ if (required_module->status() == kErrored) {
+ isolate->Throw(required_module->exception());
+ return MaybeHandle<Object>();
+ }
+ }
+ // v. If requiredModule.[[AsyncEvaluating]] is true, then
+ if (required_module->async_evaluating()) {
+ // 1. Set module.[[PendingAsyncDependencies]] to
+ // module.[[PendingAsyncDependencies]] + 1.
+ module->IncrementPendingAsyncDependencies();
+
+ // 2. Append module to requiredModule.[[AsyncParentModules]].
+ required_module->AddAsyncParentModule(isolate, module);
+ }
+ } else {
+ RETURN_ON_EXCEPTION(isolate, Module::Evaluate(isolate, requested_module),
+ Object);
}
}
- // Evaluation of module body.
- Handle<JSFunction> resume(
- isolate->native_context()->generator_next_internal(), isolate);
- Handle<Object> result;
- ASSIGN_RETURN_ON_EXCEPTION(
- isolate, result, Execution::Call(isolate, resume, generator, 0, nullptr),
- Object);
- DCHECK(JSIteratorResult::cast(*result).done().BooleanValue(isolate));
+ // The spec returns the module index for proper numbering of dependencies.
+ // However, we pass the module index by pointer instead.
+ //
+ // Before async modules v8 returned the value result from calling next
+ // on the module's implicit iterator. We preserve this behavior for
+ // synchronous modules, but return undefined for AsyncModules.
+ Handle<Object> result = isolate->factory()->undefined_value();
+
+ // 14. If module.[[PendingAsyncDependencies]] is > 0, set
+ // module.[[AsyncEvaluating]] to true.
+ if (module->HasPendingAsyncDependencies()) {
+ module->set_async_evaluating(true);
+ } else if (module->async()) {
+ // 15. Otherwise, if module.[[Async]] is true,
+ // perform ! ExecuteAsyncModule(module).
+ SourceTextModule::ExecuteAsyncModule(isolate, module);
+ } else {
+ // 16. Otherwise, perform ? module.ExecuteModule().
+ ASSIGN_RETURN_ON_EXCEPTION(isolate, result, ExecuteModule(isolate, module),
+ Object);
+ }
CHECK(MaybeTransitionComponent(isolate, module, stack, kEvaluated));
- return handle(JSIteratorResult::cast(*result).value(), isolate);
+ return result;
+}
+
+Handle<SourceTextModule> SourceTextModule::GetAsyncCycleRoot(
+ Isolate* isolate, Handle<SourceTextModule> module) {
+ // 1. Assert: module.[[Status]] is "evaluated".
+ CHECK_GE(module->status(), kEvaluated);
+
+ // 2. If module.[[AsyncParentModules]] is an empty List, return module.
+ if (module->AsyncParentModuleCount() == 0) {
+ return module;
+ }
+
+ // 3. Repeat, while module.[[DFSIndex]] is greater than
+ // module.[[DFSAncestorIndex]],
+ while (module->dfs_index() > module->dfs_ancestor_index()) {
+ // a. Assert: module.[[AsyncParentModules]] is a non-empty List.
+ DCHECK_GT(module->AsyncParentModuleCount(), 0);
+
+ // b. Let nextCycleModule be the first element of
+ // module.[[AsyncParentModules]].
+ Handle<SourceTextModule> next_cycle_module =
+ module->GetAsyncParentModule(isolate, 0);
+
+ // c. Assert: nextCycleModule.[[DFSAncestorIndex]] is less than or equal
+ // to module.[[DFSAncestorIndex]].
+ DCHECK_LE(next_cycle_module->dfs_ancestor_index(),
+ module->dfs_ancestor_index());
+
+ // d. Set module to nextCycleModule
+ module = next_cycle_module;
+ }
+
+ // 4. Assert: module.[[DFSIndex]] is equal to module.[[DFSAncestorIndex]].
+ DCHECK_EQ(module->dfs_index(), module->dfs_ancestor_index());
+
+ // 5. Return module.
+ return module;
}
void SourceTextModule::Reset(Isolate* isolate,
diff --git a/deps/v8/src/objects/source-text-module.h b/deps/v8/src/objects/source-text-module.h
index e6cf260e10..f1387635d0 100644
--- a/deps/v8/src/objects/source-text-module.h
+++ b/deps/v8/src/objects/source-text-module.h
@@ -6,6 +6,7 @@
#define V8_OBJECTS_SOURCE_TEXT_MODULE_H_
#include "src/objects/module.h"
+#include "src/objects/promise.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
@@ -28,6 +29,10 @@ class SourceTextModule
// kErrored.
SharedFunctionInfo GetSharedFunctionInfo() const;
+ // Whether or not this module is an async module. Set during module creation
+ // and does not change afterwards.
+ DECL_BOOLEAN_ACCESSORS(async)
+
// Get the SourceTextModuleInfo associated with the code.
inline SourceTextModuleInfo info() const;
@@ -41,6 +46,14 @@ class SourceTextModule
static int ImportIndex(int cell_index);
static int ExportIndex(int cell_index);
+ // Used by builtins to fulfill or reject the promise associated
+ // with async SourceTextModules.
+ static void AsyncModuleExecutionFulfilled(Isolate* isolate,
+ Handle<SourceTextModule> module);
+ static void AsyncModuleExecutionRejected(Isolate* isolate,
+ Handle<SourceTextModule> module,
+ Handle<Object> exception);
+
// Get the namespace object for [module_request] of [module]. If it doesn't
// exist yet, it is created.
static Handle<JSModuleNamespace> GetModuleNamespace(
@@ -54,12 +67,54 @@ class SourceTextModule
friend class Factory;
friend class Module;
+ // Appends a tuple of module and generator to the async parent modules
+ // ArrayList.
+ inline void AddAsyncParentModule(Isolate* isolate,
+ Handle<SourceTextModule> module);
+
+ // Returns a SourceTextModule, the
+ // ith parent in depth first traversal order of a given async child.
+ inline Handle<SourceTextModule> GetAsyncParentModule(Isolate* isolate,
+ int index);
+
+ // Returns the number of async parent modules for a given async child.
+ inline int AsyncParentModuleCount();
+
+ inline bool HasPendingAsyncDependencies();
+ inline void IncrementPendingAsyncDependencies();
+ inline void DecrementPendingAsyncDependencies();
+
// TODO(neis): Don't store those in the module object?
DECL_INT_ACCESSORS(dfs_index)
DECL_INT_ACCESSORS(dfs_ancestor_index)
- // Helpers for Instantiate and Evaluate.
+ // Storage for boolean flags.
+ DECL_INT_ACCESSORS(flags)
+
+ // Bits for flags.
+ static const int kAsyncBit = 0;
+ static const int kAsyncEvaluatingBit = 1;
+
+ // async_evaluating, top_level_capability, pending_async_dependencies, and
+ // async_parent_modules are used exclusively during evaluation of async
+ // modules and the modules which depend on them.
+ //
+ // Whether or not this module is async and evaluating or currently evaluating
+ // an async child.
+ DECL_BOOLEAN_ACCESSORS(async_evaluating)
+
+ // The top level promise capability of this module. Will only be defined
+ // for cycle roots.
+ DECL_ACCESSORS(top_level_capability, HeapObject)
+
+ // The number of currently evaluating async dependencies of this module.
+ DECL_INT_ACCESSORS(pending_async_dependencies)
+
+ // The parent modules of a given async dependency, use async_parent_modules()
+ // to retrieve the ArrayList representation.
+ DECL_ACCESSORS(async_parent_modules, ArrayList)
+ // Helpers for Instantiate and Evaluate.
static void CreateExport(Isolate* isolate, Handle<SourceTextModule> module,
int cell_index, Handle<FixedArray> names);
static void CreateIndirectExport(Isolate* isolate,
@@ -95,7 +150,16 @@ class SourceTextModule
Handle<SourceTextModule> module, Zone* zone,
UnorderedModuleSet* visited);
+ // Implementation of spec concrete method Evaluate.
+ static V8_WARN_UNUSED_RESULT MaybeHandle<Object> EvaluateMaybeAsync(
+ Isolate* isolate, Handle<SourceTextModule> module);
+
+ // Continued implementation of spec concrete method Evaluate.
static V8_WARN_UNUSED_RESULT MaybeHandle<Object> Evaluate(
+ Isolate* isolate, Handle<SourceTextModule> module);
+
+ // Implementation of spec abstract operation InnerModuleEvaluation.
+ static V8_WARN_UNUSED_RESULT MaybeHandle<Object> InnerModuleEvaluation(
Isolate* isolate, Handle<SourceTextModule> module,
ZoneForwardList<Handle<SourceTextModule>>* stack, unsigned* dfs_index);
@@ -103,6 +167,24 @@ class SourceTextModule
Isolate* isolate, Handle<SourceTextModule> module,
ZoneForwardList<Handle<SourceTextModule>>* stack, Status new_status);
+ // Implementation of spec GetAsyncCycleRoot.
+ static V8_WARN_UNUSED_RESULT Handle<SourceTextModule> GetAsyncCycleRoot(
+ Isolate* isolate, Handle<SourceTextModule> module);
+
+ // Implementation of spec ExecuteModule is broken up into
+ // InnerExecuteAsyncModule for asynchronous modules and ExecuteModule
+ // for synchronous modules.
+ static V8_WARN_UNUSED_RESULT MaybeHandle<Object> InnerExecuteAsyncModule(
+ Isolate* isolate, Handle<SourceTextModule> module,
+ Handle<JSPromise> capability);
+
+ static V8_WARN_UNUSED_RESULT MaybeHandle<Object> ExecuteModule(
+ Isolate* isolate, Handle<SourceTextModule> module);
+
+ // Implementation of spec ExecuteAsyncModule.
+ static void ExecuteAsyncModule(Isolate* isolate,
+ Handle<SourceTextModule> module);
+
static void Reset(Isolate* isolate, Handle<SourceTextModule> module);
TQ_OBJECT_CONSTRUCTORS(SourceTextModule)
@@ -169,9 +251,10 @@ class SourceTextModuleInfoEntry
DECL_INT_ACCESSORS(end_pos)
static Handle<SourceTextModuleInfoEntry> New(
- Isolate* isolate, Handle<HeapObject> export_name,
- Handle<HeapObject> local_name, Handle<HeapObject> import_name,
- int module_request, int cell_index, int beg_pos, int end_pos);
+ Isolate* isolate, Handle<PrimitiveHeapObject> export_name,
+ Handle<PrimitiveHeapObject> local_name,
+ Handle<PrimitiveHeapObject> import_name, int module_request,
+ int cell_index, int beg_pos, int end_pos);
TQ_OBJECT_CONSTRUCTORS(SourceTextModuleInfoEntry)
};
diff --git a/deps/v8/src/objects/stack-frame-info.cc b/deps/v8/src/objects/stack-frame-info.cc
index 323c4b8fcb..040c6f7b32 100644
--- a/deps/v8/src/objects/stack-frame-info.cc
+++ b/deps/v8/src/objects/stack-frame-info.cc
@@ -299,10 +299,8 @@ void AppendMethodCall(Isolate* isolate, Handle<StackTraceFrame> frame,
}
}
-void SerializeJSStackFrame(
- Isolate* isolate, Handle<StackTraceFrame> frame,
- IncrementalStringBuilder& builder // NOLINT(runtime/references)
-) {
+void SerializeJSStackFrame(Isolate* isolate, Handle<StackTraceFrame> frame,
+ IncrementalStringBuilder* builder) {
Handle<Object> function_name = StackTraceFrame::GetFunctionName(frame);
const bool is_toplevel = StackTraceFrame::IsToplevel(frame);
@@ -316,96 +314,91 @@ void SerializeJSStackFrame(
const bool is_method_call = !(is_toplevel || is_constructor);
if (is_async) {
- builder.AppendCString("async ");
+ builder->AppendCString("async ");
}
if (is_promise_all) {
- builder.AppendCString("Promise.all (index ");
- builder.AppendInt(StackTraceFrame::GetPromiseAllIndex(frame));
- builder.AppendCString(")");
+ builder->AppendCString("Promise.all (index ");
+ builder->AppendInt(StackTraceFrame::GetPromiseAllIndex(frame));
+ builder->AppendCString(")");
return;
}
if (is_method_call) {
- AppendMethodCall(isolate, frame, &builder);
+ AppendMethodCall(isolate, frame, builder);
} else if (is_constructor) {
- builder.AppendCString("new ");
+ builder->AppendCString("new ");
if (IsNonEmptyString(function_name)) {
- builder.AppendString(Handle<String>::cast(function_name));
+ builder->AppendString(Handle<String>::cast(function_name));
} else {
- builder.AppendCString("<anonymous>");
+ builder->AppendCString("<anonymous>");
}
} else if (IsNonEmptyString(function_name)) {
- builder.AppendString(Handle<String>::cast(function_name));
+ builder->AppendString(Handle<String>::cast(function_name));
} else {
- AppendFileLocation(isolate, frame, &builder);
+ AppendFileLocation(isolate, frame, builder);
return;
}
- builder.AppendCString(" (");
- AppendFileLocation(isolate, frame, &builder);
- builder.AppendCString(")");
+ builder->AppendCString(" (");
+ AppendFileLocation(isolate, frame, builder);
+ builder->AppendCString(")");
}
-void SerializeAsmJsWasmStackFrame(
- Isolate* isolate, Handle<StackTraceFrame> frame,
- IncrementalStringBuilder& builder // NOLINT(runtime/references)
-) {
+void SerializeAsmJsWasmStackFrame(Isolate* isolate,
+ Handle<StackTraceFrame> frame,
+ IncrementalStringBuilder* builder) {
// The string should look exactly as the respective javascript frame string.
// Keep this method in line to
// JSStackFrame::ToString(IncrementalStringBuilder&).
Handle<Object> function_name = StackTraceFrame::GetFunctionName(frame);
if (IsNonEmptyString(function_name)) {
- builder.AppendString(Handle<String>::cast(function_name));
- builder.AppendCString(" (");
+ builder->AppendString(Handle<String>::cast(function_name));
+ builder->AppendCString(" (");
}
- AppendFileLocation(isolate, frame, &builder);
+ AppendFileLocation(isolate, frame, builder);
- if (IsNonEmptyString(function_name)) builder.AppendCString(")");
+ if (IsNonEmptyString(function_name)) builder->AppendCString(")");
return;
}
-void SerializeWasmStackFrame(
- Isolate* isolate, Handle<StackTraceFrame> frame,
- IncrementalStringBuilder& builder // NOLINT(runtime/references)
-) {
+void SerializeWasmStackFrame(Isolate* isolate, Handle<StackTraceFrame> frame,
+ IncrementalStringBuilder* builder) {
Handle<Object> module_name = StackTraceFrame::GetWasmModuleName(frame);
Handle<Object> function_name = StackTraceFrame::GetFunctionName(frame);
const bool has_name = !module_name->IsNull() || !function_name->IsNull();
if (has_name) {
if (module_name->IsNull()) {
- builder.AppendString(Handle<String>::cast(function_name));
+ builder->AppendString(Handle<String>::cast(function_name));
} else {
- builder.AppendString(Handle<String>::cast(module_name));
+ builder->AppendString(Handle<String>::cast(module_name));
if (!function_name->IsNull()) {
- builder.AppendCString(".");
- builder.AppendString(Handle<String>::cast(function_name));
+ builder->AppendCString(".");
+ builder->AppendString(Handle<String>::cast(function_name));
}
}
- builder.AppendCString(" (");
+ builder->AppendCString(" (");
}
const int wasm_func_index = StackTraceFrame::GetLineNumber(frame);
- builder.AppendCString("wasm-function[");
- builder.AppendInt(wasm_func_index);
- builder.AppendCString("]:");
+ builder->AppendCString("wasm-function[");
+ builder->AppendInt(wasm_func_index);
+ builder->AppendCString("]:");
char buffer[16];
SNPrintF(ArrayVector(buffer), "0x%x",
StackTraceFrame::GetColumnNumber(frame));
- builder.AppendCString(buffer);
+ builder->AppendCString(buffer);
- if (has_name) builder.AppendCString(")");
+ if (has_name) builder->AppendCString(")");
}
} // namespace
-void SerializeStackTraceFrame(
- Isolate* isolate, Handle<StackTraceFrame> frame,
- IncrementalStringBuilder& builder // NOLINT(runtime/references)
-) {
+void SerializeStackTraceFrame(Isolate* isolate, Handle<StackTraceFrame> frame,
+ IncrementalStringBuilder* builder) {
// Ordering here is important, as AsmJs frames are also marked as Wasm.
if (StackTraceFrame::IsAsmJsWasm(frame)) {
SerializeAsmJsWasmStackFrame(isolate, frame, builder);
@@ -419,7 +412,7 @@ void SerializeStackTraceFrame(
MaybeHandle<String> SerializeStackTraceFrame(Isolate* isolate,
Handle<StackTraceFrame> frame) {
IncrementalStringBuilder builder(isolate);
- SerializeStackTraceFrame(isolate, frame, builder);
+ SerializeStackTraceFrame(isolate, frame, &builder);
return builder.Finish();
}
diff --git a/deps/v8/src/objects/stack-frame-info.h b/deps/v8/src/objects/stack-frame-info.h
index 7c4918a3c6..54b64b6118 100644
--- a/deps/v8/src/objects/stack-frame-info.h
+++ b/deps/v8/src/objects/stack-frame-info.h
@@ -124,10 +124,8 @@ Handle<FrameArray> GetFrameArrayFromStackTrace(Isolate* isolate,
Handle<FixedArray> stack_trace);
class IncrementalStringBuilder;
-void SerializeStackTraceFrame(
- Isolate* isolate, Handle<StackTraceFrame> frame,
- IncrementalStringBuilder& builder // NOLINT(runtime/references)
-);
+void SerializeStackTraceFrame(Isolate* isolate, Handle<StackTraceFrame> frame,
+ IncrementalStringBuilder* builder);
MaybeHandle<String> SerializeStackTraceFrame(Isolate* isolate,
Handle<StackTraceFrame> frame);
diff --git a/deps/v8/src/objects/string-inl.h b/deps/v8/src/objects/string-inl.h
index 083928d211..b4aea68cb1 100644
--- a/deps/v8/src/objects/string-inl.h
+++ b/deps/v8/src/objects/string-inl.h
@@ -778,6 +778,14 @@ bool String::AsArrayIndex(uint32_t* index) {
return SlowAsArrayIndex(index);
}
+bool String::AsIntegerIndex(size_t* index) {
+ uint32_t field = hash_field();
+ if (IsHashFieldComputed(field) && (field & kIsNotIntegerIndexMask)) {
+ return false;
+ }
+ return SlowAsIntegerIndex(index);
+}
+
SubStringRange::SubStringRange(String string,
const DisallowHeapAllocation& no_gc, int first,
int length)
diff --git a/deps/v8/src/objects/string.cc b/deps/v8/src/objects/string.cc
index 41de3aef04..a1eb7f4310 100644
--- a/deps/v8/src/objects/string.cc
+++ b/deps/v8/src/objects/string.cc
@@ -113,7 +113,10 @@ void String::MakeThin(Isolate* isolate, String internalized) {
bool has_pointers = StringShape(*this).IsIndirect();
int old_size = this->Size();
- isolate->heap()->NotifyObjectLayoutChange(*this, old_size, no_gc);
+ // Slot invalidation is not necessary here: ThinString only stores tagged
+ // value, so it can't store an untagged value in a recorded slot.
+ isolate->heap()->NotifyObjectLayoutChange(*this, no_gc,
+ InvalidateRecordedSlots::kNo);
bool one_byte = internalized.IsOneByteRepresentation();
Handle<Map> map = one_byte ? isolate->factory()->thin_one_byte_string_map()
: isolate->factory()->thin_string_map();
@@ -158,7 +161,8 @@ bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
bool has_pointers = StringShape(*this).IsIndirect();
if (has_pointers) {
- isolate->heap()->NotifyObjectLayoutChange(*this, size, no_allocation);
+ isolate->heap()->NotifyObjectLayoutChange(*this, no_allocation,
+ InvalidateRecordedSlots::kYes);
}
// Morph the string to an external string by replacing the map and
// reinitializing the fields. This won't work if the space the existing
@@ -184,10 +188,6 @@ bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
isolate->heap()->CreateFillerObjectAt(
this->address() + new_size, size - new_size,
has_pointers ? ClearRecordedSlots::kYes : ClearRecordedSlots::kNo);
- if (has_pointers) {
- isolate->heap()->ClearRecordedSlotRange(this->address(),
- this->address() + new_size);
- }
// We are storing the new map using release store after creating a filler for
// the left-over space to avoid races with the sweeper thread.
@@ -232,7 +232,8 @@ bool String::MakeExternal(v8::String::ExternalOneByteStringResource* resource) {
bool has_pointers = StringShape(*this).IsIndirect();
if (has_pointers) {
- isolate->heap()->NotifyObjectLayoutChange(*this, size, no_allocation);
+ isolate->heap()->NotifyObjectLayoutChange(*this, no_allocation,
+ InvalidateRecordedSlots::kYes);
}
// Morph the string to an external string by replacing the map and
// reinitializing the fields. This won't work if the space the existing
@@ -257,10 +258,6 @@ bool String::MakeExternal(v8::String::ExternalOneByteStringResource* resource) {
isolate->heap()->CreateFillerObjectAt(
this->address() + new_size, size - new_size,
has_pointers ? ClearRecordedSlots::kYes : ClearRecordedSlots::kNo);
- if (has_pointers) {
- isolate->heap()->ClearRecordedSlotRange(this->address(),
- this->address() + new_size);
- }
// We are storing the new map using release store after creating a filler for
// the left-over space to avoid races with the sweeper thread.
@@ -598,9 +595,8 @@ void String::WriteToFlat(String src, sinkchar* sink, int f, int t) {
String source = src;
int from = f;
int to = t;
- while (true) {
+ while (from < to) {
DCHECK_LE(0, from);
- DCHECK_LE(from, to);
DCHECK_LE(to, source.length());
switch (StringShape(source).full_representation_tag()) {
case kOneByteStringTag | kExternalStringTag: {
@@ -678,6 +674,7 @@ void String::WriteToFlat(String src, sinkchar* sink, int f, int t) {
break;
}
}
+ DCHECK_EQ(from, to);
}
template <typename SourceChar>
@@ -1358,25 +1355,39 @@ uint32_t String::ComputeAndSetHash() {
return result;
}
-bool String::ComputeArrayIndex(uint32_t* index) {
+bool String::SlowAsArrayIndex(uint32_t* index) {
+ DisallowHeapAllocation no_gc;
int length = this->length();
+ if (length <= kMaxCachedArrayIndexLength) {
+ Hash(); // Force computation of hash code.
+ uint32_t field = hash_field();
+ if ((field & kIsNotArrayIndexMask) != 0) return false;
+ *index = ArrayIndexValueBits::decode(field);
+ return true;
+ }
if (length == 0 || length > kMaxArrayIndexSize) return false;
StringCharacterStream stream(*this);
return StringToArrayIndex(&stream, index);
}
-bool String::SlowAsArrayIndex(uint32_t* index) {
+bool String::SlowAsIntegerIndex(size_t* index) {
DisallowHeapAllocation no_gc;
- if (length() <= kMaxCachedArrayIndexLength) {
- Hash(); // force computation of hash code
+ int length = this->length();
+ if (length <= kMaxCachedArrayIndexLength) {
+ Hash(); // Force computation of hash code.
uint32_t field = hash_field();
- if ((field & kIsNotArrayIndexMask) != 0) return false;
- // Isolate the array index form the full hash field.
+ if ((field & kIsNotArrayIndexMask) != 0) {
+ // If it was short but it's not an array index, then it can't be an
+ // integer index either.
+ DCHECK_NE(0, field & kIsNotIntegerIndexMask);
+ return false;
+ }
*index = ArrayIndexValueBits::decode(field);
return true;
- } else {
- return ComputeArrayIndex(index);
}
+ if (length == 0 || length > kMaxIntegerIndexSize) return false;
+ StringCharacterStream stream(*this);
+ return StringToArrayIndex(&stream, index);
}
void String::PrintOn(FILE* file) {
diff --git a/deps/v8/src/objects/string.h b/deps/v8/src/objects/string.h
index 27bd7e8765..fcdf75a968 100644
--- a/deps/v8/src/objects/string.h
+++ b/deps/v8/src/objects/string.h
@@ -5,6 +5,8 @@
#ifndef V8_OBJECTS_STRING_H_
#define V8_OBJECTS_STRING_H_
+#include <memory>
+
#include "src/base/bits.h"
#include "src/base/export-template.h"
#include "src/objects/instance-type.h"
@@ -306,8 +308,6 @@ class String : public TorqueGeneratedString<String, Name> {
RobustnessFlag robustness_flag = FAST_STRING_TRAVERSAL,
int* length_output = nullptr);
- bool ComputeArrayIndex(uint32_t* index);
-
// Externalization.
V8_EXPORT_PRIVATE bool MakeExternal(
v8::String::ExternalStringResource* resource);
@@ -316,8 +316,12 @@ class String : public TorqueGeneratedString<String, Name> {
bool SupportsExternalization();
// Conversion.
+ // "array index": an index allowed by the ES spec for JSArrays.
inline bool AsArrayIndex(uint32_t* index);
uint32_t inline ToValidIndex(Object number);
+ // "integer index": the string is the decimal representation of an
+ // integer in the range of a size_t. Useful for TypedArray accesses.
+ inline bool AsIntegerIndex(size_t* index);
// Trimming.
enum TrimMode { kTrim, kTrimStart, kTrimEnd };
@@ -448,6 +452,7 @@ class String : public TorqueGeneratedString<String, Name> {
// Slow case of AsArrayIndex.
V8_EXPORT_PRIVATE bool SlowAsArrayIndex(uint32_t* index);
+ V8_EXPORT_PRIVATE bool SlowAsIntegerIndex(size_t* index);
// Compute and set the hash code.
V8_EXPORT_PRIVATE uint32_t ComputeAndSetHash();
diff --git a/deps/v8/src/objects/struct-inl.h b/deps/v8/src/objects/struct-inl.h
index af0fed126b..34de889786 100644
--- a/deps/v8/src/objects/struct-inl.h
+++ b/deps/v8/src/objects/struct-inl.h
@@ -22,12 +22,10 @@ namespace internal {
TQ_OBJECT_CONSTRUCTORS_IMPL(Struct)
TQ_OBJECT_CONSTRUCTORS_IMPL(Tuple2)
TQ_OBJECT_CONSTRUCTORS_IMPL(Tuple3)
-OBJECT_CONSTRUCTORS_IMPL(AccessorPair, Struct)
+TQ_OBJECT_CONSTRUCTORS_IMPL(AccessorPair)
TQ_OBJECT_CONSTRUCTORS_IMPL(ClassPositions)
-CAST_ACCESSOR(AccessorPair)
-
void Struct::InitializeBody(int object_size) {
Object value = GetReadOnlyRoots().undefined_value();
for (int offset = kHeaderSize; offset < object_size; offset += kTaggedSize) {
@@ -35,9 +33,6 @@ void Struct::InitializeBody(int object_size) {
}
}
-ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
-ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
-
TQ_SMI_ACCESSORS(ClassPositions, start)
TQ_SMI_ACCESSORS(ClassPositions, end)
diff --git a/deps/v8/src/objects/struct.h b/deps/v8/src/objects/struct.h
index c9372d9ada..f786c4711a 100644
--- a/deps/v8/src/objects/struct.h
+++ b/deps/v8/src/objects/struct.h
@@ -16,12 +16,13 @@ namespace v8 {
namespace internal {
// An abstract superclass, a marker class really, for simple structure classes.
-// It doesn't carry much functionality but allows struct classes to be
+// It doesn't carry any functionality but allows struct classes to be
// identified in the type system.
class Struct : public TorqueGeneratedStruct<Struct, HeapObject> {
public:
inline void InitializeBody(int object_size);
void BriefPrintDetails(std::ostream& os);
+ STATIC_ASSERT(kHeaderSize == HeapObject::kHeaderSize);
TQ_OBJECT_CONSTRUCTORS(Struct)
};
@@ -46,13 +47,8 @@ class Tuple3 : public TorqueGeneratedTuple3<Tuple3, Tuple2> {
// * a FunctionTemplateInfo: a real (lazy) accessor
// * undefined: considered an accessor by the spec, too, strangely enough
// * null: an accessor which has not been set
-class AccessorPair : public Struct {
+class AccessorPair : public TorqueGeneratedAccessorPair<AccessorPair, Struct> {
public:
- DECL_ACCESSORS(getter, Object)
- DECL_ACCESSORS(setter, Object)
-
- DECL_CAST(AccessorPair)
-
static Handle<AccessorPair> Copy(Isolate* isolate, Handle<AccessorPair> pair);
inline Object get(AccessorComponent component);
@@ -71,13 +67,8 @@ class AccessorPair : public Struct {
// Dispatched behavior.
DECL_PRINTER(AccessorPair)
- DECL_VERIFIER(AccessorPair)
-
- // Layout description.
- DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize,
- TORQUE_GENERATED_ACCESSOR_PAIR_FIELDS)
- OBJECT_CONSTRUCTORS(AccessorPair, Struct);
+ TQ_OBJECT_CONSTRUCTORS(AccessorPair)
};
class ClassPositions
diff --git a/deps/v8/src/objects/synthetic-module.cc b/deps/v8/src/objects/synthetic-module.cc
index 850721ac99..58e0c1b58c 100644
--- a/deps/v8/src/objects/synthetic-module.cc
+++ b/deps/v8/src/objects/synthetic-module.cc
@@ -17,36 +17,16 @@ namespace internal {
// Implements SetSyntheticModuleBinding:
// https://heycam.github.io/webidl/#setsyntheticmoduleexport
-Maybe<bool> SyntheticModule::SetExport(Isolate* isolate,
- Handle<SyntheticModule> module,
- Handle<String> export_name,
- Handle<Object> export_value) {
+void SyntheticModule::SetExport(Isolate* isolate,
+ Handle<SyntheticModule> module,
+ Handle<String> export_name,
+ Handle<Object> export_value) {
Handle<ObjectHashTable> exports(module->exports(), isolate);
Handle<Object> export_object(exports->Lookup(export_name), isolate);
-
- if (!export_object->IsCell()) {
- isolate->Throw(*isolate->factory()->NewReferenceError(
- MessageTemplate::kModuleExportUndefined, export_name));
- return Nothing<bool>();
- }
-
+ CHECK(export_object->IsCell());
Handle<Cell> export_cell(Handle<Cell>::cast(export_object));
// Spec step 2: Set the mutable binding of export_name to export_value
export_cell->set_value(*export_value);
-
- return Just(true);
-}
-
-void SyntheticModule::SetExportStrict(Isolate* isolate,
- Handle<SyntheticModule> module,
- Handle<String> export_name,
- Handle<Object> export_value) {
- Handle<ObjectHashTable> exports(module->exports(), isolate);
- Handle<Object> export_object(exports->Lookup(export_name), isolate);
- CHECK(export_object->IsCell());
- Maybe<bool> set_export_result =
- SetExport(isolate, module, export_name, export_value);
- CHECK(set_export_result.FromJust());
}
// Implements Synthetic Module Record's ResolveExport concrete method:
@@ -116,7 +96,7 @@ MaybeHandle<Object> SyntheticModule::Evaluate(Isolate* isolate,
Utils::ToLocal(Handle<Module>::cast(module)))
.ToLocal(&result)) {
isolate->PromoteScheduledException();
- module->RecordError(isolate);
+ module->RecordErrorUsingPendingException(isolate);
return MaybeHandle<Object>();
}
diff --git a/deps/v8/src/objects/synthetic-module.h b/deps/v8/src/objects/synthetic-module.h
index 77a6eed276..6f3bb0438e 100644
--- a/deps/v8/src/objects/synthetic-module.h
+++ b/deps/v8/src/objects/synthetic-module.h
@@ -24,21 +24,9 @@ class SyntheticModule
DECL_VERIFIER(SyntheticModule)
DECL_PRINTER(SyntheticModule)
- // Set module's exported value for the specified export_name to the specified
- // export_value. An error will be thrown if export_name is not one
- // of the export_names that were supplied during module construction.
- // Returns Just(true) on success, Nothing<bool>() if an error was thrown.
- static Maybe<bool> SetExport(Isolate* isolate, Handle<SyntheticModule> module,
- Handle<String> export_name,
- Handle<Object> export_value);
- // The following redundant method should be deleted when the deprecated
- // version of v8::SetSyntheticModuleExport is removed. It differs from
- // SetExport in that it crashes rather than throwing an error if the caller
- // attempts to set an export_name that was not present during construction of
- // the module.
- static void SetExportStrict(Isolate* isolate, Handle<SyntheticModule> module,
- Handle<String> export_name,
- Handle<Object> export_value);
+ static void SetExport(Isolate* isolate, Handle<SyntheticModule> module,
+ Handle<String> export_name,
+ Handle<Object> export_value);
using BodyDescriptor = SubclassBodyDescriptor<
Module::BodyDescriptor,
diff --git a/deps/v8/src/objects/transitions-inl.h b/deps/v8/src/objects/transitions-inl.h
index 048774f49b..5694d66d94 100644
--- a/deps/v8/src/objects/transitions-inl.h
+++ b/deps/v8/src/objects/transitions-inl.h
@@ -64,6 +64,10 @@ Name TransitionArray::GetKey(int transition_number) {
Get(ToKeyIndex(transition_number))->GetHeapObjectAssumeStrong());
}
+Name TransitionArray::GetKey(InternalIndex index) {
+ return GetKey(index.as_int());
+}
+
Name TransitionsAccessor::GetKey(int transition_number) {
switch (encoding()) {
case kPrototypeInfo:
@@ -95,7 +99,7 @@ HeapObjectSlot TransitionArray::GetTargetSlot(int transition_number) {
// static
PropertyDetails TransitionsAccessor::GetTargetDetails(Name name, Map target) {
DCHECK(!IsSpecialTransition(name.GetReadOnlyRoots(), name));
- int descriptor = target.LastAdded();
+ InternalIndex descriptor = target.LastAdded();
DescriptorArray descriptors = target.instance_descriptors();
// Transitions are allowed only for the last added property.
DCHECK(descriptors.GetKey(descriptor).Equals(name));
@@ -108,7 +112,7 @@ PropertyDetails TransitionsAccessor::GetSimpleTargetDetails(Map transition) {
// static
Name TransitionsAccessor::GetSimpleTransitionKey(Map transition) {
- int descriptor = transition.LastAdded();
+ InternalIndex descriptor = transition.LastAdded();
return transition.instance_descriptors().GetKey(descriptor);
}
diff --git a/deps/v8/src/objects/transitions.cc b/deps/v8/src/objects/transitions.cc
index 843b790b7d..e0ba40ce7d 100644
--- a/deps/v8/src/objects/transitions.cc
+++ b/deps/v8/src/objects/transitions.cc
@@ -247,7 +247,7 @@ bool TransitionsAccessor::CanHaveMoreTransitions() {
bool TransitionsAccessor::IsMatchingMap(Map target, Name name,
PropertyKind kind,
PropertyAttributes attributes) {
- int descriptor = target.LastAdded();
+ InternalIndex descriptor = target.LastAdded();
DescriptorArray descriptors = target.instance_descriptors();
Name key = descriptors.GetKey(descriptor);
if (key != name) return false;
@@ -296,8 +296,7 @@ Handle<WeakFixedArray> TransitionArray::GrowPrototypeTransitionArray(
new_capacity = Min(kMaxCachedPrototypeTransitions, new_capacity);
DCHECK_GT(new_capacity, capacity);
int grow_by = new_capacity - capacity;
- array = isolate->factory()->CopyWeakFixedArrayAndGrow(array, grow_by,
- AllocationType::kOld);
+ array = isolate->factory()->CopyWeakFixedArrayAndGrow(array, grow_by);
if (capacity < 0) {
// There was no prototype transitions array before, so the size
// couldn't be copied. Initialize it explicitly.
diff --git a/deps/v8/src/objects/transitions.h b/deps/v8/src/objects/transitions.h
index f21e8cd54e..5a7db13e51 100644
--- a/deps/v8/src/objects/transitions.h
+++ b/deps/v8/src/objects/transitions.h
@@ -221,6 +221,7 @@ class TransitionArray : public WeakFixedArray {
Map* target);
// Required for templatized Search interface.
+ inline Name GetKey(InternalIndex index);
static constexpr int kNotFound = -1;
inline Name GetSortedKey(int transition_number);
diff --git a/deps/v8/src/objects/value-serializer.cc b/deps/v8/src/objects/value-serializer.cc
index 3b3506fbb9..af5cdc57ea 100644
--- a/deps/v8/src/objects/value-serializer.cc
+++ b/deps/v8/src/objects/value-serializer.cc
@@ -52,8 +52,6 @@ static const uint32_t kLatestVersion = 13;
static_assert(kLatestVersion == v8::CurrentValueSerializerFormatVersion(),
"Exported format version must match latest version.");
-static const int kPretenureThreshold = 100 * KB;
-
template <typename T>
static size_t BytesNeededForVarint(T value) {
static_assert(std::is_integral<T>::value && std::is_unsigned<T>::value,
@@ -554,7 +552,7 @@ Maybe<bool> ValueSerializer::WriteJSReceiver(Handle<JSReceiver> receiver) {
case JS_PRIMITIVE_WRAPPER_TYPE:
return WriteJSPrimitiveWrapper(
Handle<JSPrimitiveWrapper>::cast(receiver));
- case JS_REGEXP_TYPE:
+ case JS_REG_EXP_TYPE:
WriteJSRegExp(JSRegExp::cast(*receiver));
return ThrowIfOutOfMemory();
case JS_MAP_TYPE:
@@ -568,7 +566,7 @@ Maybe<bool> ValueSerializer::WriteJSReceiver(Handle<JSReceiver> receiver) {
return WriteJSArrayBufferView(JSArrayBufferView::cast(*receiver));
case JS_ERROR_TYPE:
return WriteJSError(Handle<JSObject>::cast(receiver));
- case WASM_MODULE_TYPE: {
+ case WASM_MODULE_OBJECT_TYPE: {
auto enabled_features = wasm::WasmFeaturesFromIsolate(isolate_);
if (!FLAG_wasm_disable_structured_cloning || enabled_features.threads) {
// Only write WebAssembly modules if not disabled by a flag.
@@ -576,7 +574,7 @@ Maybe<bool> ValueSerializer::WriteJSReceiver(Handle<JSReceiver> receiver) {
}
break;
}
- case WASM_MEMORY_TYPE: {
+ case WASM_MEMORY_OBJECT_TYPE: {
auto enabled_features = wasm::WasmFeaturesFromIsolate(isolate_);
if (enabled_features.threads) {
return WriteWasmMemory(Handle<WasmMemoryObject>::cast(receiver));
@@ -604,7 +602,7 @@ Maybe<bool> ValueSerializer::WriteJSObject(Handle<JSObject> object) {
// map doesn't change.
uint32_t properties_written = 0;
bool map_changed = false;
- for (int i = 0; i < map->NumberOfOwnDescriptors(); i++) {
+ for (InternalIndex i : map->IterateOwnDescriptors()) {
Handle<Name> key(map->instance_descriptors().GetKey(i), isolate_);
if (!key->IsString()) continue;
PropertyDetails details = map->instance_descriptors().GetDetails(i);
@@ -1025,8 +1023,8 @@ Maybe<bool> ValueSerializer::WriteWasmMemory(Handle<WasmMemoryObject> object) {
return Nothing<bool>();
}
- isolate_->wasm_engine()->memory_tracker()->RegisterWasmMemoryAsShared(
- object, isolate_);
+ GlobalBackingStoreRegistry::Register(
+ object->array_buffer().GetBackingStore());
WriteTag(SerializationTag::kWasmMemoryTransfer);
WriteZigZag<int32_t>(object->maximum_pages());
@@ -1112,8 +1110,6 @@ ValueDeserializer::ValueDeserializer(Isolate* isolate,
delegate_(delegate),
position_(data.begin()),
end_(data.begin() + data.length()),
- allocation_(data.length() > kPretenureThreshold ? AllocationType::kOld
- : AllocationType::kYoung),
id_map_(isolate->global_handles()->Create(
ReadOnlyRoots(isolate_).empty_fixed_array())) {}
@@ -1302,19 +1298,17 @@ MaybeHandle<Object> ValueDeserializer::ReadObjectInternal() {
case SerializationTag::kInt32: {
Maybe<int32_t> number = ReadZigZag<int32_t>();
if (number.IsNothing()) return MaybeHandle<Object>();
- return isolate_->factory()->NewNumberFromInt(number.FromJust(),
- allocation_);
+ return isolate_->factory()->NewNumberFromInt(number.FromJust());
}
case SerializationTag::kUint32: {
Maybe<uint32_t> number = ReadVarint<uint32_t>();
if (number.IsNothing()) return MaybeHandle<Object>();
- return isolate_->factory()->NewNumberFromUint(number.FromJust(),
- allocation_);
+ return isolate_->factory()->NewNumberFromUint(number.FromJust());
}
case SerializationTag::kDouble: {
Maybe<double> number = ReadDouble();
if (number.IsNothing()) return MaybeHandle<Object>();
- return isolate_->factory()->NewNumber(number.FromJust(), allocation_);
+ return isolate_->factory()->NewNumber(number.FromJust());
}
case SerializationTag::kBigInt:
return ReadBigInt();
@@ -1398,8 +1392,7 @@ MaybeHandle<BigInt> ValueDeserializer::ReadBigInt() {
if (!ReadRawBytes(bytelength).To(&digits_storage)) {
return MaybeHandle<BigInt>();
}
- return BigInt::FromSerializedDigits(isolate_, bitfield, digits_storage,
- allocation_);
+ return BigInt::FromSerializedDigits(isolate_, bitfield, digits_storage);
}
MaybeHandle<String> ValueDeserializer::ReadUtf8String() {
@@ -1412,7 +1405,7 @@ MaybeHandle<String> ValueDeserializer::ReadUtf8String() {
return MaybeHandle<String>();
}
return isolate_->factory()->NewStringFromUtf8(
- Vector<const char>::cast(utf8_bytes), allocation_);
+ Vector<const char>::cast(utf8_bytes));
}
MaybeHandle<String> ValueDeserializer::ReadOneByteString() {
@@ -1424,7 +1417,7 @@ MaybeHandle<String> ValueDeserializer::ReadOneByteString() {
!ReadRawBytes(byte_length).To(&bytes)) {
return MaybeHandle<String>();
}
- return isolate_->factory()->NewStringFromOneByte(bytes, allocation_);
+ return isolate_->factory()->NewStringFromOneByte(bytes);
}
MaybeHandle<String> ValueDeserializer::ReadTwoByteString() {
@@ -1443,7 +1436,7 @@ MaybeHandle<String> ValueDeserializer::ReadTwoByteString() {
if (byte_length == 0) return isolate_->factory()->empty_string();
Handle<SeqTwoByteString> string;
if (!isolate_->factory()
- ->NewRawTwoByteString(byte_length / sizeof(uc16), allocation_)
+ ->NewRawTwoByteString(byte_length / sizeof(uc16))
.ToHandle(&string)) {
return MaybeHandle<String>();
}
@@ -1506,8 +1499,8 @@ MaybeHandle<JSObject> ValueDeserializer::ReadJSObject() {
uint32_t id = next_id_++;
HandleScope scope(isolate_);
- Handle<JSObject> object = isolate_->factory()->NewJSObject(
- isolate_->object_function(), allocation_);
+ Handle<JSObject> object =
+ isolate_->factory()->NewJSObject(isolate_->object_function());
AddObjectWithID(id, object);
uint32_t num_properties;
@@ -1532,8 +1525,8 @@ MaybeHandle<JSArray> ValueDeserializer::ReadSparseJSArray() {
uint32_t id = next_id_++;
HandleScope scope(isolate_);
- Handle<JSArray> array = isolate_->factory()->NewJSArray(
- 0, TERMINAL_FAST_ELEMENTS_KIND, allocation_);
+ Handle<JSArray> array =
+ isolate_->factory()->NewJSArray(0, TERMINAL_FAST_ELEMENTS_KIND);
JSArray::SetLength(array, length);
AddObjectWithID(id, array);
@@ -1569,8 +1562,7 @@ MaybeHandle<JSArray> ValueDeserializer::ReadDenseJSArray() {
uint32_t id = next_id_++;
HandleScope scope(isolate_);
Handle<JSArray> array = isolate_->factory()->NewJSArray(
- HOLEY_ELEMENTS, length, length, INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE,
- allocation_);
+ HOLEY_ELEMENTS, length, length, INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
AddObjectWithID(id, array);
Handle<FixedArray> elements(FixedArray::cast(array->elements()), isolate_);
@@ -1631,22 +1623,21 @@ MaybeHandle<JSPrimitiveWrapper> ValueDeserializer::ReadJSPrimitiveWrapper(
Handle<JSPrimitiveWrapper> value;
switch (tag) {
case SerializationTag::kTrueObject:
- value = Handle<JSPrimitiveWrapper>::cast(isolate_->factory()->NewJSObject(
- isolate_->boolean_function(), allocation_));
+ value = Handle<JSPrimitiveWrapper>::cast(
+ isolate_->factory()->NewJSObject(isolate_->boolean_function()));
value->set_value(ReadOnlyRoots(isolate_).true_value());
break;
case SerializationTag::kFalseObject:
- value = Handle<JSPrimitiveWrapper>::cast(isolate_->factory()->NewJSObject(
- isolate_->boolean_function(), allocation_));
+ value = Handle<JSPrimitiveWrapper>::cast(
+ isolate_->factory()->NewJSObject(isolate_->boolean_function()));
value->set_value(ReadOnlyRoots(isolate_).false_value());
break;
case SerializationTag::kNumberObject: {
double number;
if (!ReadDouble().To(&number)) return MaybeHandle<JSPrimitiveWrapper>();
- value = Handle<JSPrimitiveWrapper>::cast(isolate_->factory()->NewJSObject(
- isolate_->number_function(), allocation_));
- Handle<Object> number_object =
- isolate_->factory()->NewNumber(number, allocation_);
+ value = Handle<JSPrimitiveWrapper>::cast(
+ isolate_->factory()->NewJSObject(isolate_->number_function()));
+ Handle<Object> number_object = isolate_->factory()->NewNumber(number);
value->set_value(*number_object);
break;
}
@@ -1654,8 +1645,8 @@ MaybeHandle<JSPrimitiveWrapper> ValueDeserializer::ReadJSPrimitiveWrapper(
Handle<BigInt> bigint;
if (!ReadBigInt().ToHandle(&bigint))
return MaybeHandle<JSPrimitiveWrapper>();
- value = Handle<JSPrimitiveWrapper>::cast(isolate_->factory()->NewJSObject(
- isolate_->bigint_function(), allocation_));
+ value = Handle<JSPrimitiveWrapper>::cast(
+ isolate_->factory()->NewJSObject(isolate_->bigint_function()));
value->set_value(*bigint);
break;
}
@@ -1663,8 +1654,8 @@ MaybeHandle<JSPrimitiveWrapper> ValueDeserializer::ReadJSPrimitiveWrapper(
Handle<String> string;
if (!ReadString().ToHandle(&string))
return MaybeHandle<JSPrimitiveWrapper>();
- value = Handle<JSPrimitiveWrapper>::cast(isolate_->factory()->NewJSObject(
- isolate_->string_function(), allocation_));
+ value = Handle<JSPrimitiveWrapper>::cast(
+ isolate_->factory()->NewJSObject(isolate_->string_function()));
value->set_value(*string);
break;
}
@@ -1801,13 +1792,12 @@ MaybeHandle<JSArrayBuffer> ValueDeserializer::ReadJSArrayBuffer(
byte_length > static_cast<size_t>(end_ - position_)) {
return MaybeHandle<JSArrayBuffer>();
}
- const bool should_initialize = false;
- Handle<JSArrayBuffer> array_buffer = isolate_->factory()->NewJSArrayBuffer(
- SharedFlag::kNotShared, allocation_);
- if (!JSArrayBuffer::SetupAllocatingData(array_buffer, isolate_, byte_length,
- should_initialize)) {
- return MaybeHandle<JSArrayBuffer>();
- }
+ MaybeHandle<JSArrayBuffer> result =
+ isolate_->factory()->NewJSArrayBufferAndBackingStore(
+ byte_length, InitializedFlag::kUninitialized);
+ Handle<JSArrayBuffer> array_buffer;
+ if (!result.ToHandle(&array_buffer)) return result;
+
if (byte_length > 0) {
memcpy(array_buffer->backing_store(), position_, byte_length);
}
@@ -1871,8 +1861,7 @@ MaybeHandle<JSArrayBufferView> ValueDeserializer::ReadJSArrayBufferView(
return MaybeHandle<JSArrayBufferView>();
}
Handle<JSTypedArray> typed_array = isolate_->factory()->NewJSTypedArray(
- external_array_type, buffer, byte_offset, byte_length / element_size,
- allocation_);
+ external_array_type, buffer, byte_offset, byte_length / element_size);
AddObjectWithID(id, typed_array);
return typed_array;
}
@@ -2049,9 +2038,6 @@ MaybeHandle<WasmMemoryObject> ValueDeserializer::ReadWasmMemory() {
Handle<WasmMemoryObject> result =
WasmMemoryObject::New(isolate_, buffer, maximum_pages);
- isolate_->wasm_engine()->memory_tracker()->RegisterWasmMemoryAsShared(
- result, isolate_);
-
AddObjectWithID(id, result);
return result;
}
@@ -2081,9 +2067,10 @@ static void CommitProperties(Handle<JSObject> object, Handle<Map> map,
DisallowHeapAllocation no_gc;
DescriptorArray descriptors = object->map().instance_descriptors();
- for (unsigned i = 0; i < properties.size(); i++) {
+ for (InternalIndex i : InternalIndex::Range(properties.size())) {
// Initializing store.
- object->WriteToField(i, descriptors.GetDetails(i), *properties[i]);
+ object->WriteToField(i, descriptors.GetDetails(i),
+ *properties[i.raw_value()]);
}
}
@@ -2150,7 +2137,7 @@ Maybe<uint32_t> ValueDeserializer::ReadJSObjectProperties(
// (though generalization may be required), store the property value so
// that we can copy them all at once. Otherwise, stop transitioning.
if (transitioning) {
- int descriptor = static_cast<int>(properties.size());
+ InternalIndex descriptor(properties.size());
PropertyDetails details =
target->instance_descriptors().GetDetails(descriptor);
Representation expected_representation = details.representation();
@@ -2316,8 +2303,8 @@ ValueDeserializer::ReadObjectUsingEntireBufferForLegacyFormat() {
size_t begin_properties =
stack.size() - 2 * static_cast<size_t>(num_properties);
- Handle<JSObject> js_object = isolate_->factory()->NewJSObject(
- isolate_->object_function(), allocation_);
+ Handle<JSObject> js_object =
+ isolate_->factory()->NewJSObject(isolate_->object_function());
if (num_properties &&
!SetPropertiesFromKeyValuePairs(
isolate_, js_object, &stack[begin_properties], num_properties)
@@ -2344,8 +2331,8 @@ ValueDeserializer::ReadObjectUsingEntireBufferForLegacyFormat() {
return MaybeHandle<Object>();
}
- Handle<JSArray> js_array = isolate_->factory()->NewJSArray(
- 0, TERMINAL_FAST_ELEMENTS_KIND, allocation_);
+ Handle<JSArray> js_array =
+ isolate_->factory()->NewJSArray(0, TERMINAL_FAST_ELEMENTS_KIND);
JSArray::SetLength(js_array, length);
size_t begin_properties =
stack.size() - 2 * static_cast<size_t>(num_properties);
diff --git a/deps/v8/src/objects/value-serializer.h b/deps/v8/src/objects/value-serializer.h
index cc9bc1caea..839636ceef 100644
--- a/deps/v8/src/objects/value-serializer.h
+++ b/deps/v8/src/objects/value-serializer.h
@@ -298,7 +298,6 @@ class ValueDeserializer {
v8::ValueDeserializer::Delegate* const delegate_;
const uint8_t* position_;
const uint8_t* const end_;
- AllocationType allocation_;
uint32_t version_ = 0;
uint32_t next_id_ = 0;
bool expect_inline_wasm_ = false;