author    Michaël Zasso <targos@protonmail.com>  2018-03-07 08:54:53 +0100
committer Michaël Zasso <targos@protonmail.com>  2018-03-07 16:48:52 +0100
commit 88786fecff336342a56e6f2e7ff3b286be716e47 (patch)
tree   92e6ba5b8ac8dae1a058988d20c9d27bfa654390 /deps/v8/src/objects
parent 4e86f9b5ab83cbabf43839385bf383e6a7ef7d19 (diff)
deps: update V8 to 6.5.254.31

PR-URL: https://github.com/nodejs/node/pull/18453
Reviewed-By: James M Snell <jasnell@gmail.com>
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Reviewed-By: Yang Guo <yangguo@chromium.org>
Reviewed-By: Ali Ijaz Sheikh <ofrobots@google.com>
Reviewed-By: Michael Dawson <michael_dawson@ca.ibm.com>
Diffstat (limited to 'deps/v8/src/objects')
-rw-r--r--  deps/v8/src/objects/bigint.cc | 84
-rw-r--r--  deps/v8/src/objects/bigint.h | 25
-rw-r--r--  deps/v8/src/objects/code-inl.h | 34
-rw-r--r--  deps/v8/src/objects/code.h | 21
-rw-r--r--  deps/v8/src/objects/data-handler-inl.h | 41
-rw-r--r--  deps/v8/src/objects/data-handler.h | 63
-rw-r--r--  deps/v8/src/objects/debug-objects.h | 1
-rw-r--r--  deps/v8/src/objects/descriptor-array.h | 1
-rw-r--r--  deps/v8/src/objects/fixed-array-inl.h | 634
-rw-r--r--  deps/v8/src/objects/fixed-array.h | 601
-rw-r--r--  deps/v8/src/objects/hash-table-inl.h | 109
-rw-r--r--  deps/v8/src/objects/hash-table.h | 34
-rw-r--r--  deps/v8/src/objects/js-array-inl.h | 29
-rw-r--r--  deps/v8/src/objects/js-array.h | 4
-rw-r--r--  deps/v8/src/objects/js-collection-inl.h | 49
-rw-r--r--  deps/v8/src/objects/js-collection.h | 162
-rw-r--r--  deps/v8/src/objects/js-regexp.h | 15
-rw-r--r--  deps/v8/src/objects/literal-objects.h | 1
-rw-r--r--  deps/v8/src/objects/map-inl.h | 645
-rw-r--r--  deps/v8/src/objects/map.h | 151
-rw-r--r--  deps/v8/src/objects/module.cc | 179
-rw-r--r--  deps/v8/src/objects/module.h | 16
-rw-r--r--  deps/v8/src/objects/object-macros.h | 3
-rw-r--r--  deps/v8/src/objects/scope-info.h | 5
-rw-r--r--  deps/v8/src/objects/script-inl.h | 35
-rw-r--r--  deps/v8/src/objects/script.h | 23
-rw-r--r--  deps/v8/src/objects/shared-function-info-inl.h | 2
-rw-r--r--  deps/v8/src/objects/shared-function-info.h | 18
-rw-r--r--  deps/v8/src/objects/string-inl.h | 31
-rw-r--r--  deps/v8/src/objects/string.h | 7
30 files changed, 2775 insertions, 248 deletions
diff --git a/deps/v8/src/objects/bigint.cc b/deps/v8/src/objects/bigint.cc
index 85424600c0..df5f854395 100644
--- a/deps/v8/src/objects/bigint.cc
+++ b/deps/v8/src/objects/bigint.cc
@@ -185,7 +185,7 @@ MaybeHandle<MutableBigInt> MutableBigInt::New(Isolate* isolate, int length) {
result->set_length(length);
result->set_sign(false);
#if DEBUG
- result->InitializeDigits(length, 0xbf);
+ result->InitializeDigits(length, 0xBF);
#endif
return result;
}
@@ -304,7 +304,71 @@ MaybeHandle<BigInt> BigInt::BitwiseNot(Handle<BigInt> x) {
MaybeHandle<BigInt> BigInt::Exponentiate(Handle<BigInt> base,
Handle<BigInt> exponent) {
- UNIMPLEMENTED(); // TODO(jkummerow): Implement.
+ Isolate* isolate = base->GetIsolate();
+ // 1. If exponent is < 0, throw a RangeError exception.
+ if (exponent->sign()) {
+ THROW_NEW_ERROR(isolate,
+ NewRangeError(MessageTemplate::kBigIntNegativeExponent),
+ BigInt);
+ }
+ // 2. If base is 0n and exponent is 0n, return 1n.
+ if (exponent->is_zero()) {
+ return MutableBigInt::NewFromInt(isolate, 1);
+ }
+ // 3. Return a BigInt representing the mathematical value of base raised
+ // to the power exponent.
+ if (base->is_zero()) return base;
+ if (base->length() == 1 && base->digit(0) == 1) return base;
+ // For all bases >= 2, very large exponents would lead to unrepresentable
+ // results.
+ STATIC_ASSERT(kMaxLengthBits < std::numeric_limits<digit_t>::max());
+ if (exponent->length() > 1) {
+ THROW_NEW_ERROR(isolate, NewRangeError(MessageTemplate::kBigIntTooBig),
+ BigInt);
+ }
+ digit_t exp_value = exponent->digit(0);
+ if (exp_value == 1) return base;
+ if (exp_value >= kMaxLengthBits) {
+ THROW_NEW_ERROR(isolate, NewRangeError(MessageTemplate::kBigIntTooBig),
+ BigInt);
+ }
+ STATIC_ASSERT(kMaxLengthBits <= kMaxInt);
+ int n = static_cast<int>(exp_value);
+ if (base->length() == 1 && base->digit(0) == 2) {
+ // Fast path for 2^n.
+ int needed_digits = 1 + (n / kDigitBits);
+ Handle<MutableBigInt> result =
+ MutableBigInt::New(isolate, needed_digits).ToHandleChecked();
+ result->InitializeDigits(needed_digits);
+ // All bits are zero. Now set the n-th bit.
+ digit_t msd = static_cast<digit_t>(1) << (n % kDigitBits);
+ result->set_digit(needed_digits - 1, msd);
+ // Result is negative for odd powers of -2n.
+ if (base->sign()) result->set_sign((n & 1) != 0);
+ return MutableBigInt::MakeImmutable(result);
+ }
+ Handle<BigInt> result;
+ Handle<BigInt> running_square = base;
+ // This implicitly sets the result's sign correctly.
+ if (n & 1) result = base;
+ n >>= 1;
+ for (; n != 0; n >>= 1) {
+ if (!Multiply(running_square, running_square).ToHandle(&running_square)) {
+ THROW_NEW_ERROR(isolate, NewRangeError(MessageTemplate::kBigIntTooBig),
+ BigInt);
+ }
+ if (n & 1) {
+ if (result.is_null()) {
+ result = running_square;
+ } else {
+ if (!Multiply(result, running_square).ToHandle(&result)) {
+ THROW_NEW_ERROR(
+ isolate, NewRangeError(MessageTemplate::kBigIntTooBig), BigInt);
+ }
+ }
+ }
+ }
+ return result;
}
MaybeHandle<BigInt> BigInt::Multiply(Handle<BigInt> x, Handle<BigInt> y) {
@@ -1617,8 +1681,8 @@ Handle<BigInt> MutableBigInt::RightShiftByMaximum(Isolate* isolate, bool sign) {
Maybe<BigInt::digit_t> MutableBigInt::ToShiftAmount(Handle<BigIntBase> x) {
if (x->length() > 1) return Nothing<digit_t>();
digit_t value = x->digit(0);
- STATIC_ASSERT(kMaxLength * kDigitBits < std::numeric_limits<digit_t>::max());
- if (value > kMaxLength * kDigitBits) return Nothing<digit_t>();
+ STATIC_ASSERT(kMaxLengthBits < std::numeric_limits<digit_t>::max());
+ if (value > kMaxLengthBits) return Nothing<digit_t>();
return Just(value);
}
@@ -1864,12 +1928,13 @@ Handle<BigInt> BigInt::AsIntN(uint64_t n, Handle<BigInt> x) {
if (x->is_zero()) return x;
if (n == 0) return MutableBigInt::Zero(x->GetIsolate());
uint64_t needed_length = (n + kDigitBits - 1) / kDigitBits;
+ uint64_t x_length = static_cast<uint64_t>(x->length());
// If {x} has less than {n} bits, return it directly.
- if (static_cast<uint64_t>(x->length()) < needed_length) return x;
+ if (x_length < needed_length) return x;
DCHECK_LE(needed_length, kMaxInt);
digit_t top_digit = x->digit(static_cast<int>(needed_length) - 1);
digit_t compare_digit = static_cast<digit_t>(1) << ((n - 1) % kDigitBits);
- if (top_digit < compare_digit) return x;
+ if (x_length == needed_length && top_digit < compare_digit) return x;
// Otherwise we have to truncate (which is a no-op in the special case
// of x == -2^(n-1)), and determine the right sign. We also might have
// to subtract from 2^n to simulate having two's complement representation.
@@ -1946,8 +2011,11 @@ Handle<BigInt> MutableBigInt::TruncateToNBits(int n, Handle<BigInt> x) {
// The MSD might contain extra bits that we don't want.
digit_t msd = x->digit(last);
- int drop = kDigitBits - (n % kDigitBits);
- result->set_digit(last, (msd << drop) >> drop);
+ if (n % kDigitBits != 0) {
+ int drop = kDigitBits - (n % kDigitBits);
+ msd = (msd << drop) >> drop;
+ }
+ result->set_digit(last, msd);
result->set_sign(x->sign());
return MakeImmutable(result);
}
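
Note on the new Exponentiate() above: it is the classic square-and-multiply algorithm. The exponent is scanned bit by bit; a running value is squared at each step and multiplied into the result whenever the current bit is set, so base^n costs O(log n) BigInt multiplications rather than n. A minimal standalone sketch of the same loop shape over plain uint64_t (hypothetical helper; no overflow or RangeError handling, unlike the heap-allocated BigInt version, and the sign bookkeeping is omitted since the fast paths above treat base == +/-2 separately):

    #include <cstdint>

    // Square-and-multiply: scan the exponent's bits from least to most
    // significant, squaring a running value each step and folding it into
    // the result for every set bit. Sketch only; silently wraps on overflow.
    uint64_t Exponentiate(uint64_t base, uint64_t exp) {
      uint64_t result = 1;
      uint64_t running_square = base;
      for (; exp != 0; exp >>= 1) {
        if (exp & 1) result *= running_square;
        running_square *= running_square;
      }
      return result;
    }
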
diff --git a/deps/v8/src/objects/bigint.h b/deps/v8/src/objects/bigint.h
index de0daf495e..9e29a69b3b 100644
--- a/deps/v8/src/objects/bigint.h
+++ b/deps/v8/src/objects/bigint.h
@@ -24,13 +24,19 @@ class BigIntBase : public HeapObject {
return LengthBits::decode(static_cast<uint32_t>(bitfield));
}
- // The maximum length that the current implementation supports would be
- // kMaxInt / kDigitBits. However, we use a lower limit for now, because
- // raising it later is easier than lowering it.
- // Support up to 1 million bits.
- static const int kMaxLengthBits = 1024 * 1024;
+ // Increasing kMaxLength will require code changes.
+ static const int kMaxLengthBits = kMaxInt - kPointerSize * kBitsPerByte - 1;
static const int kMaxLength = kMaxLengthBits / (kPointerSize * kBitsPerByte);
+ static const int kLengthFieldBits = 30;
+ STATIC_ASSERT(kMaxLength <= ((1 << kLengthFieldBits) - 1));
+ class LengthBits : public BitField<int, 0, kLengthFieldBits> {};
+ class SignBits : public BitField<bool, LengthBits::kNext, 1> {};
+
+ static const int kBitfieldOffset = HeapObject::kHeaderSize;
+ static const int kDigitsOffset = kBitfieldOffset + kPointerSize;
+ static const int kHeaderSize = kDigitsOffset;
+
private:
friend class BigInt;
friend class MutableBigInt;
@@ -44,15 +50,6 @@ class BigIntBase : public HeapObject {
static const int kHalfDigitBits = kDigitBits / 2;
static const digit_t kHalfDigitMask = (1ull << kHalfDigitBits) - 1;
- static const int kBitfieldOffset = HeapObject::kHeaderSize;
- static const int kDigitsOffset = kBitfieldOffset + kPointerSize;
- static const int kHeaderSize = kDigitsOffset;
-
- static const int kLengthFieldBits = 20;
- STATIC_ASSERT(kMaxLength <= ((1 << kLengthFieldBits) - 1));
- class LengthBits : public BitField<int, 0, kLengthFieldBits> {};
- class SignBits : public BitField<bool, LengthBits::kNext, 1> {};
-
// sign() == true means negative.
inline bool sign() const {
intptr_t bitfield = READ_INTPTR_FIELD(this, kBitfieldOffset);
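
Note on the bigint.h changes above: the length bitfield grows from 20 to 30 bits (to accommodate the much larger kMaxLength), and LengthBits/SignBits plus the offset constants move to the public section. A simplified model of that bit packing, assuming V8's BitField semantics (sketch only; the real BitField template also provides kNext, kMax, update(), and friends):

    #include <cassert>
    #include <cstdint>

    // LengthBits occupies bits [0, 30); SignBits is the single bit above it.
    constexpr uint32_t kLengthFieldBits = 30;
    constexpr uint32_t kLengthMask = (1u << kLengthFieldBits) - 1;

    uint32_t EncodeBitfield(uint32_t length, bool sign) {
      assert(length <= kLengthMask);
      return length | (static_cast<uint32_t>(sign) << kLengthFieldBits);
    }

    uint32_t DecodeLength(uint32_t bitfield) { return bitfield & kLengthMask; }
    bool DecodeSign(uint32_t bitfield) {
      return ((bitfield >> kLengthFieldBits) & 1) != 0;
    }
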
diff --git a/deps/v8/src/objects/code-inl.h b/deps/v8/src/objects/code-inl.h
index 17cfa4f67b..4c3e7f0d97 100644
--- a/deps/v8/src/objects/code-inl.h
+++ b/deps/v8/src/objects/code-inl.h
@@ -290,14 +290,14 @@ Code::Kind Code::kind() const {
void Code::initialize_flags(Kind kind, bool has_unwinding_info,
bool is_turbofanned, int stack_slots) {
- CHECK_LE(stack_slots, StackSlotsField::kMax);
- DCHECK_IMPLIES(stack_slots != 0, is_turbofanned);
+ CHECK(0 <= stack_slots && stack_slots < StackSlotsField::kMax);
static_assert(Code::NUMBER_OF_KINDS <= KindField::kMax + 1, "field overflow");
uint32_t flags = HasUnwindingInfoField::encode(has_unwinding_info) |
KindField::encode(kind) |
IsTurbofannedField::encode(is_turbofanned) |
StackSlotsField::encode(stack_slots);
WRITE_UINT32_FIELD(this, kFlagsOffset, flags);
+ DCHECK_IMPLIES(stack_slots != 0, has_safepoint_info());
}
inline bool Code::is_interpreter_trampoline_builtin() const {
@@ -411,21 +411,25 @@ void Code::set_builtin_index(int index) {
bool Code::is_builtin() const { return builtin_index() != -1; }
-unsigned Code::stack_slots() const {
- DCHECK(is_turbofanned());
+bool Code::has_safepoint_info() const {
+ return is_turbofanned() || is_wasm_code();
+}
+
+int Code::stack_slots() const {
+ DCHECK(has_safepoint_info());
return StackSlotsField::decode(READ_UINT32_FIELD(this, kFlagsOffset));
}
-unsigned Code::safepoint_table_offset() const {
- DCHECK(is_turbofanned());
- return READ_UINT32_FIELD(this, kSafepointTableOffsetOffset);
+int Code::safepoint_table_offset() const {
+ DCHECK(has_safepoint_info());
+ return READ_INT32_FIELD(this, kSafepointTableOffsetOffset);
}
-void Code::set_safepoint_table_offset(unsigned offset) {
- CHECK(offset <= std::numeric_limits<uint32_t>::max());
- DCHECK(is_turbofanned() || offset == 0); // Allow zero initialization.
+void Code::set_safepoint_table_offset(int offset) {
+ CHECK_LE(0, offset);
+ DCHECK(has_safepoint_info() || offset == 0); // Allow zero initialization.
DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
- WRITE_UINT32_FIELD(this, kSafepointTableOffsetOffset, offset);
+ WRITE_INT32_FIELD(this, kSafepointTableOffsetOffset, offset);
}
bool Code::marked_for_deoptimization() const {
@@ -635,6 +639,14 @@ ByteArray* BytecodeArray::SourcePositionTable() {
->source_position_table();
}
+void BytecodeArray::ClearFrameCacheFromSourcePositionTable() {
+ Object* maybe_table = source_position_table();
+ if (maybe_table->IsByteArray()) return;
+ DCHECK(maybe_table->IsSourcePositionTableWithFrameCache());
+ set_source_position_table(SourcePositionTableWithFrameCache::cast(maybe_table)
+ ->source_position_table());
+}
+
int BytecodeArray::BytecodeArraySize() { return SizeFor(this->length()); }
int BytecodeArray::SizeIncludingMetadata() {
diff --git a/deps/v8/src/objects/code.h b/deps/v8/src/objects/code.h
index bb447ce2dd..c43e07c1f9 100644
--- a/deps/v8/src/objects/code.h
+++ b/deps/v8/src/objects/code.h
@@ -6,6 +6,7 @@
#define V8_OBJECTS_CODE_H_
#include "src/objects.h"
+#include "src/objects/fixed-array.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
@@ -138,7 +139,8 @@ class Code : public HeapObject {
#endif // defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
#ifdef ENABLE_DISASSEMBLER
- void Disassemble(const char* name, std::ostream& os); // NOLINT
+ void Disassemble(const char* name, std::ostream& os,
+ void* current_pc = nullptr); // NOLINT
#endif
// [instruction_size]: Size of the native instructions
@@ -232,14 +234,16 @@ class Code : public HeapObject {
inline void set_builtin_index(int id);
inline bool is_builtin() const;
- // [stack_slots]: For kind OPTIMIZED_FUNCTION, the number of stack slots
+ inline bool has_safepoint_info() const;
+
+ // [stack_slots]: If {has_safepoint_info()}, the number of stack slots
// reserved in the code prologue.
- inline unsigned stack_slots() const;
+ inline int stack_slots() const;
- // [safepoint_table_start]: For kind OPTIMIZED_FUNCTION, the offset in
- // the instruction stream where the safepoint table starts.
- inline unsigned safepoint_table_offset() const;
- inline void set_safepoint_table_offset(unsigned offset);
+ // [safepoint_table_offset]: If {has_safepoint_info()}, the offset in the
+ // instruction stream where the safepoint table starts.
+ inline int safepoint_table_offset() const;
+ inline void set_safepoint_table_offset(int offset);
// [marked_for_deoptimization]: For kind OPTIMIZED_FUNCTION tells whether
// the code is going to be deoptimized because of dead embedded maps.
@@ -386,7 +390,7 @@ class Code : public HeapObject {
DECL_PRINTER(Code)
DECL_VERIFIER(Code)
- void PrintDeoptLocation(FILE* out, Address pc);
+ void PrintDeoptLocation(FILE* out, const char* str, Address pc);
bool CanDeoptAt(Address pc);
inline HandlerTable::CatchPrediction GetBuiltinCatchPrediction();
@@ -790,6 +794,7 @@ class BytecodeArray : public FixedArrayBase {
DECL_ACCESSORS(source_position_table, Object)
inline ByteArray* SourcePositionTable();
+ inline void ClearFrameCacheFromSourcePositionTable();
DECL_CAST(BytecodeArray)
diff --git a/deps/v8/src/objects/data-handler-inl.h b/deps/v8/src/objects/data-handler-inl.h
new file mode 100644
index 0000000000..40c3658e60
--- /dev/null
+++ b/deps/v8/src/objects/data-handler-inl.h
@@ -0,0 +1,41 @@
+// Copyright 2017 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_DATA_HANDLER_INL_H_
+#define V8_DATA_HANDLER_INL_H_
+
+#include "src/objects/data-handler.h"
+
+// Has to be the last include (doesn't have include guards):
+#include "src/objects/object-macros.h"
+
+namespace v8 {
+namespace internal {
+
+bool HeapObject::IsDataHandler() const {
+ return IsLoadHandler() || IsStoreHandler();
+}
+
+CAST_ACCESSOR(DataHandler)
+
+ACCESSORS(DataHandler, smi_handler, Object, kSmiHandlerOffset)
+ACCESSORS(DataHandler, validity_cell, Object, kValidityCellOffset)
+
+int DataHandler::data_field_count() const {
+ return (map()->instance_size() - kSizeWithData0) / kPointerSize;
+}
+
+ACCESSORS_CHECKED(DataHandler, data1, Object, kData1Offset,
+ map()->instance_size() >= kSizeWithData1)
+ACCESSORS_CHECKED(DataHandler, data2, Object, kData2Offset,
+ map()->instance_size() >= kSizeWithData2)
+ACCESSORS_CHECKED(DataHandler, data3, Object, kData3Offset,
+ map()->instance_size() >= kSizeWithData3)
+
+} // namespace internal
+} // namespace v8
+
+#include "src/objects/object-macros-undef.h"
+
+#endif // V8_DATA_HANDLER_INL_H_
diff --git a/deps/v8/src/objects/data-handler.h b/deps/v8/src/objects/data-handler.h
new file mode 100644
index 0000000000..f11d00fa38
--- /dev/null
+++ b/deps/v8/src/objects/data-handler.h
@@ -0,0 +1,63 @@
+// Copyright 2017 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_DATA_HANDLER_H_
+#define V8_DATA_HANDLER_H_
+
+#include "src/objects.h"
+
+// Has to be the last include (doesn't have include guards):
+#include "src/objects/object-macros.h"
+
+namespace v8 {
+namespace internal {
+
+// DataHandler is a base class for load and store handlers that can't be
+// encoded in one Smi. The kind of a handler can be deduced from its instance
+// type.
+class DataHandler : public Struct {
+ public:
+ // [smi_handler]: A Smi which encodes a handler or Code object (we still
+ // use code handlers for accessing lexical environment variables, but soon
+ // only smi handlers will remain). See LoadHandler and StoreHandler for
+ // details about encoding.
+ DECL_ACCESSORS(smi_handler, Object)
+
+ // [validity_cell]: A validity Cell that guards prototype chain modifications.
+ DECL_ACCESSORS(validity_cell, Object)
+
+ // Returns number of optional data fields available in the object.
+ inline int data_field_count() const;
+
+ // [data1-3]: These are optional general-purpose fields whose content and
+ // presence depends on the handler kind.
+ DECL_ACCESSORS(data1, Object)
+ DECL_ACCESSORS(data2, Object)
+ DECL_ACCESSORS(data3, Object)
+
+// Layout description.
+#define DATA_HANDLER_FIELDS(V) \
+ V(kSmiHandlerOffset, kPointerSize) \
+ V(kValidityCellOffset, kPointerSize) \
+ V(kSizeWithData0, 0) \
+ V(kData1Offset, kPointerSize) \
+ V(kSizeWithData1, 0) \
+ V(kData2Offset, kPointerSize) \
+ V(kSizeWithData2, 0) \
+ V(kData3Offset, kPointerSize) \
+ V(kSizeWithData3, 0)
+
+ DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, DATA_HANDLER_FIELDS)
+#undef DATA_HANDLER_FIELDS
+
+ DECL_CAST(DataHandler)
+
+ DECL_VERIFIER(DataHandler)
+};
+
+} // namespace internal
+} // namespace v8
+
+#include "src/objects/object-macros-undef.h"
+
+#endif // V8_DATA_HANDLER_H_
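
Note on data_field_count() in data-handler-inl.h: DataHandler does not store how many of the optional data1..data3 fields are present; the count is recovered from the map's instance size, which is why the layout above defines the kSizeWithDataN markers. A small sketch of that arithmetic (the kHeaderSize and kPointerSize values below are illustrative assumptions, not guaranteed to match every V8 build):

    #include <cassert>

    constexpr int kPointerSize = 8;            // assumed 64-bit build
    constexpr int kHeaderSize = kPointerSize;  // assumed HeapObject header size
    constexpr int kSmiHandlerOffset = kHeaderSize;
    constexpr int kValidityCellOffset = kSmiHandlerOffset + kPointerSize;
    constexpr int kSizeWithData0 = kValidityCellOffset + kPointerSize;

    // Mirrors DataHandler::data_field_count(): each optional field adds one
    // pointer to the instance size beyond the two mandatory fields.
    int DataFieldCount(int instance_size) {
      assert(instance_size >= kSizeWithData0);
      return (instance_size - kSizeWithData0) / kPointerSize;
    }
    // e.g. DataFieldCount(kSizeWithData0 + 2 * kPointerSize) == 2 (data1, data2).
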
diff --git a/deps/v8/src/objects/debug-objects.h b/deps/v8/src/objects/debug-objects.h
index 9ee2765897..0ce134b0b3 100644
--- a/deps/v8/src/objects/debug-objects.h
+++ b/deps/v8/src/objects/debug-objects.h
@@ -6,6 +6,7 @@
#define V8_OBJECTS_DEBUG_OBJECTS_H_
#include "src/objects.h"
+#include "src/objects/fixed-array.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
diff --git a/deps/v8/src/objects/descriptor-array.h b/deps/v8/src/objects/descriptor-array.h
index f0b985337b..a89a31fcd5 100644
--- a/deps/v8/src/objects/descriptor-array.h
+++ b/deps/v8/src/objects/descriptor-array.h
@@ -6,6 +6,7 @@
#define V8_OBJECTS_DESCRIPTOR_ARRAY_H_
#include "src/objects.h"
+#include "src/objects/fixed-array.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
diff --git a/deps/v8/src/objects/fixed-array-inl.h b/deps/v8/src/objects/fixed-array-inl.h
new file mode 100644
index 0000000000..edca36c92e
--- /dev/null
+++ b/deps/v8/src/objects/fixed-array-inl.h
@@ -0,0 +1,634 @@
+// Copyright 2017 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_OBJECTS_FIXED_ARRAY_INL_H_
+#define V8_OBJECTS_FIXED_ARRAY_INL_H_
+
+#include "src/objects.h"
+
+// Has to be the last include (doesn't have include guards):
+#include "src/objects/object-macros.h"
+
+namespace v8 {
+namespace internal {
+
+TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
+TYPE_CHECKER(FixedArrayExact, FIXED_ARRAY_TYPE)
+TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
+TYPE_CHECKER(WeakFixedArray, FIXED_ARRAY_TYPE)
+
+CAST_ACCESSOR(ArrayList)
+CAST_ACCESSOR(ByteArray)
+CAST_ACCESSOR(FixedArray)
+CAST_ACCESSOR(FixedArrayBase)
+CAST_ACCESSOR(FixedDoubleArray)
+CAST_ACCESSOR(FixedTypedArrayBase)
+CAST_ACCESSOR(TemplateList)
+CAST_ACCESSOR(WeakFixedArray)
+
+SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
+SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
+
+Object* FixedArrayBase::unchecked_synchronized_length() const {
+ return ACQUIRE_READ_FIELD(this, kLengthOffset);
+}
+
+ACCESSORS(FixedTypedArrayBase, base_pointer, Object, kBasePointerOffset)
+
+Object** FixedArray::GetFirstElementAddress() {
+ return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
+}
+
+bool FixedArray::ContainsOnlySmisOrHoles() {
+ Object* the_hole = GetHeap()->the_hole_value();
+ Object** current = GetFirstElementAddress();
+ for (int i = 0; i < length(); ++i) {
+ Object* candidate = *current++;
+ if (!candidate->IsSmi() && candidate != the_hole) return false;
+ }
+ return true;
+}
+
+Object* FixedArray::get(int index) const {
+ SLOW_DCHECK(index >= 0 && index < this->length());
+ return RELAXED_READ_FIELD(this, kHeaderSize + index * kPointerSize);
+}
+
+Handle<Object> FixedArray::get(FixedArray* array, int index, Isolate* isolate) {
+ return handle(array->get(index), isolate);
+}
+
+template <class T>
+MaybeHandle<T> FixedArray::GetValue(Isolate* isolate, int index) const {
+ Object* obj = get(index);
+ if (obj->IsUndefined(isolate)) return MaybeHandle<T>();
+ return Handle<T>(T::cast(obj), isolate);
+}
+
+template <class T>
+Handle<T> FixedArray::GetValueChecked(Isolate* isolate, int index) const {
+ Object* obj = get(index);
+ CHECK(!obj->IsUndefined(isolate));
+ return Handle<T>(T::cast(obj), isolate);
+}
+
+bool FixedArray::is_the_hole(Isolate* isolate, int index) {
+ return get(index)->IsTheHole(isolate);
+}
+
+void FixedArray::set(int index, Smi* value) {
+ DCHECK_NE(map(), GetHeap()->fixed_cow_array_map());
+ DCHECK_LT(index, this->length());
+ DCHECK(reinterpret_cast<Object*>(value)->IsSmi());
+ int offset = kHeaderSize + index * kPointerSize;
+ RELAXED_WRITE_FIELD(this, offset, value);
+}
+
+void FixedArray::set(int index, Object* value) {
+ DCHECK_NE(GetHeap()->fixed_cow_array_map(), map());
+ DCHECK(IsFixedArray());
+ DCHECK_GE(index, 0);
+ DCHECK_LT(index, this->length());
+ int offset = kHeaderSize + index * kPointerSize;
+ RELAXED_WRITE_FIELD(this, offset, value);
+ WRITE_BARRIER(GetHeap(), this, offset, value);
+}
+
+void FixedArray::set(int index, Object* value, WriteBarrierMode mode) {
+ DCHECK_NE(map(), GetHeap()->fixed_cow_array_map());
+ DCHECK_GE(index, 0);
+ DCHECK_LT(index, this->length());
+ int offset = kHeaderSize + index * kPointerSize;
+ RELAXED_WRITE_FIELD(this, offset, value);
+ CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
+}
+
+void FixedArray::NoWriteBarrierSet(FixedArray* array, int index,
+ Object* value) {
+ DCHECK_NE(array->map(), array->GetHeap()->fixed_cow_array_map());
+ DCHECK_GE(index, 0);
+ DCHECK_LT(index, array->length());
+ DCHECK(!array->GetHeap()->InNewSpace(value));
+ RELAXED_WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
+}
+
+void FixedArray::set_undefined(int index) {
+ set_undefined(GetIsolate(), index);
+}
+
+void FixedArray::set_undefined(Isolate* isolate, int index) {
+ FixedArray::NoWriteBarrierSet(this, index,
+ isolate->heap()->undefined_value());
+}
+
+void FixedArray::set_null(int index) { set_null(GetIsolate(), index); }
+
+void FixedArray::set_null(Isolate* isolate, int index) {
+ FixedArray::NoWriteBarrierSet(this, index, isolate->heap()->null_value());
+}
+
+void FixedArray::set_the_hole(int index) { set_the_hole(GetIsolate(), index); }
+
+void FixedArray::set_the_hole(Isolate* isolate, int index) {
+ FixedArray::NoWriteBarrierSet(this, index, isolate->heap()->the_hole_value());
+}
+
+void FixedArray::FillWithHoles(int from, int to) {
+ Isolate* isolate = GetIsolate();
+ for (int i = from; i < to; i++) {
+ set_the_hole(isolate, i);
+ }
+}
+
+Object** FixedArray::data_start() {
+ return HeapObject::RawField(this, kHeaderSize);
+}
+
+Object** FixedArray::RawFieldOfElementAt(int index) {
+ return HeapObject::RawField(this, OffsetOfElementAt(index));
+}
+
+double FixedDoubleArray::get_scalar(int index) {
+ DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
+ map() != GetHeap()->fixed_array_map());
+ DCHECK(index >= 0 && index < this->length());
+ DCHECK(!is_the_hole(index));
+ return READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
+}
+
+uint64_t FixedDoubleArray::get_representation(int index) {
+ DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
+ map() != GetHeap()->fixed_array_map());
+ DCHECK(index >= 0 && index < this->length());
+ int offset = kHeaderSize + index * kDoubleSize;
+ return READ_UINT64_FIELD(this, offset);
+}
+
+Handle<Object> FixedDoubleArray::get(FixedDoubleArray* array, int index,
+ Isolate* isolate) {
+ if (array->is_the_hole(index)) {
+ return isolate->factory()->the_hole_value();
+ } else {
+ return isolate->factory()->NewNumber(array->get_scalar(index));
+ }
+}
+
+void FixedDoubleArray::set(int index, double value) {
+ DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
+ map() != GetHeap()->fixed_array_map());
+ int offset = kHeaderSize + index * kDoubleSize;
+ if (std::isnan(value)) {
+ WRITE_DOUBLE_FIELD(this, offset, std::numeric_limits<double>::quiet_NaN());
+ } else {
+ WRITE_DOUBLE_FIELD(this, offset, value);
+ }
+ DCHECK(!is_the_hole(index));
+}
+
+void FixedDoubleArray::set_the_hole(Isolate* isolate, int index) {
+ set_the_hole(index);
+}
+
+void FixedDoubleArray::set_the_hole(int index) {
+ DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
+ map() != GetHeap()->fixed_array_map());
+ int offset = kHeaderSize + index * kDoubleSize;
+ WRITE_UINT64_FIELD(this, offset, kHoleNanInt64);
+}
+
+bool FixedDoubleArray::is_the_hole(Isolate* isolate, int index) {
+ return is_the_hole(index);
+}
+
+bool FixedDoubleArray::is_the_hole(int index) {
+ return get_representation(index) == kHoleNanInt64;
+}
+
+double* FixedDoubleArray::data_start() {
+ return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
+}
+
+void FixedDoubleArray::FillWithHoles(int from, int to) {
+ for (int i = from; i < to; i++) {
+ set_the_hole(i);
+ }
+}
+
+Object* WeakFixedArray::Get(int index) const {
+ Object* raw = FixedArray::cast(this)->get(index + kFirstIndex);
+ if (raw->IsSmi()) return raw;
+ DCHECK(raw->IsWeakCell());
+ return WeakCell::cast(raw)->value();
+}
+
+bool WeakFixedArray::IsEmptySlot(int index) const {
+ DCHECK(index < Length());
+ return Get(index)->IsSmi();
+}
+
+void WeakFixedArray::Clear(int index) {
+ FixedArray::cast(this)->set(index + kFirstIndex, Smi::kZero);
+}
+
+int WeakFixedArray::Length() const {
+ return FixedArray::cast(this)->length() - kFirstIndex;
+}
+
+int WeakFixedArray::last_used_index() const {
+ return Smi::ToInt(FixedArray::cast(this)->get(kLastUsedIndexIndex));
+}
+
+void WeakFixedArray::set_last_used_index(int index) {
+ FixedArray::cast(this)->set(kLastUsedIndexIndex, Smi::FromInt(index));
+}
+
+template <class T>
+T* WeakFixedArray::Iterator::Next() {
+ if (list_ != nullptr) {
+ // Assert that list did not change during iteration.
+ DCHECK_EQ(last_used_index_, list_->last_used_index());
+ while (index_ < list_->Length()) {
+ Object* item = list_->Get(index_++);
+ if (item != Empty()) return T::cast(item);
+ }
+ list_ = nullptr;
+ }
+ return nullptr;
+}
+
+int ArrayList::Length() const {
+ if (FixedArray::cast(this)->length() == 0) return 0;
+ return Smi::ToInt(FixedArray::cast(this)->get(kLengthIndex));
+}
+
+void ArrayList::SetLength(int length) {
+ return FixedArray::cast(this)->set(kLengthIndex, Smi::FromInt(length));
+}
+
+Object* ArrayList::Get(int index) const {
+ return FixedArray::cast(this)->get(kFirstIndex + index);
+}
+
+Object** ArrayList::Slot(int index) {
+ return data_start() + kFirstIndex + index;
+}
+
+void ArrayList::Set(int index, Object* obj, WriteBarrierMode mode) {
+ FixedArray::cast(this)->set(kFirstIndex + index, obj, mode);
+}
+
+void ArrayList::Clear(int index, Object* undefined) {
+ DCHECK(undefined->IsUndefined(GetIsolate()));
+ FixedArray::cast(this)->set(kFirstIndex + index, undefined,
+ SKIP_WRITE_BARRIER);
+}
+
+int ByteArray::Size() { return RoundUp(length() + kHeaderSize, kPointerSize); }
+
+byte ByteArray::get(int index) const {
+ DCHECK(index >= 0 && index < this->length());
+ return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
+}
+
+void ByteArray::set(int index, byte value) {
+ DCHECK(index >= 0 && index < this->length());
+ WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
+}
+
+void ByteArray::copy_in(int index, const byte* buffer, int length) {
+ DCHECK(index >= 0 && length >= 0 && length <= kMaxInt - index &&
+ index + length <= this->length());
+ byte* dst_addr = FIELD_ADDR(this, kHeaderSize + index * kCharSize);
+ memcpy(dst_addr, buffer, length);
+}
+
+void ByteArray::copy_out(int index, byte* buffer, int length) {
+ DCHECK(index >= 0 && length >= 0 && length <= kMaxInt - index &&
+ index + length <= this->length());
+ const byte* src_addr = FIELD_ADDR(this, kHeaderSize + index * kCharSize);
+ memcpy(buffer, src_addr, length);
+}
+
+int ByteArray::get_int(int index) const {
+ DCHECK(index >= 0 && index < this->length() / kIntSize);
+ return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
+}
+
+void ByteArray::set_int(int index, int value) {
+ DCHECK(index >= 0 && index < this->length() / kIntSize);
+ WRITE_INT_FIELD(this, kHeaderSize + index * kIntSize, value);
+}
+
+uint32_t ByteArray::get_uint32(int index) const {
+ DCHECK(index >= 0 && index < this->length() / kUInt32Size);
+ return READ_UINT32_FIELD(this, kHeaderSize + index * kUInt32Size);
+}
+
+void ByteArray::set_uint32(int index, uint32_t value) {
+ DCHECK(index >= 0 && index < this->length() / kUInt32Size);
+ WRITE_UINT32_FIELD(this, kHeaderSize + index * kUInt32Size, value);
+}
+
+void ByteArray::clear_padding() {
+ int data_size = length() + kHeaderSize;
+ memset(address() + data_size, 0, Size() - data_size);
+}
+
+ByteArray* ByteArray::FromDataStartAddress(Address address) {
+ DCHECK_TAG_ALIGNED(address);
+ return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
+}
+
+int ByteArray::DataSize() const { return RoundUp(length(), kPointerSize); }
+
+int ByteArray::ByteArraySize() { return SizeFor(this->length()); }
+
+Address ByteArray::GetDataStartAddress() {
+ return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
+}
+
+template <class T>
+PodArray<T>* PodArray<T>::cast(Object* object) {
+ SLOW_DCHECK(object->IsByteArray());
+ return reinterpret_cast<PodArray<T>*>(object);
+}
+template <class T>
+const PodArray<T>* PodArray<T>::cast(const Object* object) {
+ SLOW_DCHECK(object->IsByteArray());
+ return reinterpret_cast<const PodArray<T>*>(object);
+}
+
+// static
+template <class T>
+Handle<PodArray<T>> PodArray<T>::New(Isolate* isolate, int length,
+ PretenureFlag pretenure) {
+ return Handle<PodArray<T>>::cast(
+ isolate->factory()->NewByteArray(length * sizeof(T), pretenure));
+}
+
+void* FixedTypedArrayBase::external_pointer() const {
+ intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
+ return reinterpret_cast<void*>(ptr);
+}
+
+void FixedTypedArrayBase::set_external_pointer(void* value,
+ WriteBarrierMode mode) {
+ intptr_t ptr = reinterpret_cast<intptr_t>(value);
+ WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
+}
+
+void* FixedTypedArrayBase::DataPtr() {
+ return reinterpret_cast<void*>(
+ reinterpret_cast<intptr_t>(base_pointer()) +
+ reinterpret_cast<intptr_t>(external_pointer()));
+}
+
+int FixedTypedArrayBase::ElementSize(InstanceType type) {
+ int element_size;
+ switch (type) {
+#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
+ case FIXED_##TYPE##_ARRAY_TYPE: \
+ element_size = size; \
+ break;
+
+ TYPED_ARRAYS(TYPED_ARRAY_CASE)
+#undef TYPED_ARRAY_CASE
+ default:
+ UNREACHABLE();
+ }
+ return element_size;
+}
+
+int FixedTypedArrayBase::DataSize(InstanceType type) const {
+ if (base_pointer() == Smi::kZero) return 0;
+ return length() * ElementSize(type);
+}
+
+int FixedTypedArrayBase::DataSize() const {
+ return DataSize(map()->instance_type());
+}
+
+size_t FixedTypedArrayBase::ByteLength() const {
+ return static_cast<size_t>(length()) *
+ static_cast<size_t>(ElementSize(map()->instance_type()));
+}
+
+int FixedTypedArrayBase::size() const {
+ return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
+}
+
+int FixedTypedArrayBase::TypedArraySize(InstanceType type) const {
+ return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
+}
+
+// static
+int FixedTypedArrayBase::TypedArraySize(InstanceType type, int length) {
+ return OBJECT_POINTER_ALIGN(kDataOffset + length * ElementSize(type));
+}
+
+uint8_t Uint8ArrayTraits::defaultValue() { return 0; }
+
+uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }
+
+int8_t Int8ArrayTraits::defaultValue() { return 0; }
+
+uint16_t Uint16ArrayTraits::defaultValue() { return 0; }
+
+int16_t Int16ArrayTraits::defaultValue() { return 0; }
+
+uint32_t Uint32ArrayTraits::defaultValue() { return 0; }
+
+int32_t Int32ArrayTraits::defaultValue() { return 0; }
+
+float Float32ArrayTraits::defaultValue() {
+ return std::numeric_limits<float>::quiet_NaN();
+}
+
+double Float64ArrayTraits::defaultValue() {
+ return std::numeric_limits<double>::quiet_NaN();
+}
+
+template <class Traits>
+typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
+ DCHECK((index >= 0) && (index < this->length()));
+ return FixedTypedArray<Traits>::get_scalar_from_data_ptr(DataPtr(), index);
+}
+
+// static
+template <class Traits>
+typename Traits::ElementType FixedTypedArray<Traits>::get_scalar_from_data_ptr(
+ void* data_ptr, int index) {
+ typename Traits::ElementType* ptr = reinterpret_cast<ElementType*>(data_ptr);
+ // The JavaScript memory model allows for racy reads and writes to a
+ // SharedArrayBuffer's backing store, which will always be a FixedTypedArray.
+ // ThreadSanitizer will catch these racy accesses and warn about them, so we
+ // disable TSAN for these reads and writes using annotations.
+ //
+ // We don't use relaxed atomics here, as it is not a requirement of the
+ // JavaScript memory model to have tear-free reads of overlapping accesses,
+ // and using relaxed atomics may introduce overhead.
+ TSAN_ANNOTATE_IGNORE_READS_BEGIN;
+ auto result = ptr[index];
+ TSAN_ANNOTATE_IGNORE_READS_END;
+ return result;
+}
+
+template <class Traits>
+void FixedTypedArray<Traits>::set(int index, ElementType value) {
+ CHECK((index >= 0) && (index < this->length()));
+ // See the comment in FixedTypedArray<Traits>::get_scalar.
+ auto* ptr = reinterpret_cast<ElementType*>(DataPtr());
+ TSAN_ANNOTATE_IGNORE_WRITES_BEGIN;
+ ptr[index] = value;
+ TSAN_ANNOTATE_IGNORE_WRITES_END;
+}
+
+template <class Traits>
+typename Traits::ElementType FixedTypedArray<Traits>::from(int value) {
+ return static_cast<ElementType>(value);
+}
+
+template <>
+inline uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from(int value) {
+ if (value < 0) return 0;
+ if (value > 0xFF) return 0xFF;
+ return static_cast<uint8_t>(value);
+}
+
+template <class Traits>
+typename Traits::ElementType FixedTypedArray<Traits>::from(uint32_t value) {
+ return static_cast<ElementType>(value);
+}
+
+template <>
+inline uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from(uint32_t value) {
+ // We need this special case for Uint32 -> Uint8Clamped, because the highest
+ // Uint32 values will be negative as an int, clamping to 0, rather than 255.
+ if (value > 0xFF) return 0xFF;
+ return static_cast<uint8_t>(value);
+}
+
+template <class Traits>
+typename Traits::ElementType FixedTypedArray<Traits>::from(double value) {
+ return static_cast<ElementType>(DoubleToInt32(value));
+}
+
+template <>
+inline uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from(double value) {
+ // Handle NaNs and less than zero values which clamp to zero.
+ if (!(value > 0)) return 0;
+ if (value > 0xFF) return 0xFF;
+ return static_cast<uint8_t>(lrint(value));
+}
+
+template <>
+inline float FixedTypedArray<Float32ArrayTraits>::from(double value) {
+ return static_cast<float>(value);
+}
+
+template <>
+inline double FixedTypedArray<Float64ArrayTraits>::from(double value) {
+ return value;
+}
+
+template <class Traits>
+Handle<Object> FixedTypedArray<Traits>::get(FixedTypedArray<Traits>* array,
+ int index) {
+ return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index));
+}
+
+template <class Traits>
+void FixedTypedArray<Traits>::SetValue(uint32_t index, Object* value) {
+ ElementType cast_value = Traits::defaultValue();
+ if (value->IsSmi()) {
+ int int_value = Smi::ToInt(value);
+ cast_value = from(int_value);
+ } else if (value->IsHeapNumber()) {
+ double double_value = HeapNumber::cast(value)->value();
+ cast_value = from(double_value);
+ } else {
+ // Clamp undefined to the default value. All other types have been
+ // converted to a number type further up in the call chain.
+ DCHECK(value->IsUndefined(GetIsolate()));
+ }
+ set(index, cast_value);
+}
+
+Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
+ return handle(Smi::FromInt(scalar), isolate);
+}
+
+Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
+ uint8_t scalar) {
+ return handle(Smi::FromInt(scalar), isolate);
+}
+
+Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
+ return handle(Smi::FromInt(scalar), isolate);
+}
+
+Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
+ return handle(Smi::FromInt(scalar), isolate);
+}
+
+Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
+ return handle(Smi::FromInt(scalar), isolate);
+}
+
+Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
+ return isolate->factory()->NewNumberFromUint(scalar);
+}
+
+Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
+ return isolate->factory()->NewNumberFromInt(scalar);
+}
+
+Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
+ return isolate->factory()->NewNumber(scalar);
+}
+
+Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
+ return isolate->factory()->NewNumber(scalar);
+}
+
+// static
+template <class Traits>
+STATIC_CONST_MEMBER_DEFINITION const InstanceType
+ FixedTypedArray<Traits>::kInstanceType;
+
+template <class Traits>
+FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
+ SLOW_DCHECK(object->IsHeapObject() &&
+ HeapObject::cast(object)->map()->instance_type() ==
+ Traits::kInstanceType);
+ return reinterpret_cast<FixedTypedArray<Traits>*>(object);
+}
+
+template <class Traits>
+const FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(
+ const Object* object) {
+ SLOW_DCHECK(object->IsHeapObject() &&
+ HeapObject::cast(object)->map()->instance_type() ==
+ Traits::kInstanceType);
+ return reinterpret_cast<FixedTypedArray<Traits>*>(object);
+}
+
+int TemplateList::length() const {
+ return Smi::ToInt(FixedArray::cast(this)->get(kLengthIndex));
+}
+
+Object* TemplateList::get(int index) const {
+ return FixedArray::cast(this)->get(kFirstElementIndex + index);
+}
+
+void TemplateList::set(int index, Object* value) {
+ FixedArray::cast(this)->set(kFirstElementIndex + index, value);
+}
+
+} // namespace internal
+} // namespace v8
+
+#include "src/objects/object-macros-undef.h"
+
+#endif // V8_OBJECTS_FIXED_ARRAY_INL_H_
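
Note on the Uint8Clamped from() specializations above: they implement the typed-array clamping conversion. Integers clamp into [0, 255]; uint32_t needs its own overload so that large values do not wrap negative and clamp to 0; doubles round to the nearest integer (lrint, ties to even) after NaN and non-positive values fall out through the !(value > 0) test. The same logic as standalone functions (a sketch restating the code above, not a V8 API):

    #include <cmath>
    #include <cstdint>

    uint8_t ClampedFromInt(int value) {
      if (value < 0) return 0;
      if (value > 0xFF) return 0xFF;
      return static_cast<uint8_t>(value);
    }

    uint8_t ClampedFromDouble(double value) {
      if (!(value > 0)) return 0;  // false for NaN and for value <= 0
      if (value > 0xFF) return 0xFF;
      return static_cast<uint8_t>(std::lrint(value));
    }
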
diff --git a/deps/v8/src/objects/fixed-array.h b/deps/v8/src/objects/fixed-array.h
new file mode 100644
index 0000000000..5d78af8799
--- /dev/null
+++ b/deps/v8/src/objects/fixed-array.h
@@ -0,0 +1,601 @@
+// Copyright 2017 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_OBJECTS_FIXED_ARRAY_H_
+#define V8_OBJECTS_FIXED_ARRAY_H_
+
+#include "src/objects.h"
+
+// Has to be the last include (doesn't have include guards):
+#include "src/objects/object-macros.h"
+
+namespace v8 {
+namespace internal {
+
+#define FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(V) \
+ V(BYTECODE_ARRAY_CONSTANT_POOL_SUB_TYPE) \
+ V(BYTECODE_ARRAY_HANDLER_TABLE_SUB_TYPE) \
+ V(CODE_STUBS_TABLE_SUB_TYPE) \
+ V(COMPILATION_CACHE_TABLE_SUB_TYPE) \
+ V(CONTEXT_SUB_TYPE) \
+ V(COPY_ON_WRITE_SUB_TYPE) \
+ V(DEOPTIMIZATION_DATA_SUB_TYPE) \
+ V(DESCRIPTOR_ARRAY_SUB_TYPE) \
+ V(EMBEDDED_OBJECT_SUB_TYPE) \
+ V(ENUM_CACHE_SUB_TYPE) \
+ V(ENUM_INDICES_CACHE_SUB_TYPE) \
+ V(DEPENDENT_CODE_SUB_TYPE) \
+ V(DICTIONARY_ELEMENTS_SUB_TYPE) \
+ V(DICTIONARY_PROPERTIES_SUB_TYPE) \
+ V(EMPTY_PROPERTIES_DICTIONARY_SUB_TYPE) \
+ V(PACKED_ELEMENTS_SUB_TYPE) \
+ V(FAST_PROPERTIES_SUB_TYPE) \
+ V(FAST_TEMPLATE_INSTANTIATIONS_CACHE_SUB_TYPE) \
+ V(HANDLER_TABLE_SUB_TYPE) \
+ V(JS_COLLECTION_SUB_TYPE) \
+ V(JS_WEAK_COLLECTION_SUB_TYPE) \
+ V(NOSCRIPT_SHARED_FUNCTION_INFOS_SUB_TYPE) \
+ V(NUMBER_STRING_CACHE_SUB_TYPE) \
+ V(OBJECT_TO_CODE_SUB_TYPE) \
+ V(OPTIMIZED_CODE_LITERALS_SUB_TYPE) \
+ V(OPTIMIZED_CODE_MAP_SUB_TYPE) \
+ V(PROTOTYPE_USERS_SUB_TYPE) \
+ V(REGEXP_MULTIPLE_CACHE_SUB_TYPE) \
+ V(RETAINED_MAPS_SUB_TYPE) \
+ V(SCOPE_INFO_SUB_TYPE) \
+ V(SCRIPT_LIST_SUB_TYPE) \
+ V(SERIALIZED_OBJECTS_SUB_TYPE) \
+ V(SHARED_FUNCTION_INFOS_SUB_TYPE) \
+ V(SINGLE_CHARACTER_STRING_CACHE_SUB_TYPE) \
+ V(SLOW_TEMPLATE_INSTANTIATIONS_CACHE_SUB_TYPE) \
+ V(STRING_SPLIT_CACHE_SUB_TYPE) \
+ V(STRING_TABLE_SUB_TYPE) \
+ V(TEMPLATE_INFO_SUB_TYPE) \
+ V(FEEDBACK_METADATA_SUB_TYPE) \
+ V(WEAK_NEW_SPACE_OBJECT_TO_CODE_SUB_TYPE)
+
+enum FixedArraySubInstanceType {
+#define DEFINE_FIXED_ARRAY_SUB_INSTANCE_TYPE(name) name,
+ FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(DEFINE_FIXED_ARRAY_SUB_INSTANCE_TYPE)
+#undef DEFINE_FIXED_ARRAY_SUB_INSTANCE_TYPE
+ LAST_FIXED_ARRAY_SUB_TYPE = WEAK_NEW_SPACE_OBJECT_TO_CODE_SUB_TYPE
+};
+
+// Common superclass for FixedArrays that allow implementations to share
+// common accessors and some code paths.
+class FixedArrayBase : public HeapObject {
+ public:
+ // [length]: length of the array.
+ inline int length() const;
+ inline void set_length(int value);
+
+ // Get and set the length using acquire loads and release stores.
+ inline int synchronized_length() const;
+ inline void synchronized_set_length(int value);
+
+ inline Object* unchecked_synchronized_length() const;
+
+ DECL_CAST(FixedArrayBase)
+
+ static int GetMaxLengthForNewSpaceAllocation(ElementsKind kind);
+
+ bool IsCowArray() const;
+
+ // Layout description.
+ // Length is smi tagged when it is stored.
+ static const int kLengthOffset = HeapObject::kHeaderSize;
+ static const int kHeaderSize = kLengthOffset + kPointerSize;
+};
+
+// FixedArray describes fixed-sized arrays with element type Object*.
+class FixedArray : public FixedArrayBase {
+ public:
+ // Setter and getter for elements.
+ inline Object* get(int index) const;
+ static inline Handle<Object> get(FixedArray* array, int index,
+ Isolate* isolate);
+ template <class T>
+ MaybeHandle<T> GetValue(Isolate* isolate, int index) const;
+
+ template <class T>
+ Handle<T> GetValueChecked(Isolate* isolate, int index) const;
+
+ // Return a grown copy if the index is bigger than the array's length.
+ static Handle<FixedArray> SetAndGrow(Handle<FixedArray> array, int index,
+ Handle<Object> value);
+
+ // Setter that uses write barrier.
+ inline void set(int index, Object* value);
+ inline bool is_the_hole(Isolate* isolate, int index);
+
+ // Setter that doesn't need write barrier.
+ inline void set(int index, Smi* value);
+ // Setter with explicit barrier mode.
+ inline void set(int index, Object* value, WriteBarrierMode mode);
+
+ // Setters for frequently used oddballs located in old space.
+ inline void set_undefined(int index);
+ inline void set_undefined(Isolate* isolate, int index);
+ inline void set_null(int index);
+ inline void set_null(Isolate* isolate, int index);
+ inline void set_the_hole(int index);
+ inline void set_the_hole(Isolate* isolate, int index);
+
+ inline Object** GetFirstElementAddress();
+ inline bool ContainsOnlySmisOrHoles();
+
+ // Gives access to raw memory which stores the array's data.
+ inline Object** data_start();
+
+ inline void FillWithHoles(int from, int to);
+
+ // Shrink length and insert filler objects.
+ void Shrink(int length);
+
+ // Copy a sub array from the receiver to dest.
+ void CopyTo(int pos, FixedArray* dest, int dest_pos, int len) const;
+
+ // Garbage collection support.
+ static constexpr int SizeFor(int length) {
+ return kHeaderSize + length * kPointerSize;
+ }
+
+ // Code Generation support.
+ static constexpr int OffsetOfElementAt(int index) { return SizeFor(index); }
+
+ // Garbage collection support.
+ inline Object** RawFieldOfElementAt(int index);
+
+ DECL_CAST(FixedArray)
+
+ // Maximal allowed size, in bytes, of a single FixedArray.
+ // Prevents overflowing size computations, as well as extreme memory
+ // consumption.
+ static const int kMaxSize = 128 * MB * kPointerSize;
+ // Maximally allowed length of a FixedArray.
+ static const int kMaxLength = (kMaxSize - kHeaderSize) / kPointerSize;
+ // Maximally allowed length for regular (non large object space) object.
+ STATIC_ASSERT(kMaxRegularHeapObjectSize < kMaxSize);
+ static const int kMaxRegularLength =
+ (kMaxRegularHeapObjectSize - kHeaderSize) / kPointerSize;
+
+ // Dispatched behavior.
+ DECL_PRINTER(FixedArray)
+ DECL_VERIFIER(FixedArray)
+#ifdef DEBUG
+ // Checks if two FixedArrays have identical contents.
+ bool IsEqualTo(FixedArray* other);
+#endif
+
+ typedef FlexibleBodyDescriptor<kHeaderSize> BodyDescriptor;
+ // No weak fields.
+ typedef BodyDescriptor BodyDescriptorWeak;
+
+ protected:
+ // Set operation on FixedArray without using write barriers. Can
+ // only be used for storing old space objects or smis.
+ static inline void NoWriteBarrierSet(FixedArray* array, int index,
+ Object* value);
+
+ private:
+ STATIC_ASSERT(kHeaderSize == Internals::kFixedArrayHeaderSize);
+
+ DISALLOW_IMPLICIT_CONSTRUCTORS(FixedArray);
+};
+
+// FixedArray alias added only because of IsFixedArrayExact() predicate, which
+// checks for the exact instance type FIXED_ARRAY_TYPE instead of a range
+// check: [FIRST_FIXED_ARRAY_TYPE, LAST_FIXED_ARRAY_TYPE].
+class FixedArrayExact final : public FixedArray {
+ public:
+ DECL_CAST(FixedArrayExact)
+};
+
+// FixedDoubleArray describes fixed-sized arrays with element type double.
+class FixedDoubleArray : public FixedArrayBase {
+ public:
+ // Setter and getter for elements.
+ inline double get_scalar(int index);
+ inline uint64_t get_representation(int index);
+ static inline Handle<Object> get(FixedDoubleArray* array, int index,
+ Isolate* isolate);
+ inline void set(int index, double value);
+ inline void set_the_hole(Isolate* isolate, int index);
+ inline void set_the_hole(int index);
+
+ // Checking for the hole.
+ inline bool is_the_hole(Isolate* isolate, int index);
+ inline bool is_the_hole(int index);
+
+ // Garbage collection support.
+ inline static int SizeFor(int length) {
+ return kHeaderSize + length * kDoubleSize;
+ }
+
+ // Gives access to raw memory which stores the array's data.
+ inline double* data_start();
+
+ inline void FillWithHoles(int from, int to);
+
+ // Code Generation support.
+ static int OffsetOfElementAt(int index) { return SizeFor(index); }
+
+ DECL_CAST(FixedDoubleArray)
+
+ // Maximal allowed size, in bytes, of a single FixedDoubleArray.
+ // Prevents overflowing size computations, as well as extreme memory
+ // consumption.
+ static const int kMaxSize = 512 * MB;
+ // Maximally allowed length of a FixedArray.
+ static const int kMaxLength = (kMaxSize - kHeaderSize) / kDoubleSize;
+
+ // Dispatched behavior.
+ DECL_PRINTER(FixedDoubleArray)
+ DECL_VERIFIER(FixedDoubleArray)
+
+ class BodyDescriptor;
+ // No weak fields.
+ typedef BodyDescriptor BodyDescriptorWeak;
+
+ private:
+ DISALLOW_IMPLICIT_CONSTRUCTORS(FixedDoubleArray);
+};
+
+class WeakFixedArray : public FixedArray {
+ public:
+ // If |maybe_array| is not a WeakFixedArray, a fresh one will be allocated.
+ // This function does not check if the value already exists; callers must
+ // ensure this themselves if necessary.
+ static Handle<WeakFixedArray> Add(Handle<Object> maybe_array,
+ Handle<HeapObject> value,
+ int* assigned_index = nullptr);
+
+ // Returns true if an entry was found and removed.
+ bool Remove(Handle<HeapObject> value);
+
+ class NullCallback {
+ public:
+ static void Callback(Object* value, int old_index, int new_index) {}
+ };
+
+ template <class CompactionCallback>
+ void Compact();
+
+ inline Object* Get(int index) const;
+ inline void Clear(int index);
+ inline int Length() const;
+
+ inline bool IsEmptySlot(int index) const;
+ static Object* Empty() { return Smi::kZero; }
+
+ class Iterator {
+ public:
+ explicit Iterator(Object* maybe_array) : list_(nullptr) {
+ Reset(maybe_array);
+ }
+ void Reset(Object* maybe_array);
+
+ template <class T>
+ inline T* Next();
+
+ private:
+ int index_;
+ WeakFixedArray* list_;
+#ifdef DEBUG
+ int last_used_index_;
+ DisallowHeapAllocation no_gc_;
+#endif // DEBUG
+ DISALLOW_COPY_AND_ASSIGN(Iterator);
+ };
+
+ DECL_CAST(WeakFixedArray)
+
+ private:
+ static const int kLastUsedIndexIndex = 0;
+ static const int kFirstIndex = 1;
+
+ static Handle<WeakFixedArray> Allocate(
+ Isolate* isolate, int size, Handle<WeakFixedArray> initialize_from);
+
+ static void Set(Handle<WeakFixedArray> array, int index,
+ Handle<HeapObject> value);
+ inline void clear(int index);
+
+ inline int last_used_index() const;
+ inline void set_last_used_index(int index);
+
+ // Disallow inherited setters.
+ void set(int index, Smi* value);
+ void set(int index, Object* value);
+ void set(int index, Object* value, WriteBarrierMode mode);
+ DISALLOW_IMPLICIT_CONSTRUCTORS(WeakFixedArray);
+};
+
+// Generic array grows dynamically with O(1) amortized insertion.
+//
+// ArrayList is a FixedArray with static convenience methods for adding more
+// elements. The Length() method returns the number of elements in the list, not
+// the allocated size. The number of elements is stored at kLengthIndex and is
+// updated with every insertion. The elements of the ArrayList are stored in the
+// underlying FixedArray starting at kFirstIndex.
+class ArrayList : public FixedArray {
+ public:
+ enum AddMode {
+ kNone,
+ // Use this if GC can delete elements from the array.
+ kReloadLengthAfterAllocation,
+ };
+ static Handle<ArrayList> Add(Handle<ArrayList> array, Handle<Object> obj,
+ AddMode mode = kNone);
+ static Handle<ArrayList> Add(Handle<ArrayList> array, Handle<Object> obj1,
+ Handle<Object> obj2, AddMode = kNone);
+ static Handle<ArrayList> New(Isolate* isolate, int size);
+
+ // Returns the number of elements in the list, not the allocated size, which
+ // is length(). Lower and upper case length() return different results!
+ inline int Length() const;
+
+ // Sets the Length() as used by Elements(). Does not change the underlying
+ // storage capacity, i.e., length().
+ inline void SetLength(int length);
+ inline Object* Get(int index) const;
+ inline Object** Slot(int index);
+
+ // Set the element at index to obj. The underlying array must be large enough.
+ // If you need to grow the ArrayList, use the static Add() methods instead.
+ inline void Set(int index, Object* obj,
+ WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
+
+ // Set the element at index to undefined. This does not change the Length().
+ inline void Clear(int index, Object* undefined);
+
+ // Return a copy of the list of size Length() without the first entry. The
+ // number returned by Length() is stored in the first entry.
+ static Handle<FixedArray> Elements(Handle<ArrayList> array);
+ bool IsFull();
+ DECL_CAST(ArrayList)
+
+ private:
+ static Handle<ArrayList> EnsureSpace(Handle<ArrayList> array, int length);
+ static const int kLengthIndex = 0;
+ static const int kFirstIndex = 1;
+ DISALLOW_IMPLICIT_CONSTRUCTORS(ArrayList);
+};
+
+enum SearchMode { ALL_ENTRIES, VALID_ENTRIES };
+
+template <SearchMode search_mode, typename T>
+inline int Search(T* array, Name* name, int valid_entries = 0,
+ int* out_insertion_index = nullptr);
+
+// ByteArray represents fixed sized byte arrays. Used for the relocation info
+// that is attached to code objects.
+class ByteArray : public FixedArrayBase {
+ public:
+ inline int Size();
+
+ // Setter and getter.
+ inline byte get(int index) const;
+ inline void set(int index, byte value);
+
+ // Copy in / copy out whole byte slices.
+ inline void copy_out(int index, byte* buffer, int length);
+ inline void copy_in(int index, const byte* buffer, int length);
+
+ // Treat contents as an int array.
+ inline int get_int(int index) const;
+ inline void set_int(int index, int value);
+
+ inline uint32_t get_uint32(int index) const;
+ inline void set_uint32(int index, uint32_t value);
+
+ // Clear uninitialized padding space. This ensures that the snapshot content
+ // is deterministic.
+ inline void clear_padding();
+
+ static int SizeFor(int length) {
+ return OBJECT_POINTER_ALIGN(kHeaderSize + length);
+ }
+ // We use byte arrays for free blocks in the heap. Given a desired size in
+ // bytes that is a multiple of the word size and big enough to hold a byte
+ // array, this function returns the number of elements a byte array should
+ // have.
+ static int LengthFor(int size_in_bytes) {
+ DCHECK(IsAligned(size_in_bytes, kPointerSize));
+ DCHECK_GE(size_in_bytes, kHeaderSize);
+ return size_in_bytes - kHeaderSize;
+ }
+
+ // Returns data start address.
+ inline Address GetDataStartAddress();
+
+ inline int DataSize() const;
+
+ // Returns a pointer to the ByteArray object for a given data start address.
+ static inline ByteArray* FromDataStartAddress(Address address);
+
+ DECL_CAST(ByteArray)
+
+ // Dispatched behavior.
+ inline int ByteArraySize();
+ DECL_PRINTER(ByteArray)
+ DECL_VERIFIER(ByteArray)
+
+ // Layout description.
+ static const int kAlignedSize = OBJECT_POINTER_ALIGN(kHeaderSize);
+
+ // Maximal memory consumption for a single ByteArray.
+ static const int kMaxSize = 512 * MB;
+ // Maximal length of a single ByteArray.
+ static const int kMaxLength = kMaxSize - kHeaderSize;
+
+ class BodyDescriptor;
+ // No weak fields.
+ typedef BodyDescriptor BodyDescriptorWeak;
+
+ private:
+ DISALLOW_IMPLICIT_CONSTRUCTORS(ByteArray);
+};
+
+// Wrapper class for ByteArray which can store arbitrary C++ classes, as long
+// as they can be copied with memcpy.
+template <class T>
+class PodArray : public ByteArray {
+ public:
+ static Handle<PodArray<T>> New(Isolate* isolate, int length,
+ PretenureFlag pretenure = NOT_TENURED);
+ void copy_out(int index, T* result) {
+ ByteArray::copy_out(index * sizeof(T), reinterpret_cast<byte*>(result),
+ sizeof(T));
+ }
+ T get(int index) {
+ T result;
+ copy_out(index, &result);
+ return result;
+ }
+ void set(int index, const T& value) {
+ copy_in(index * sizeof(T), reinterpret_cast<const byte*>(&value),
+ sizeof(T));
+ }
+ int length() { return ByteArray::length() / sizeof(T); }
+ DECL_CAST(PodArray<T>)
+
+ private:
+ DISALLOW_IMPLICIT_CONSTRUCTORS(PodArray<T>);
+};
+
+// V has parameters (Type, type, TYPE, C type, element_size)
+#define TYPED_ARRAYS(V) \
+ V(Uint8, uint8, UINT8, uint8_t, 1) \
+ V(Int8, int8, INT8, int8_t, 1) \
+ V(Uint16, uint16, UINT16, uint16_t, 2) \
+ V(Int16, int16, INT16, int16_t, 2) \
+ V(Uint32, uint32, UINT32, uint32_t, 4) \
+ V(Int32, int32, INT32, int32_t, 4) \
+ V(Float32, float32, FLOAT32, float, 4) \
+ V(Float64, float64, FLOAT64, double, 8) \
+ V(Uint8Clamped, uint8_clamped, UINT8_CLAMPED, uint8_t, 1)
+
+class FixedTypedArrayBase : public FixedArrayBase {
+ public:
+ // [base_pointer]: Either points to the FixedTypedArrayBase itself or nullptr.
+ DECL_ACCESSORS(base_pointer, Object)
+
+ // [external_pointer]: Contains the offset between base_pointer and the start
+ // of the data. If the base_pointer is a nullptr, the external_pointer
+ // therefore points to the actual backing store.
+ DECL_ACCESSORS(external_pointer, void)
+
+ // Dispatched behavior.
+ DECL_CAST(FixedTypedArrayBase)
+
+ static const int kBasePointerOffset = FixedArrayBase::kHeaderSize;
+ static const int kExternalPointerOffset = kBasePointerOffset + kPointerSize;
+ static const int kHeaderSize =
+ DOUBLE_POINTER_ALIGN(kExternalPointerOffset + kPointerSize);
+
+ static const int kDataOffset = kHeaderSize;
+
+ static const int kMaxElementSize = 8;
+
+#ifdef V8_HOST_ARCH_32_BIT
+ static const size_t kMaxByteLength = std::numeric_limits<size_t>::max();
+#else
+ static const size_t kMaxByteLength =
+ static_cast<size_t>(Smi::kMaxValue) * kMaxElementSize;
+#endif // V8_HOST_ARCH_32_BIT
+
+ static const size_t kMaxLength = Smi::kMaxValue;
+
+ class BodyDescriptor;
+ // No weak fields.
+ typedef BodyDescriptor BodyDescriptorWeak;
+
+ inline int size() const;
+
+ static inline int TypedArraySize(InstanceType type, int length);
+ inline int TypedArraySize(InstanceType type) const;
+
+ // Use with care: returns raw pointer into heap.
+ inline void* DataPtr();
+
+ inline int DataSize() const;
+
+ inline size_t ByteLength() const;
+
+ private:
+ static inline int ElementSize(InstanceType type);
+
+ inline int DataSize(InstanceType type) const;
+
+ DISALLOW_IMPLICIT_CONSTRUCTORS(FixedTypedArrayBase);
+};
+
+template <class Traits>
+class FixedTypedArray : public FixedTypedArrayBase {
+ public:
+ typedef typename Traits::ElementType ElementType;
+ static const InstanceType kInstanceType = Traits::kInstanceType;
+
+ DECL_CAST(FixedTypedArray<Traits>)
+
+ static inline ElementType get_scalar_from_data_ptr(void* data_ptr, int index);
+ inline ElementType get_scalar(int index);
+ static inline Handle<Object> get(FixedTypedArray* array, int index);
+ inline void set(int index, ElementType value);
+
+ static inline ElementType from(int value);
+ static inline ElementType from(uint32_t value);
+ static inline ElementType from(double value);
+
+ // This accessor applies the correct conversion from Smi, HeapNumber
+ // and undefined.
+ inline void SetValue(uint32_t index, Object* value);
+
+ DECL_PRINTER(FixedTypedArray)
+ DECL_VERIFIER(FixedTypedArray)
+
+ private:
+ DISALLOW_IMPLICIT_CONSTRUCTORS(FixedTypedArray);
+};
+
+#define FIXED_TYPED_ARRAY_TRAITS(Type, type, TYPE, elementType, size) \
+ STATIC_ASSERT(size <= FixedTypedArrayBase::kMaxElementSize); \
+ class Type##ArrayTraits { \
+ public: /* NOLINT */ \
+ typedef elementType ElementType; \
+ static const InstanceType kInstanceType = FIXED_##TYPE##_ARRAY_TYPE; \
+ static const char* Designator() { return #type " array"; } \
+ static inline Handle<Object> ToHandle(Isolate* isolate, \
+ elementType scalar); \
+ static inline elementType defaultValue(); \
+ }; \
+ \
+ typedef FixedTypedArray<Type##ArrayTraits> Fixed##Type##Array;
+
+TYPED_ARRAYS(FIXED_TYPED_ARRAY_TRAITS)
+
+#undef FIXED_TYPED_ARRAY_TRAITS
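For a concrete instantiation, the Uint8 row of TYPED_ARRAYS expands the traits macro to roughly the following (paraphrased; the STATIC_ASSERT is elided):

    class Uint8ArrayTraits {
     public:
      typedef uint8_t ElementType;
      static const InstanceType kInstanceType = FIXED_UINT8_ARRAY_TYPE;
      static const char* Designator() { return "uint8 array"; }
      static inline Handle<Object> ToHandle(Isolate* isolate, uint8_t scalar);
      static inline uint8_t defaultValue();
    };
    typedef FixedTypedArray<Uint8ArrayTraits> FixedUint8Array;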
+
+class TemplateList : public FixedArray {
+ public:
+ static Handle<TemplateList> New(Isolate* isolate, int size);
+ inline int length() const;
+ inline Object* get(int index) const;
+ inline void set(int index, Object* value);
+ static Handle<TemplateList> Add(Isolate* isolate, Handle<TemplateList> list,
+ Handle<Object> value);
+ DECL_CAST(TemplateList)
+ private:
+ static const int kLengthIndex = 0;
+ static const int kFirstElementIndex = kLengthIndex + 1;
+ DISALLOW_IMPLICIT_CONSTRUCTORS(TemplateList);
+};
+
+} // namespace internal
+} // namespace v8
+
+#include "src/objects/object-macros-undef.h"
+
+#endif // V8_OBJECTS_FIXED_ARRAY_H_
diff --git a/deps/v8/src/objects/hash-table-inl.h b/deps/v8/src/objects/hash-table-inl.h
index baff7c03b4..a764684a00 100644
--- a/deps/v8/src/objects/hash-table-inl.h
+++ b/deps/v8/src/objects/hash-table-inl.h
@@ -11,12 +11,116 @@
namespace v8 {
namespace internal {
+int HashTableBase::NumberOfElements() const {
+ return Smi::ToInt(get(kNumberOfElementsIndex));
+}
+
+int HashTableBase::NumberOfDeletedElements() const {
+ return Smi::ToInt(get(kNumberOfDeletedElementsIndex));
+}
+
+int HashTableBase::Capacity() const { return Smi::ToInt(get(kCapacityIndex)); }
+
+void HashTableBase::ElementAdded() {
+ SetNumberOfElements(NumberOfElements() + 1);
+}
+
+void HashTableBase::ElementRemoved() {
+ SetNumberOfElements(NumberOfElements() - 1);
+ SetNumberOfDeletedElements(NumberOfDeletedElements() + 1);
+}
+
+void HashTableBase::ElementsRemoved(int n) {
+ SetNumberOfElements(NumberOfElements() - n);
+ SetNumberOfDeletedElements(NumberOfDeletedElements() + n);
+}
+
+// static
+int HashTableBase::ComputeCapacity(int at_least_space_for) {
+ // Add 50% slack to make slot collisions sufficiently unlikely.
+ // See matching computation in HashTable::HasSufficientCapacityToAdd().
+ // Must be kept in sync with CodeStubAssembler::HashTableComputeCapacity().
+ int raw_cap = at_least_space_for + (at_least_space_for >> 1);
+ int capacity = base::bits::RoundUpToPowerOfTwo32(raw_cap);
+ return Max(capacity, kMinCapacity);
+}
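A quick worked example of the computation above: ComputeCapacity(21) yields raw_cap = 21 + (21 >> 1) = 31, which RoundUpToPowerOfTwo32 rounds to 32, so a table sized for 21 elements is at most roughly two-thirds full (21/32 ≈ 0.66) before it needs to grow.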
+
+void HashTableBase::SetNumberOfElements(int nof) {
+ set(kNumberOfElementsIndex, Smi::FromInt(nof));
+}
+
+void HashTableBase::SetNumberOfDeletedElements(int nod) {
+ set(kNumberOfDeletedElementsIndex, Smi::FromInt(nod));
+}
+
+template <typename Key>
+int BaseShape<Key>::GetMapRootIndex() {
+ return Heap::kHashTableMapRootIndex;
+}
+
+template <typename Derived, typename Shape>
+int HashTable<Derived, Shape>::FindEntry(Key key) {
+ return FindEntry(GetIsolate(), key);
+}
+
+template <typename Derived, typename Shape>
+int HashTable<Derived, Shape>::FindEntry(Isolate* isolate, Key key) {
+ return FindEntry(isolate, key, Shape::Hash(isolate, key));
+}
+
+// Find entry for key otherwise return kNotFound.
+template <typename Derived, typename Shape>
+int HashTable<Derived, Shape>::FindEntry(Isolate* isolate, Key key,
+ int32_t hash) {
+ uint32_t capacity = Capacity();
+ uint32_t entry = FirstProbe(hash, capacity);
+ uint32_t count = 1;
+ // EnsureCapacity will guarantee the hash table is never full.
+ Object* undefined = isolate->heap()->undefined_value();
+ Object* the_hole = isolate->heap()->the_hole_value();
+ USE(the_hole);
+ while (true) {
+ Object* element = KeyAt(entry);
+ // Empty entry. Uses raw unchecked accessors because it is called by the
+ // string table during bootstrapping.
+ if (element == undefined) break;
+ if (!(Shape::kNeedsHoleCheck && the_hole == element)) {
+ if (Shape::IsMatch(key, element)) return entry;
+ }
+ entry = NextProbe(entry, count++, capacity);
+ }
+ return kNotFound;
+}
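The loop above is standard open addressing: EnsureCapacity keeps at least one undefined slot, so the probe sequence always terminates. FirstProbe and NextProbe implement triangular probing over a power-of-two table; conceptually (a sketch of the scheme, not code from this diff):

    // entry_0 = hash & (capacity - 1)
    // entry_k = (entry_{k-1} + k) & (capacity - 1), for k = 1, 2, 3, ...
    // With capacity a power of two, this sequence visits every slot once.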
+
template <typename KeyT>
bool BaseShape<KeyT>::IsLive(Isolate* isolate, Object* k) {
Heap* heap = isolate->heap();
return k != heap->the_hole_value() && k != heap->undefined_value();
}
+template <typename Derived, typename Shape>
+HashTable<Derived, Shape>* HashTable<Derived, Shape>::cast(Object* obj) {
+ SLOW_DCHECK(obj->IsHashTable());
+ return reinterpret_cast<HashTable*>(obj);
+}
+
+template <typename Derived, typename Shape>
+const HashTable<Derived, Shape>* HashTable<Derived, Shape>::cast(
+ const Object* obj) {
+ SLOW_DCHECK(obj->IsHashTable());
+ return reinterpret_cast<const HashTable*>(obj);
+}
+
+bool ObjectHashSet::Has(Isolate* isolate, Handle<Object> key, int32_t hash) {
+ return FindEntry(isolate, key, hash) != kNotFound;
+}
+
+bool ObjectHashSet::Has(Isolate* isolate, Handle<Object> key) {
+ Object* hash = key->GetHash();
+ if (!hash->IsSmi()) return false;
+ return FindEntry(isolate, key, Smi::ToInt(hash)) != kNotFound;
+}
+
int OrderedHashSet::GetMapRootIndex() {
return Heap::kOrderedHashSetMapRootIndex;
}
@@ -25,6 +129,11 @@ int OrderedHashMap::GetMapRootIndex() {
return Heap::kOrderedHashMapMapRootIndex;
}
+inline Object* OrderedHashMap::ValueAt(int entry) {
+ DCHECK_LT(entry, this->UsedCapacity());
+ return get(EntryToIndex(entry) + kValueOffset);
+}
+
} // namespace internal
} // namespace v8
diff --git a/deps/v8/src/objects/hash-table.h b/deps/v8/src/objects/hash-table.h
index 9b7ac5deb3..a058b7df39 100644
--- a/deps/v8/src/objects/hash-table.h
+++ b/deps/v8/src/objects/hash-table.h
@@ -5,10 +5,9 @@
#ifndef V8_OBJECTS_HASH_TABLE_H_
#define V8_OBJECTS_HASH_TABLE_H_
-#include "src/objects.h"
-
#include "src/base/compiler-specific.h"
#include "src/globals.h"
+#include "src/objects/fixed-array.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
@@ -896,37 +895,6 @@ class OrderedHashTableIterator : public JSCollectionIterator {
DISALLOW_IMPLICIT_CONSTRUCTORS(OrderedHashTableIterator);
};
-
-class JSSetIterator
- : public OrderedHashTableIterator<JSSetIterator, OrderedHashSet> {
- public:
- // Dispatched behavior.
- DECL_PRINTER(JSSetIterator)
- DECL_VERIFIER(JSSetIterator)
-
- DECL_CAST(JSSetIterator)
-
- private:
- DISALLOW_IMPLICIT_CONSTRUCTORS(JSSetIterator);
-};
-
-class JSMapIterator
- : public OrderedHashTableIterator<JSMapIterator, OrderedHashMap> {
- public:
- // Dispatched behavior.
- DECL_PRINTER(JSMapIterator)
- DECL_VERIFIER(JSMapIterator)
-
- DECL_CAST(JSMapIterator)
-
- // Returns the current value of the iterator. This should only be called when
- // |HasMore| returns true.
- inline Object* CurrentValue();
-
- private:
- DISALLOW_IMPLICIT_CONSTRUCTORS(JSMapIterator);
-};
-
} // namespace internal
} // namespace v8
diff --git a/deps/v8/src/objects/js-array-inl.h b/deps/v8/src/objects/js-array-inl.h
index 6bba2f0054..1128e190b2 100644
--- a/deps/v8/src/objects/js-array-inl.h
+++ b/deps/v8/src/objects/js-array-inl.h
@@ -204,6 +204,15 @@ void JSTypedArray::set_length(Object* value, WriteBarrierMode mode) {
CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kLengthOffset, value, mode);
}
+bool JSTypedArray::HasJSTypedArrayPrototype(Isolate* isolate) {
+ DisallowHeapAllocation no_gc;
+ Object* proto = map()->prototype();
+ if (!proto->IsJSObject()) return false;
+
+ JSObject* proto_obj = JSObject::cast(proto);
+ return proto_obj->map()->prototype() == *isolate->typed_array_prototype();
+}
+
// static
MaybeHandle<JSTypedArray> JSTypedArray::Validate(Isolate* isolate,
Handle<Object> receiver,
@@ -227,6 +236,26 @@ MaybeHandle<JSTypedArray> JSTypedArray::Validate(Isolate* isolate,
return array;
}
+// static
+Handle<JSFunction> JSTypedArray::DefaultConstructor(
+ Isolate* isolate, Handle<JSTypedArray> exemplar) {
+ Handle<JSFunction> default_ctor = isolate->uint8_array_fun();
+ switch (exemplar->type()) {
+#define TYPED_ARRAY_CTOR(Type, type, TYPE, ctype, size) \
+ case kExternal##Type##Array: { \
+ default_ctor = isolate->type##_array_fun(); \
+ break; \
+ }
+
+ TYPED_ARRAYS(TYPED_ARRAY_CTOR)
+#undef TYPED_ARRAY_CTOR
+ default:
+ UNREACHABLE();
+ }
+
+ return default_ctor;
+}
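After TYPED_ARRAYS expands TYPED_ARRAY_CTOR, each switch arm simply picks the per-type constructor off the isolate; the Float64 row, for instance, becomes (paraphrased):

    case kExternalFloat64Array: {
      default_ctor = isolate->float64_array_fun();
      break;
    }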
+
#ifdef VERIFY_HEAP
ACCESSORS(JSTypedArray, raw_length, Object, kLengthOffset)
#endif
diff --git a/deps/v8/src/objects/js-array.h b/deps/v8/src/objects/js-array.h
index a2d13a766d..806c275c8f 100644
--- a/deps/v8/src/objects/js-array.h
+++ b/deps/v8/src/objects/js-array.h
@@ -6,6 +6,7 @@
#define V8_OBJECTS_JS_ARRAY_H_
#include "src/objects.h"
+#include "src/objects/fixed-array.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
@@ -298,9 +299,12 @@ class JSTypedArray : public JSArrayBufferView {
Handle<JSArrayBuffer> GetBuffer();
+ inline bool HasJSTypedArrayPrototype(Isolate* isolate);
static inline MaybeHandle<JSTypedArray> Validate(Isolate* isolate,
Handle<Object> receiver,
const char* method_name);
+ static inline Handle<JSFunction> DefaultConstructor(
+ Isolate* isolate, Handle<JSTypedArray> exemplar);
// ES7 section 22.2.4.6 Create ( constructor, argumentList )
static MaybeHandle<JSTypedArray> Create(Isolate* isolate,
Handle<Object> default_ctor, int argc,
diff --git a/deps/v8/src/objects/js-collection-inl.h b/deps/v8/src/objects/js-collection-inl.h
new file mode 100644
index 0000000000..7ad24bcf12
--- /dev/null
+++ b/deps/v8/src/objects/js-collection-inl.h
@@ -0,0 +1,49 @@
+// Copyright 2017 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_OBJECTS_JS_COLLECTION_INL_H_
+#define V8_OBJECTS_JS_COLLECTION_INL_H_
+
+#include "src/objects/js-collection.h"
+
+// Has to be the last include (doesn't have include guards):
+#include "src/objects/object-macros.h"
+
+namespace v8 {
+namespace internal {
+
+ACCESSORS(JSCollection, table, Object, kTableOffset)
+ACCESSORS(JSCollectionIterator, table, Object, kTableOffset)
+ACCESSORS(JSCollectionIterator, index, Object, kIndexOffset)
+
+ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
+ACCESSORS(JSWeakCollection, next, Object, kNextOffset)
+
+TYPE_CHECKER(JSMap, JS_MAP_TYPE)
+TYPE_CHECKER(JSSet, JS_SET_TYPE)
+TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
+TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
+
+CAST_ACCESSOR(JSSet)
+CAST_ACCESSOR(JSSetIterator)
+CAST_ACCESSOR(JSMap)
+CAST_ACCESSOR(JSMapIterator)
+CAST_ACCESSOR(JSWeakCollection)
+CAST_ACCESSOR(JSWeakMap)
+CAST_ACCESSOR(JSWeakSet)
+
+Object* JSMapIterator::CurrentValue() {
+ OrderedHashMap* table(OrderedHashMap::cast(this->table()));
+ int index = Smi::ToInt(this->index());
+ Object* value = table->ValueAt(index);
+ DCHECK(!value->IsTheHole(table->GetIsolate()));
+ return value;
+}
+
+} // namespace internal
+} // namespace v8
+
+#include "src/objects/object-macros-undef.h"
+
+#endif // V8_OBJECTS_JS_COLLECTION_INL_H_
diff --git a/deps/v8/src/objects/js-collection.h b/deps/v8/src/objects/js-collection.h
new file mode 100644
index 0000000000..0777ccf1bd
--- /dev/null
+++ b/deps/v8/src/objects/js-collection.h
@@ -0,0 +1,162 @@
+// Copyright 2017 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_OBJECTS_JS_COLLECTION_H_
+#define V8_OBJECTS_JS_COLLECTION_H_
+
+#include "src/objects.h"
+
+// Has to be the last include (doesn't have include guards):
+#include "src/objects/object-macros.h"
+
+namespace v8 {
+namespace internal {
+
+class JSCollection : public JSObject {
+ public:
+ // [table]: the backing hash table
+ DECL_ACCESSORS(table, Object)
+
+ static const int kTableOffset = JSObject::kHeaderSize;
+ static const int kSize = kTableOffset + kPointerSize;
+
+ private:
+ DISALLOW_IMPLICIT_CONSTRUCTORS(JSCollection);
+};
+
+// The JSSet describes ECMAScript Harmony sets
+class JSSet : public JSCollection {
+ public:
+ DECL_CAST(JSSet)
+
+ static void Initialize(Handle<JSSet> set, Isolate* isolate);
+ static void Clear(Handle<JSSet> set);
+
+ // Dispatched behavior.
+ DECL_PRINTER(JSSet)
+ DECL_VERIFIER(JSSet)
+
+ private:
+ DISALLOW_IMPLICIT_CONSTRUCTORS(JSSet);
+};
+
+class JSSetIterator
+ : public OrderedHashTableIterator<JSSetIterator, OrderedHashSet> {
+ public:
+ // Dispatched behavior.
+ DECL_PRINTER(JSSetIterator)
+ DECL_VERIFIER(JSSetIterator)
+
+ DECL_CAST(JSSetIterator)
+
+ private:
+ DISALLOW_IMPLICIT_CONSTRUCTORS(JSSetIterator);
+};
+
+// The JSMap describes ECMAScript Harmony maps
+class JSMap : public JSCollection {
+ public:
+ DECL_CAST(JSMap)
+
+ static void Initialize(Handle<JSMap> map, Isolate* isolate);
+ static void Clear(Handle<JSMap> map);
+
+ // Dispatched behavior.
+ DECL_PRINTER(JSMap)
+ DECL_VERIFIER(JSMap)
+
+ private:
+ DISALLOW_IMPLICIT_CONSTRUCTORS(JSMap);
+};
+
+class JSMapIterator
+ : public OrderedHashTableIterator<JSMapIterator, OrderedHashMap> {
+ public:
+ // Dispatched behavior.
+ DECL_PRINTER(JSMapIterator)
+ DECL_VERIFIER(JSMapIterator)
+
+ DECL_CAST(JSMapIterator)
+
+ // Returns the current value of the iterator. This should only be called when
+ // |HasMore| returns true.
+ inline Object* CurrentValue();
+
+ private:
+ DISALLOW_IMPLICIT_CONSTRUCTORS(JSMapIterator);
+};
+
+// Base class for both JSWeakMap and JSWeakSet
+class JSWeakCollection : public JSObject {
+ public:
+ DECL_CAST(JSWeakCollection)
+
+ // [table]: the backing hash table mapping keys to values.
+ DECL_ACCESSORS(table, Object)
+
+ // [next]: linked list of encountered weak maps during GC.
+ DECL_ACCESSORS(next, Object)
+
+ static void Initialize(Handle<JSWeakCollection> collection, Isolate* isolate);
+ static void Set(Handle<JSWeakCollection> collection, Handle<Object> key,
+ Handle<Object> value, int32_t hash);
+ static bool Delete(Handle<JSWeakCollection> collection, Handle<Object> key,
+ int32_t hash);
+ static Handle<JSArray> GetEntries(Handle<JSWeakCollection> holder,
+ int max_entries);
+
+ static const int kTableOffset = JSObject::kHeaderSize;
+ static const int kNextOffset = kTableOffset + kPointerSize;
+ static const int kSize = kNextOffset + kPointerSize;
+
+ // Visiting policy defines whether the table and next collection fields
+ // should be visited or not.
+ enum BodyVisitingPolicy { kIgnoreWeakness, kRespectWeakness };
+
+ // Iterates the object's fields according to the visiting policy.
+ template <BodyVisitingPolicy>
+ class BodyDescriptorImpl;
+
+ // Visit the whole object.
+ typedef BodyDescriptorImpl<kIgnoreWeakness> BodyDescriptor;
+
+ // Don't visit table and next collection fields.
+ typedef BodyDescriptorImpl<kRespectWeakness> BodyDescriptorWeak;
+
+ private:
+ DISALLOW_IMPLICIT_CONSTRUCTORS(JSWeakCollection);
+};
+
+// The JSWeakMap describes ECMAScript Harmony weak maps
+class JSWeakMap : public JSWeakCollection {
+ public:
+ DECL_CAST(JSWeakMap)
+
+ // Dispatched behavior.
+ DECL_PRINTER(JSWeakMap)
+ DECL_VERIFIER(JSWeakMap)
+
+ private:
+ DISALLOW_IMPLICIT_CONSTRUCTORS(JSWeakMap);
+};
+
+// The JSWeakSet describes ECMAScript Harmony weak sets
+class JSWeakSet : public JSWeakCollection {
+ public:
+ DECL_CAST(JSWeakSet)
+
+ // Dispatched behavior.
+ DECL_PRINTER(JSWeakSet)
+ DECL_VERIFIER(JSWeakSet)
+
+ private:
+ DISALLOW_IMPLICIT_CONSTRUCTORS(JSWeakSet);
+};
+
+} // namespace internal
+} // namespace v8
+
+#include "src/objects/object-macros-undef.h"
+
+#endif // V8_OBJECTS_JS_COLLECTION_H_
diff --git a/deps/v8/src/objects/js-regexp.h b/deps/v8/src/objects/js-regexp.h
index 32c07e879e..69cd5c3104 100644
--- a/deps/v8/src/objects/js-regexp.h
+++ b/deps/v8/src/objects/js-regexp.h
@@ -144,13 +144,20 @@ DEFINE_OPERATORS_FOR_FLAGS(JSRegExp::Flags)
// After creation the result must be treated as a JSArray in all regards.
class JSRegExpResult : public JSArray {
public:
- // Offsets of object fields.
- static const int kIndexOffset = JSArray::kSize;
- static const int kInputOffset = kIndexOffset + kPointerSize;
- static const int kSize = kInputOffset + kPointerSize;
+#define REG_EXP_RESULT_FIELDS(V) \
+ V(kIndexOffset, kPointerSize) \
+ V(kInputOffset, kPointerSize) \
+ V(kGroupsOffset, kPointerSize) \
+ V(kSize, 0)
+
+ DEFINE_FIELD_OFFSET_CONSTANTS(JSArray::kSize, REG_EXP_RESULT_FIELDS)
+#undef REG_EXP_RESULT_FIELDS
+
// Indices of in-object properties.
static const int kIndexIndex = 0;
static const int kInputIndex = 1;
+ static const int kGroupsIndex = 2;
+ static const int kInObjectPropertyCount = 3;
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(JSRegExpResult);
diff --git a/deps/v8/src/objects/literal-objects.h b/deps/v8/src/objects/literal-objects.h
index 6fe34ffa8a..7fb0c712f2 100644
--- a/deps/v8/src/objects/literal-objects.h
+++ b/deps/v8/src/objects/literal-objects.h
@@ -6,6 +6,7 @@
#define V8_OBJECTS_LITERAL_OBJECTS_H_
#include "src/objects.h"
+#include "src/objects/fixed-array.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
diff --git a/deps/v8/src/objects/map-inl.h b/deps/v8/src/objects/map-inl.h
index a5421a32ca..c78f947b3a 100644
--- a/deps/v8/src/objects/map-inl.h
+++ b/deps/v8/src/objects/map-inl.h
@@ -5,9 +5,19 @@
#ifndef V8_OBJECTS_MAP_INL_H_
#define V8_OBJECTS_MAP_INL_H_
-#include "src/field-type.h"
#include "src/objects/map.h"
+#include "src/field-type.h"
+#include "src/objects-inl.h"
+#include "src/objects/descriptor-array.h"
+#include "src/objects/shared-function-info.h"
+#include "src/property.h"
+#include "src/transitions.h"
+
+// For pulling in heap/incremental-marking.h which is needed by
+// ACCESSORS_CHECKED.
+#include "src/heap/heap-inl.h"
+
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
@@ -16,6 +26,48 @@ namespace internal {
CAST_ACCESSOR(Map)
+ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
+ACCESSORS_CHECKED(Map, layout_descriptor, LayoutDescriptor,
+ kLayoutDescriptorOffset, FLAG_unbox_double_fields)
+ACCESSORS(Map, raw_transitions, Object, kTransitionsOrPrototypeInfoOffset)
+
+// |bit_field| fields.
+BIT_FIELD_ACCESSORS(Map, bit_field, has_non_instance_prototype,
+ Map::HasNonInstancePrototypeBit)
+BIT_FIELD_ACCESSORS(Map, bit_field, is_callable, Map::IsCallableBit)
+BIT_FIELD_ACCESSORS(Map, bit_field, has_named_interceptor,
+ Map::HasNamedInterceptorBit)
+BIT_FIELD_ACCESSORS(Map, bit_field, has_indexed_interceptor,
+ Map::HasIndexedInterceptorBit)
+BIT_FIELD_ACCESSORS(Map, bit_field, is_undetectable, Map::IsUndetectableBit)
+BIT_FIELD_ACCESSORS(Map, bit_field, is_access_check_needed,
+ Map::IsAccessCheckNeededBit)
+BIT_FIELD_ACCESSORS(Map, bit_field, is_constructor, Map::IsConstructorBit)
+BIT_FIELD_ACCESSORS(Map, bit_field, has_prototype_slot,
+ Map::HasPrototypeSlotBit)
+
+// |bit_field2| fields.
+BIT_FIELD_ACCESSORS(Map, bit_field2, is_extensible, Map::IsExtensibleBit)
+BIT_FIELD_ACCESSORS(Map, bit_field2, is_prototype_map, Map::IsPrototypeMapBit)
+
+// |bit_field3| fields.
+BIT_FIELD_ACCESSORS(Map, bit_field3, owns_descriptors, Map::OwnsDescriptorsBit)
+BIT_FIELD_ACCESSORS(Map, bit_field3, has_hidden_prototype,
+ Map::HasHiddenPrototypeBit)
+BIT_FIELD_ACCESSORS(Map, bit_field3, is_deprecated, Map::IsDeprecatedBit)
+BIT_FIELD_ACCESSORS(Map, bit_field3, is_migration_target,
+ Map::IsMigrationTargetBit)
+BIT_FIELD_ACCESSORS(Map, bit_field3, is_immutable_proto,
+ Map::IsImmutablePrototypeBit)
+BIT_FIELD_ACCESSORS(Map, bit_field3, new_target_is_base,
+ Map::NewTargetIsBaseBit)
+BIT_FIELD_ACCESSORS(Map, bit_field3, may_have_interesting_symbols,
+ Map::MayHaveInterestingSymbolsBit)
+BIT_FIELD_ACCESSORS(Map, bit_field3, construction_counter,
+ Map::ConstructionCounterBits)
+
+TYPE_CHECKER(Map, MAP_TYPE)
+
InterceptorInfo* Map::GetNamedInterceptor() {
DCHECK(has_named_interceptor());
FunctionTemplateInfo* info = GetFunctionTemplateInfo();
@@ -75,6 +127,597 @@ void Map::GeneralizeIfCanHaveTransitionableFastElementsKind(
}
}
+bool Map::IsUnboxedDoubleField(FieldIndex index) const {
+ if (!FLAG_unbox_double_fields) return false;
+ if (index.is_hidden_field() || !index.is_inobject()) return false;
+ return !layout_descriptor()->IsTagged(index.property_index());
+}
+
+bool Map::TooManyFastProperties(StoreFromKeyed store_mode) const {
+ if (UnusedPropertyFields() != 0) return false;
+ if (is_prototype_map()) return false;
+ int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12;
+ int limit = Max(minimum, GetInObjectProperties());
+ int external = NumberOfFields() - GetInObjectProperties();
+ return external > limit;
+}
+
+PropertyDetails Map::GetLastDescriptorDetails() const {
+ return instance_descriptors()->GetDetails(LastAdded());
+}
+
+int Map::LastAdded() const {
+ int number_of_own_descriptors = NumberOfOwnDescriptors();
+ DCHECK_GT(number_of_own_descriptors, 0);
+ return number_of_own_descriptors - 1;
+}
+
+int Map::NumberOfOwnDescriptors() const {
+ return NumberOfOwnDescriptorsBits::decode(bit_field3());
+}
+
+void Map::SetNumberOfOwnDescriptors(int number) {
+ DCHECK_LE(number, instance_descriptors()->number_of_descriptors());
+ CHECK_LE(static_cast<unsigned>(number),
+ static_cast<unsigned>(kMaxNumberOfDescriptors));
+ set_bit_field3(NumberOfOwnDescriptorsBits::update(bit_field3(), number));
+}
+
+int Map::EnumLength() const { return EnumLengthBits::decode(bit_field3()); }
+
+void Map::SetEnumLength(int length) {
+ if (length != kInvalidEnumCacheSentinel) {
+ DCHECK_LE(length, NumberOfOwnDescriptors());
+ CHECK_LE(static_cast<unsigned>(length),
+ static_cast<unsigned>(kMaxNumberOfDescriptors));
+ }
+ set_bit_field3(EnumLengthBits::update(bit_field3(), length));
+}
+
+FixedArrayBase* Map::GetInitialElements() const {
+ FixedArrayBase* result = nullptr;
+ if (has_fast_elements() || has_fast_string_wrapper_elements()) {
+ result = GetHeap()->empty_fixed_array();
+ } else if (has_fast_sloppy_arguments_elements()) {
+ result = GetHeap()->empty_sloppy_arguments_elements();
+ } else if (has_fixed_typed_array_elements()) {
+ result = GetHeap()->EmptyFixedTypedArrayForMap(this);
+ } else if (has_dictionary_elements()) {
+ result = GetHeap()->empty_slow_element_dictionary();
+ } else {
+ UNREACHABLE();
+ }
+ DCHECK(!GetHeap()->InNewSpace(result));
+ return result;
+}
+
+VisitorId Map::visitor_id() const {
+ return static_cast<VisitorId>(
+ RELAXED_READ_BYTE_FIELD(this, kVisitorIdOffset));
+}
+
+void Map::set_visitor_id(VisitorId id) {
+ CHECK_LT(static_cast<unsigned>(id), 256);
+ RELAXED_WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
+}
+
+int Map::instance_size_in_words() const {
+ return RELAXED_READ_BYTE_FIELD(this, kInstanceSizeInWordsOffset);
+}
+
+void Map::set_instance_size_in_words(int value) {
+ RELAXED_WRITE_BYTE_FIELD(this, kInstanceSizeInWordsOffset,
+ static_cast<byte>(value));
+}
+
+int Map::instance_size() const {
+ return instance_size_in_words() << kPointerSizeLog2;
+}
+
+void Map::set_instance_size(int value) {
+ CHECK_EQ(0, value & (kPointerSize - 1));
+ value >>= kPointerSizeLog2;
+ CHECK_LT(static_cast<unsigned>(value), 256);
+ set_instance_size_in_words(value);
+}
+
+int Map::inobject_properties_start_or_constructor_function_index() const {
+ return RELAXED_READ_BYTE_FIELD(
+ this, kInObjectPropertiesStartOrConstructorFunctionIndexOffset);
+}
+
+void Map::set_inobject_properties_start_or_constructor_function_index(
+ int value) {
+ CHECK_LT(static_cast<unsigned>(value), 256);
+ RELAXED_WRITE_BYTE_FIELD(
+ this, kInObjectPropertiesStartOrConstructorFunctionIndexOffset,
+ static_cast<byte>(value));
+}
+
+int Map::GetInObjectPropertiesStartInWords() const {
+ DCHECK(IsJSObjectMap());
+ return inobject_properties_start_or_constructor_function_index();
+}
+
+void Map::SetInObjectPropertiesStartInWords(int value) {
+ CHECK(IsJSObjectMap());
+ set_inobject_properties_start_or_constructor_function_index(value);
+}
+
+int Map::GetInObjectProperties() const {
+ DCHECK(IsJSObjectMap());
+ return instance_size_in_words() - GetInObjectPropertiesStartInWords();
+}
+
+int Map::GetConstructorFunctionIndex() const {
+ DCHECK(IsPrimitiveMap());
+ return inobject_properties_start_or_constructor_function_index();
+}
+
+void Map::SetConstructorFunctionIndex(int value) {
+ CHECK(IsPrimitiveMap());
+ set_inobject_properties_start_or_constructor_function_index(value);
+}
+
+int Map::GetInObjectPropertyOffset(int index) const {
+ return (GetInObjectPropertiesStartInWords() + index) * kPointerSize;
+}
+
+Handle<Map> Map::AddMissingTransitionsForTesting(
+ Handle<Map> split_map, Handle<DescriptorArray> descriptors,
+ Handle<LayoutDescriptor> full_layout_descriptor) {
+ return AddMissingTransitions(split_map, descriptors, full_layout_descriptor);
+}
+
+InstanceType Map::instance_type() const {
+ return static_cast<InstanceType>(
+ READ_UINT16_FIELD(this, kInstanceTypeOffset));
+}
+
+void Map::set_instance_type(InstanceType value) {
+ WRITE_UINT16_FIELD(this, kInstanceTypeOffset, value);
+}
+
+int Map::UnusedPropertyFields() const {
+ int value = used_or_unused_instance_size_in_words();
+ DCHECK_IMPLIES(!IsJSObjectMap(), value == 0);
+ int unused;
+ if (value >= JSObject::kFieldsAdded) {
+ unused = instance_size_in_words() - value;
+ } else {
+ // For out-of-object properties the "used_or_unused_instance_size_in_words"
+ // byte encodes the slack in the property array.
+ unused = value;
+ }
+ return unused;
+}
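A concrete reading of this dual encoding (illustrative numbers; kFieldsAdded is the JSObject header size in words, i.e. 3): a map with instance_size_in_words() == 10 and a used_or_unused value of 8 has 10 - 8 = 2 unused in-object fields, while a value of 2 (< kFieldsAdded) means every in-object field is used and the property array has 2 slack slots.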
+
+int Map::used_or_unused_instance_size_in_words() const {
+ return RELAXED_READ_BYTE_FIELD(this, kUsedOrUnusedInstanceSizeInWordsOffset);
+}
+
+void Map::set_used_or_unused_instance_size_in_words(int value) {
+ CHECK_LE(static_cast<unsigned>(value), 255);
+ RELAXED_WRITE_BYTE_FIELD(this, kUsedOrUnusedInstanceSizeInWordsOffset,
+ static_cast<byte>(value));
+}
+
+int Map::UsedInstanceSize() const {
+ int words = used_or_unused_instance_size_in_words();
+ if (words < JSObject::kFieldsAdded) {
+ // All in-object properties are used and this word tracks the slack
+ // in the property array.
+ return instance_size();
+ }
+ return words * kPointerSize;
+}
+
+void Map::SetInObjectUnusedPropertyFields(int value) {
+ STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kPointerSize);
+ if (!IsJSObjectMap()) {
+ CHECK_EQ(0, value);
+ set_used_or_unused_instance_size_in_words(0);
+ DCHECK_EQ(0, UnusedPropertyFields());
+ return;
+ }
+ CHECK_LE(0, value);
+ DCHECK_LE(value, GetInObjectProperties());
+ int used_inobject_properties = GetInObjectProperties() - value;
+ set_used_or_unused_instance_size_in_words(
+ GetInObjectPropertyOffset(used_inobject_properties) / kPointerSize);
+ DCHECK_EQ(value, UnusedPropertyFields());
+}
+
+void Map::SetOutOfObjectUnusedPropertyFields(int value) {
+ STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kPointerSize);
+ CHECK_LT(static_cast<unsigned>(value), JSObject::kFieldsAdded);
+ // For out-of-object properties the "used_or_unused_instance_size_in_words"
+ // byte encodes the slack in the property array.
+ set_used_or_unused_instance_size_in_words(value);
+ DCHECK_EQ(value, UnusedPropertyFields());
+}
+
+void Map::CopyUnusedPropertyFields(Map* map) {
+ set_used_or_unused_instance_size_in_words(
+ map->used_or_unused_instance_size_in_words());
+ DCHECK_EQ(UnusedPropertyFields(), map->UnusedPropertyFields());
+}
+
+void Map::AccountAddedPropertyField() {
+ // Update used instance size and unused property fields number.
+ STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kPointerSize);
+#ifdef DEBUG
+ int new_unused = UnusedPropertyFields() - 1;
+ if (new_unused < 0) new_unused += JSObject::kFieldsAdded;
+#endif
+ int value = used_or_unused_instance_size_in_words();
+ if (value >= JSObject::kFieldsAdded) {
+ if (value == instance_size_in_words()) {
+ AccountAddedOutOfObjectPropertyField(0);
+ } else {
+ // The property is added in-object, so simply increment the counter.
+ set_used_or_unused_instance_size_in_words(value + 1);
+ }
+ } else {
+ AccountAddedOutOfObjectPropertyField(value);
+ }
+ DCHECK_EQ(new_unused, UnusedPropertyFields());
+}
+
+void Map::AccountAddedOutOfObjectPropertyField(int unused_in_property_array) {
+ unused_in_property_array--;
+ if (unused_in_property_array < 0) {
+ unused_in_property_array += JSObject::kFieldsAdded;
+ }
+ CHECK_LT(static_cast<unsigned>(unused_in_property_array),
+ JSObject::kFieldsAdded);
+ set_used_or_unused_instance_size_in_words(unused_in_property_array);
+ DCHECK_EQ(unused_in_property_array, UnusedPropertyFields());
+}
+
+byte Map::bit_field() const { return READ_BYTE_FIELD(this, kBitFieldOffset); }
+
+void Map::set_bit_field(byte value) {
+ WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
+}
+
+byte Map::bit_field2() const { return READ_BYTE_FIELD(this, kBitField2Offset); }
+
+void Map::set_bit_field2(byte value) {
+ WRITE_BYTE_FIELD(this, kBitField2Offset, value);
+}
+
+bool Map::is_abandoned_prototype_map() const {
+ return is_prototype_map() && !owns_descriptors();
+}
+
+bool Map::should_be_fast_prototype_map() const {
+ if (!prototype_info()->IsPrototypeInfo()) return false;
+ return PrototypeInfo::cast(prototype_info())->should_be_fast_map();
+}
+
+void Map::set_elements_kind(ElementsKind elements_kind) {
+ CHECK_LT(static_cast<int>(elements_kind), kElementsKindCount);
+ set_bit_field2(Map::ElementsKindBits::update(bit_field2(), elements_kind));
+}
+
+ElementsKind Map::elements_kind() const {
+ return Map::ElementsKindBits::decode(bit_field2());
+}
+
+bool Map::has_fast_smi_elements() const {
+ return IsSmiElementsKind(elements_kind());
+}
+
+bool Map::has_fast_object_elements() const {
+ return IsObjectElementsKind(elements_kind());
+}
+
+bool Map::has_fast_smi_or_object_elements() const {
+ return IsSmiOrObjectElementsKind(elements_kind());
+}
+
+bool Map::has_fast_double_elements() const {
+ return IsDoubleElementsKind(elements_kind());
+}
+
+bool Map::has_fast_elements() const {
+ return IsFastElementsKind(elements_kind());
+}
+
+bool Map::has_sloppy_arguments_elements() const {
+ return IsSloppyArgumentsElementsKind(elements_kind());
+}
+
+bool Map::has_fast_sloppy_arguments_elements() const {
+ return elements_kind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
+}
+
+bool Map::has_fast_string_wrapper_elements() const {
+ return elements_kind() == FAST_STRING_WRAPPER_ELEMENTS;
+}
+
+bool Map::has_fixed_typed_array_elements() const {
+ return IsFixedTypedArrayElementsKind(elements_kind());
+}
+
+bool Map::has_dictionary_elements() const {
+ return IsDictionaryElementsKind(elements_kind());
+}
+
+void Map::set_is_dictionary_map(bool value) {
+ uint32_t new_bit_field3 = IsDictionaryMapBit::update(bit_field3(), value);
+ new_bit_field3 = IsUnstableBit::update(new_bit_field3, value);
+ set_bit_field3(new_bit_field3);
+}
+
+bool Map::is_dictionary_map() const {
+ return IsDictionaryMapBit::decode(bit_field3());
+}
+
+void Map::mark_unstable() {
+ set_bit_field3(IsUnstableBit::update(bit_field3(), true));
+}
+
+bool Map::is_stable() const { return !IsUnstableBit::decode(bit_field3()); }
+
+bool Map::CanBeDeprecated() const {
+ int descriptor = LastAdded();
+ for (int i = 0; i <= descriptor; i++) {
+ PropertyDetails details = instance_descriptors()->GetDetails(i);
+ if (details.representation().IsNone()) return true;
+ if (details.representation().IsSmi()) return true;
+ if (details.representation().IsDouble()) return true;
+ if (details.representation().IsHeapObject()) return true;
+ if (details.kind() == kData && details.location() == kDescriptor) {
+ return true;
+ }
+ }
+ return false;
+}
+
+void Map::NotifyLeafMapLayoutChange() {
+ if (is_stable()) {
+ mark_unstable();
+ dependent_code()->DeoptimizeDependentCodeGroup(
+ GetIsolate(), DependentCode::kPrototypeCheckGroup);
+ }
+}
+
+bool Map::CanTransition() const {
+ // Only JSObject and subtypes have map transitions and back pointers.
+ STATIC_ASSERT(LAST_TYPE == LAST_JS_OBJECT_TYPE);
+ return instance_type() >= FIRST_JS_OBJECT_TYPE;
+}
+
+bool Map::IsBooleanMap() const { return this == GetHeap()->boolean_map(); }
+bool Map::IsPrimitiveMap() const {
+ STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
+ return instance_type() <= LAST_PRIMITIVE_TYPE;
+}
+bool Map::IsJSReceiverMap() const {
+ STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
+ return instance_type() >= FIRST_JS_RECEIVER_TYPE;
+}
+bool Map::IsJSObjectMap() const {
+ STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
+ return instance_type() >= FIRST_JS_OBJECT_TYPE;
+}
+bool Map::IsJSArrayMap() const { return instance_type() == JS_ARRAY_TYPE; }
+bool Map::IsJSFunctionMap() const {
+ return instance_type() == JS_FUNCTION_TYPE;
+}
+bool Map::IsStringMap() const { return instance_type() < FIRST_NONSTRING_TYPE; }
+bool Map::IsJSProxyMap() const { return instance_type() == JS_PROXY_TYPE; }
+bool Map::IsJSGlobalProxyMap() const {
+ return instance_type() == JS_GLOBAL_PROXY_TYPE;
+}
+bool Map::IsJSGlobalObjectMap() const {
+ return instance_type() == JS_GLOBAL_OBJECT_TYPE;
+}
+bool Map::IsJSTypedArrayMap() const {
+ return instance_type() == JS_TYPED_ARRAY_TYPE;
+}
+bool Map::IsJSDataViewMap() const {
+ return instance_type() == JS_DATA_VIEW_TYPE;
+}
+
+Object* Map::prototype() const { return READ_FIELD(this, kPrototypeOffset); }
+
+void Map::set_prototype(Object* value, WriteBarrierMode mode) {
+ DCHECK(value->IsNull(GetIsolate()) || value->IsJSReceiver());
+ WRITE_FIELD(this, kPrototypeOffset, value);
+ CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
+}
+
+LayoutDescriptor* Map::layout_descriptor_gc_safe() const {
+ DCHECK(FLAG_unbox_double_fields);
+ Object* layout_desc = RELAXED_READ_FIELD(this, kLayoutDescriptorOffset);
+ return LayoutDescriptor::cast_gc_safe(layout_desc);
+}
+
+bool Map::HasFastPointerLayout() const {
+ DCHECK(FLAG_unbox_double_fields);
+ Object* layout_desc = RELAXED_READ_FIELD(this, kLayoutDescriptorOffset);
+ return LayoutDescriptor::IsFastPointerLayout(layout_desc);
+}
+
+void Map::UpdateDescriptors(DescriptorArray* descriptors,
+ LayoutDescriptor* layout_desc) {
+ set_instance_descriptors(descriptors);
+ if (FLAG_unbox_double_fields) {
+ if (layout_descriptor()->IsSlowLayout()) {
+ set_layout_descriptor(layout_desc);
+ }
+#ifdef VERIFY_HEAP
+ // TODO(ishell): remove these checks from VERIFY_HEAP mode.
+ if (FLAG_verify_heap) {
+ CHECK(layout_descriptor()->IsConsistentWithMap(this));
+ CHECK_EQ(Map::GetVisitorId(this), visitor_id());
+ }
+#else
+ SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
+ DCHECK(visitor_id() == Map::GetVisitorId(this));
+#endif
+ }
+}
+
+void Map::InitializeDescriptors(DescriptorArray* descriptors,
+ LayoutDescriptor* layout_desc) {
+ int len = descriptors->number_of_descriptors();
+ set_instance_descriptors(descriptors);
+ SetNumberOfOwnDescriptors(len);
+
+ if (FLAG_unbox_double_fields) {
+ set_layout_descriptor(layout_desc);
+#ifdef VERIFY_HEAP
+ // TODO(ishell): remove these checks from VERIFY_HEAP mode.
+ if (FLAG_verify_heap) {
+ CHECK(layout_descriptor()->IsConsistentWithMap(this));
+ }
+#else
+ SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
+#endif
+ set_visitor_id(Map::GetVisitorId(this));
+ }
+}
+
+void Map::set_bit_field3(uint32_t bits) {
+ if (kInt32Size != kPointerSize) {
+ WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
+ }
+ WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
+}
+
+uint32_t Map::bit_field3() const {
+ return READ_UINT32_FIELD(this, kBitField3Offset);
+}
+
+LayoutDescriptor* Map::GetLayoutDescriptor() const {
+ return FLAG_unbox_double_fields ? layout_descriptor()
+ : LayoutDescriptor::FastPointerLayout();
+}
+
+void Map::AppendDescriptor(Descriptor* desc) {
+ DescriptorArray* descriptors = instance_descriptors();
+ int number_of_own_descriptors = NumberOfOwnDescriptors();
+ DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
+ descriptors->Append(desc);
+ SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
+
+ // Properly mark the map if {desc} is an "interesting symbol".
+ if (desc->GetKey()->IsInterestingSymbol()) {
+ set_may_have_interesting_symbols(true);
+ }
+ PropertyDetails details = desc->GetDetails();
+ if (details.location() == kField) {
+ DCHECK_GT(UnusedPropertyFields(), 0);
+ AccountAddedPropertyField();
+ }
+
+// This function does not support appending double field descriptors and
+// it should never try to (otherwise, layout descriptor must be updated too).
+#ifdef DEBUG
+ DCHECK(details.location() != kField || !details.representation().IsDouble());
+#endif
+}
+
+Object* Map::GetBackPointer() const {
+ Object* object = constructor_or_backpointer();
+ if (object->IsMap()) {
+ return object;
+ }
+ return GetIsolate()->heap()->undefined_value();
+}
+
+Map* Map::ElementsTransitionMap() {
+ DisallowHeapAllocation no_gc;
+ return TransitionsAccessor(this, &no_gc)
+ .SearchSpecial(GetHeap()->elements_transition_symbol());
+}
+
+Object* Map::prototype_info() const {
+ DCHECK(is_prototype_map());
+ return READ_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset);
+}
+
+void Map::set_prototype_info(Object* value, WriteBarrierMode mode) {
+ CHECK(is_prototype_map());
+ WRITE_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset, value);
+ CONDITIONAL_WRITE_BARRIER(
+ GetHeap(), this, Map::kTransitionsOrPrototypeInfoOffset, value, mode);
+}
+
+void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
+ CHECK_GE(instance_type(), FIRST_JS_RECEIVER_TYPE);
+ CHECK(value->IsMap());
+ CHECK(GetBackPointer()->IsUndefined(GetIsolate()));
+ CHECK_IMPLIES(value->IsMap(), Map::cast(value)->GetConstructor() ==
+ constructor_or_backpointer());
+ set_constructor_or_backpointer(value, mode);
+}
+
+ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
+ACCESSORS(Map, weak_cell_cache, Object, kWeakCellCacheOffset)
+ACCESSORS(Map, constructor_or_backpointer, Object,
+ kConstructorOrBackPointerOffset)
+
+Object* Map::GetConstructor() const {
+ Object* maybe_constructor = constructor_or_backpointer();
+ // Follow any back pointers.
+ while (maybe_constructor->IsMap()) {
+ maybe_constructor =
+ Map::cast(maybe_constructor)->constructor_or_backpointer();
+ }
+ return maybe_constructor;
+}
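The constructor_or_backpointer slot therefore does double duty: transition maps store their parent map there, and the real constructor sits at the root of the chain, e.g.:

    // map3 --backpointer--> map2 --backpointer--> map1 --constructor--> JSFunction
    // GetConstructor() on map3 hops across the two Maps, then returns the JSFunction.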
+
+FunctionTemplateInfo* Map::GetFunctionTemplateInfo() const {
+ Object* constructor = GetConstructor();
+ if (constructor->IsJSFunction()) {
+ DCHECK(JSFunction::cast(constructor)->shared()->IsApiFunction());
+ return JSFunction::cast(constructor)->shared()->get_api_func_data();
+ }
+ DCHECK(constructor->IsFunctionTemplateInfo());
+ return FunctionTemplateInfo::cast(constructor);
+}
+
+void Map::SetConstructor(Object* constructor, WriteBarrierMode mode) {
+ // Never overwrite a back pointer with a constructor.
+ CHECK(!constructor_or_backpointer()->IsMap());
+ set_constructor_or_backpointer(constructor, mode);
+}
+
+Handle<Map> Map::CopyInitialMap(Handle<Map> map) {
+ return CopyInitialMap(map, map->instance_size(), map->GetInObjectProperties(),
+ map->UnusedPropertyFields());
+}
+
+bool Map::IsInobjectSlackTrackingInProgress() const {
+ return construction_counter() != Map::kNoSlackTracking;
+}
+
+void Map::InobjectSlackTrackingStep() {
+ // Slack tracking should only be performed on an initial map.
+ DCHECK(GetBackPointer()->IsUndefined(GetIsolate()));
+ if (!IsInobjectSlackTrackingInProgress()) return;
+ int counter = construction_counter();
+ set_construction_counter(counter - 1);
+ if (counter == kSlackTrackingCounterEnd) {
+ CompleteInobjectSlackTracking();
+ }
+}
+
+int Map::SlackForArraySize(int old_size, int size_limit) {
+ const int max_slack = size_limit - old_size;
+ CHECK_LE(0, max_slack);
+ if (old_size < 4) {
+ DCHECK_LE(1, max_slack);
+ return 1;
+ }
+ return Min(max_slack, old_size / 4);
+}
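Two worked cases for the helper above: SlackForArraySize(2, 10) returns 1, since arrays shorter than 4 always grow by a single slot, while SlackForArraySize(40, 48) returns Min(8, 10) = 8, i.e. growth is capped both by the remaining headroom and by a quarter of the old size.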
+
int NormalizedMapCache::GetIndex(Handle<Map> map) {
return map->Hash() % NormalizedMapCache::kEntries;
}
diff --git a/deps/v8/src/objects/map.h b/deps/v8/src/objects/map.h
index d9a0a73158..bf0d843884 100644
--- a/deps/v8/src/objects/map.h
+++ b/deps/v8/src/objects/map.h
@@ -194,8 +194,7 @@ class Map : public HeapObject {
inline InterceptorInfo* GetIndexedInterceptor();
// Instance type.
- inline InstanceType instance_type() const;
- inline void set_instance_type(InstanceType value);
+ DECL_PRIMITIVE_ACCESSORS(instance_type, InstanceType)
// Returns the size of the used in-object area including object header
// (only used for JSObject in fast mode, for the other kinds of objects it
@@ -214,50 +213,69 @@ class Map : public HeapObject {
inline void AccountAddedOutOfObjectPropertyField(
int unused_in_property_array);
+ //
// Bit field.
- inline byte bit_field() const;
- inline void set_bit_field(byte value);
+ //
+ DECL_PRIMITIVE_ACCESSORS(bit_field, byte)
+
+// Bit positions for |bit_field|.
+#define MAP_BIT_FIELD_FIELDS(V, _) \
+ V(HasNonInstancePrototypeBit, bool, 1, _) \
+ V(IsCallableBit, bool, 1, _) \
+ V(HasNamedInterceptorBit, bool, 1, _) \
+ V(HasIndexedInterceptorBit, bool, 1, _) \
+ V(IsUndetectableBit, bool, 1, _) \
+ V(IsAccessCheckNeededBit, bool, 1, _) \
+ V(IsConstructorBit, bool, 1, _) \
+ V(HasPrototypeSlotBit, bool, 1, _)
+
+ DEFINE_BIT_FIELDS(MAP_BIT_FIELD_FIELDS)
+#undef MAP_BIT_FIELD_FIELDS
+ //
// Bit field 2.
- inline byte bit_field2() const;
- inline void set_bit_field2(byte value);
+ //
+ DECL_PRIMITIVE_ACCESSORS(bit_field2, byte)
+// Bit positions for |bit_field2|.
+#define MAP_BIT_FIELD2_FIELDS(V, _) \
+ /* One bit is still free here. */ \
+ V(IsExtensibleBit, bool, 1, _) \
+ V(IsPrototypeMapBit, bool, 1, _) \
+ V(ElementsKindBits, ElementsKind, 5, _)
+
+ DEFINE_BIT_FIELDS(MAP_BIT_FIELD2_FIELDS)
+#undef MAP_BIT_FIELD2_FIELDS
+
+ //
// Bit field 3.
- inline uint32_t bit_field3() const;
- inline void set_bit_field3(uint32_t bits);
-
- class EnumLengthBits : public BitField<int, 0, kDescriptorIndexBitCount> {
- }; // NOLINT
- class NumberOfOwnDescriptorsBits
- : public BitField<int, kDescriptorIndexBitCount,
- kDescriptorIndexBitCount> {}; // NOLINT
- STATIC_ASSERT(kDescriptorIndexBitCount + kDescriptorIndexBitCount == 20);
- class DictionaryMap : public BitField<bool, 20, 1> {};
- class OwnsDescriptors : public BitField<bool, 21, 1> {};
- class HasHiddenPrototype : public BitField<bool, 22, 1> {};
- class Deprecated : public BitField<bool, 23, 1> {};
- class IsUnstable : public BitField<bool, 24, 1> {};
- class IsMigrationTarget : public BitField<bool, 25, 1> {};
- class ImmutablePrototype : public BitField<bool, 26, 1> {};
- class NewTargetIsBase : public BitField<bool, 27, 1> {};
- class MayHaveInterestingSymbols : public BitField<bool, 28, 1> {};
+ //
+ DECL_PRIMITIVE_ACCESSORS(bit_field3, uint32_t)
+
+// Bit positions for |bit_field3|.
+#define MAP_BIT_FIELD3_FIELDS(V, _) \
+ V(EnumLengthBits, int, kDescriptorIndexBitCount, _) \
+ V(NumberOfOwnDescriptorsBits, int, kDescriptorIndexBitCount, _) \
+ V(IsDictionaryMapBit, bool, 1, _) \
+ V(OwnsDescriptorsBit, bool, 1, _) \
+ V(HasHiddenPrototypeBit, bool, 1, _) \
+ V(IsDeprecatedBit, bool, 1, _) \
+ V(IsUnstableBit, bool, 1, _) \
+ V(IsMigrationTargetBit, bool, 1, _) \
+ V(IsImmutablePrototypeBit, bool, 1, _) \
+ V(NewTargetIsBaseBit, bool, 1, _) \
+ V(MayHaveInterestingSymbolsBit, bool, 1, _) \
+ V(ConstructionCounterBits, int, 3, _)
+
+ DEFINE_BIT_FIELDS(MAP_BIT_FIELD3_FIELDS)
+#undef MAP_BIT_FIELD3_FIELDS
STATIC_ASSERT(NumberOfOwnDescriptorsBits::kMax >= kMaxNumberOfDescriptors);
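Each row in these field-definition macros produces a BitField<type, shift, width> helper, and the BIT_FIELD_ACCESSORS invocations in map-inl.h reduce to plain decode/update calls on the packed word; roughly (a sketch of the generated shape, assuming the usual BitField contract):

    bool Map::is_deprecated() const {
      return IsDeprecatedBit::decode(bit_field3());
    }
    void Map::set_is_deprecated(bool value) {
      set_bit_field3(IsDeprecatedBit::update(bit_field3(), value));
    }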
- // Keep this bit field at the very end for better code in
- // Builtins::kJSConstructStubGeneric stub.
- // This counter is used for in-object slack tracking.
- // The in-object slack tracking is considered enabled when the counter is
- // non zero. The counter only has a valid count for initial maps. For
- // transitioned maps only kNoSlackTracking has a meaning, namely that inobject
- // slack tracking already finished for the transition tree. Any other value
- // indicates that either inobject slack tracking is still in progress, or that
- // the map isn't part of the transition tree anymore.
- class ConstructionCounter : public BitField<int, 29, 3> {};
static const int kSlackTrackingCounterStart = 7;
static const int kSlackTrackingCounterEnd = 1;
static const int kNoSlackTracking = 0;
- STATIC_ASSERT(kSlackTrackingCounterStart <= ConstructionCounter::kMax);
+ STATIC_ASSERT(kSlackTrackingCounterStart <= ConstructionCounterBits::kMax);
// Inobject slack tracking is the way to reclaim unused inobject space.
//
@@ -310,8 +328,7 @@ class Map : public HeapObject {
// property is set to a value that is not a JSObject, the prototype
// property will not be used to create instances of the function.
// See ECMA-262, 13.2.2.
- inline void set_non_instance_prototype(bool value);
- inline bool has_non_instance_prototype() const;
+ DECL_BOOLEAN_ACCESSORS(has_non_instance_prototype)
// Tells whether the instance has a [[Construct]] internal method.
// This property is implemented according to ES6, section 7.2.4.
@@ -329,12 +346,10 @@ class Map : public HeapObject {
DECL_BOOLEAN_ACCESSORS(has_hidden_prototype)
// Records and queries whether the instance has a named interceptor.
- inline void set_has_named_interceptor();
- inline bool has_named_interceptor() const;
+ DECL_BOOLEAN_ACCESSORS(has_named_interceptor)
// Records and queries whether the instance has an indexed interceptor.
- inline void set_has_indexed_interceptor();
- inline bool has_indexed_interceptor() const;
+ DECL_BOOLEAN_ACCESSORS(has_indexed_interceptor)
// Tells whether the instance is undetectable.
// An undetectable object is a special class of JSObject: 'typeof' operator
@@ -342,21 +357,18 @@ class Map : public HeapObject {
// a normal JS object. It is useful for implementing undetectable
// document.all in Firefox & Safari.
// See https://bugzilla.mozilla.org/show_bug.cgi?id=248549.
- inline void set_is_undetectable();
- inline bool is_undetectable() const;
+ DECL_BOOLEAN_ACCESSORS(is_undetectable)
// Tells whether the instance has a [[Call]] internal method.
// This property is implemented according to ES6, section 7.2.3.
- inline void set_is_callable();
- inline bool is_callable() const;
+ DECL_BOOLEAN_ACCESSORS(is_callable)
DECL_BOOLEAN_ACCESSORS(new_target_is_base)
DECL_BOOLEAN_ACCESSORS(is_extensible)
DECL_BOOLEAN_ACCESSORS(is_prototype_map)
inline bool is_abandoned_prototype_map() const;
- inline void set_elements_kind(ElementsKind elements_kind);
- inline ElementsKind elements_kind() const;
+ DECL_PRIMITIVE_ACCESSORS(elements_kind, ElementsKind)
// Tells whether the instance has fast elements that are only Smis.
inline bool has_fast_smi_elements() const;
@@ -409,6 +421,8 @@ class Map : public HeapObject {
static const int kPrototypeChainValid = 0;
static const int kPrototypeChainInvalid = 1;
+ static bool IsPrototypeChainInvalidated(Map* map);
+
// Return the map of the root of object's prototype chain.
Map* GetPrototypeChainRootMap(Isolate* isolate) const;
@@ -489,13 +503,11 @@ class Map : public HeapObject {
// normalized objects, ie objects for which HasFastProperties returns false).
// A map can never be used for both dictionary mode and fast mode JSObjects.
// False by default and for HeapObjects that are not JSObjects.
- inline void set_dictionary_map(bool value);
- inline bool is_dictionary_map() const;
+ DECL_BOOLEAN_ACCESSORS(is_dictionary_map)
// Tells whether the instance needs security checks when accessing its
// properties.
- inline void set_is_access_check_needed(bool access_check_needed);
- inline bool is_access_check_needed() const;
+ DECL_BOOLEAN_ACCESSORS(is_access_check_needed)
// [prototype]: implicit prototype object.
DECL_ACCESSORS(prototype, Object)
@@ -563,15 +575,24 @@ class Map : public HeapObject {
inline void SetEnumLength(int length);
DECL_BOOLEAN_ACCESSORS(owns_descriptors)
+
inline void mark_unstable();
inline bool is_stable() const;
- inline void set_migration_target(bool value);
- inline bool is_migration_target() const;
- inline void set_immutable_proto(bool value);
- inline bool is_immutable_proto() const;
+
+ DECL_BOOLEAN_ACCESSORS(is_migration_target)
+
+ DECL_BOOLEAN_ACCESSORS(is_immutable_proto)
+
+ // This counter is used for in-object slack tracking.
+ // The in-object slack tracking is considered enabled when the counter is
+ // non-zero. The counter only has a valid count for initial maps. For
+ // transitioned maps only kNoSlackTracking has a meaning, namely that inobject
+ // slack tracking already finished for the transition tree. Any other value
+ // indicates that either inobject slack tracking is still in progress, or that
+ // the map isn't part of the transition tree anymore.
DECL_INT_ACCESSORS(construction_counter)
- inline void deprecate();
- inline bool is_deprecated() const;
+
+ DECL_BOOLEAN_ACCESSORS(is_deprecated)
inline bool CanBeDeprecated() const;
// Returns a non-deprecated version of the input. If the input was not
// deprecated, it is directly returned. Otherwise, the non-deprecated version
@@ -759,22 +780,6 @@ class Map : public HeapObject {
STATIC_ASSERT(kInstanceTypeOffset == Internals::kMapInstanceTypeOffset);
- // Bit positions for bit field.
- static const int kHasNonInstancePrototype = 0;
- static const int kIsCallable = 1;
- static const int kHasNamedInterceptor = 2;
- static const int kHasIndexedInterceptor = 3;
- static const int kIsUndetectable = 4;
- static const int kIsAccessCheckNeeded = 5;
- static const int kIsConstructor = 6;
- static const int kHasPrototypeSlot = 7;
-
- // Bit positions for bit field 2
- static const int kIsExtensible = 0;
- // Bit 1 is free.
- class IsPrototypeMapBits : public BitField<bool, 2, 1> {};
- class ElementsKindBits : public BitField<ElementsKind, 3, 5> {};
-
typedef FixedBodyDescriptor<kPointerFieldsBeginOffset,
kPointerFieldsEndOffset, kSize>
BodyDescriptor;
diff --git a/deps/v8/src/objects/module.cc b/deps/v8/src/objects/module.cc
index 4040d05bca..b9d7697fb5 100644
--- a/deps/v8/src/objects/module.cc
+++ b/deps/v8/src/objects/module.cc
@@ -199,29 +199,69 @@ void Module::SetStatus(Status new_status) {
set_status(new_status);
}
+void Module::ResetGraph(Handle<Module> module) {
+ DCHECK_NE(module->status(), kInstantiating);
+ DCHECK_NE(module->status(), kEvaluating);
+ if (module->status() != kPreInstantiating) return;
+ Isolate* isolate = module->GetIsolate();
+ Handle<FixedArray> requested_modules(module->requested_modules(), isolate);
+ Reset(module);
+ for (int i = 0; i < requested_modules->length(); ++i) {
+ Handle<Object> descendant(requested_modules->get(i), isolate);
+ if (descendant->IsModule()) {
+ ResetGraph(Handle<Module>::cast(descendant));
+ } else {
+ DCHECK(descendant->IsUndefined(isolate));
+ }
+ }
+}
+
+void Module::Reset(Handle<Module> module) {
+ Isolate* isolate = module->GetIsolate();
+ Factory* factory = isolate->factory();
+
+ DCHECK(module->status() == kPreInstantiating ||
+ module->status() == kInstantiating);
+ DCHECK(module->exception()->IsTheHole(isolate));
+ DCHECK(module->import_meta()->IsTheHole(isolate));
+ // The namespace object cannot exist, because it would have been created
+ // by RunInitializationCode, which is called only after this module's SCC
+ // successfully finishes instantiation.
+ DCHECK(!module->module_namespace()->IsJSModuleNamespace());
+
+ Handle<ObjectHashTable> exports =
+ ObjectHashTable::New(isolate, module->info()->RegularExportCount());
+ Handle<FixedArray> regular_exports =
+ factory->NewFixedArray(module->regular_exports()->length());
+ Handle<FixedArray> regular_imports =
+ factory->NewFixedArray(module->regular_imports()->length());
+ Handle<FixedArray> requested_modules =
+ factory->NewFixedArray(module->requested_modules()->length());
+
+ if (module->status() == kInstantiating) {
+ module->set_code(JSFunction::cast(module->code())->shared());
+ }
+#ifdef DEBUG
+ module->PrintStatusTransition(kUninstantiated);
+#endif // DEBUG
+ module->set_status(kUninstantiated);
+ module->set_exports(*exports);
+ module->set_regular_exports(*regular_exports);
+ module->set_regular_imports(*regular_imports);
+ module->set_requested_modules(*requested_modules);
+ module->set_dfs_index(-1);
+ module->set_dfs_ancestor_index(-1);
+}
+
void Module::RecordError() {
DisallowHeapAllocation no_alloc;
-
Isolate* isolate = GetIsolate();
+
+ DCHECK(exception()->IsTheHole(isolate));
Object* the_exception = isolate->pending_exception();
DCHECK(!the_exception->IsTheHole(isolate));
- switch (status()) {
- case Module::kUninstantiated:
- case Module::kPreInstantiating:
- case Module::kInstantiating:
- case Module::kEvaluating:
- break;
- case Module::kErrored:
- DCHECK_EQ(exception(), the_exception);
- return;
- default:
- UNREACHABLE();
- }
-
set_code(info());
-
- DCHECK(exception()->IsTheHole(isolate));
#ifdef DEBUG
PrintStatusTransition(Module::kErrored);
#endif // DEBUG
@@ -232,9 +272,8 @@ void Module::RecordError() {
Object* Module::GetException() {
DisallowHeapAllocation no_alloc;
DCHECK_EQ(status(), Module::kErrored);
- Object* the_exception = exception();
- DCHECK(!the_exception->IsTheHole(GetIsolate()));
- return the_exception;
+ DCHECK(!exception()->IsTheHole(GetIsolate()));
+ return exception();
}
MaybeHandle<Cell> Module::ResolveImport(Handle<Module> module,
@@ -244,29 +283,25 @@ MaybeHandle<Cell> Module::ResolveImport(Handle<Module> module,
Isolate* isolate = module->GetIsolate();
Handle<Module> requested_module(
Module::cast(module->requested_modules()->get(module_request)), isolate);
- MaybeHandle<Cell> result = Module::ResolveExport(requested_module, name, loc,
- must_resolve, resolve_set);
- if (isolate->has_pending_exception()) {
- DCHECK(result.is_null());
- if (must_resolve) module->RecordError();
- // If {must_resolve} is false and there's an exception, then either that
- // exception was already recorded where it happened, or it's the
- // kAmbiguousExport exception (see ResolveExportUsingStarExports) and the
- // culprit module is still to be determined.
- }
+ Handle<String> specifier(
+ String::cast(module->info()->module_requests()->get(module_request)),
+ isolate);
+ MaybeHandle<Cell> result = Module::ResolveExport(
+ requested_module, specifier, name, loc, must_resolve, resolve_set);
+ DCHECK_IMPLIES(isolate->has_pending_exception(), result.is_null());
return result;
}
MaybeHandle<Cell> Module::ResolveExport(Handle<Module> module,
- Handle<String> name,
+ Handle<String> module_specifier,
+ Handle<String> export_name,
MessageLocation loc, bool must_resolve,
Module::ResolveSet* resolve_set) {
- DCHECK_NE(module->status(), kErrored);
- DCHECK_NE(module->status(), kEvaluating);
DCHECK_GE(module->status(), kPreInstantiating);
+ DCHECK_NE(module->status(), kEvaluating);
Isolate* isolate = module->GetIsolate();
- Handle<Object> object(module->exports()->Lookup(name), isolate);
+ Handle<Object> object(module->exports()->Lookup(export_name), isolate);
if (object->IsCell()) {
// Already resolved (e.g. because it's a local export).
return Handle<Cell>::cast(object);
@@ -282,17 +317,18 @@ MaybeHandle<Cell> Module::ResolveExport(Handle<Module> module,
Zone* zone = resolve_set->zone();
name_set =
new (zone->New(sizeof(UnorderedStringSet))) UnorderedStringSet(zone);
- } else if (name_set->count(name)) {
+ } else if (name_set->count(export_name)) {
// Cycle detected.
if (must_resolve) {
return isolate->Throw<Cell>(
isolate->factory()->NewSyntaxError(
- MessageTemplate::kCyclicModuleDependency, name),
+ MessageTemplate::kCyclicModuleDependency, export_name,
+ module_specifier),
&loc);
}
return MaybeHandle<Cell>();
}
- name_set->insert(name);
+ name_set->insert(export_name);
}
if (object->IsModuleInfoEntry()) {
@@ -313,23 +349,24 @@ MaybeHandle<Cell> Module::ResolveExport(Handle<Module> module,
// The export table may have changed but the entry in question should be
// unchanged.
Handle<ObjectHashTable> exports(module->exports(), isolate);
- DCHECK(exports->Lookup(name)->IsModuleInfoEntry());
+ DCHECK(exports->Lookup(export_name)->IsModuleInfoEntry());
- exports = ObjectHashTable::Put(exports, name, cell);
+ exports = ObjectHashTable::Put(exports, export_name, cell);
module->set_exports(*exports);
return cell;
}
DCHECK(object->IsTheHole(isolate));
- return Module::ResolveExportUsingStarExports(module, name, loc, must_resolve,
- resolve_set);
+ return Module::ResolveExportUsingStarExports(
+ module, module_specifier, export_name, loc, must_resolve, resolve_set);
}
MaybeHandle<Cell> Module::ResolveExportUsingStarExports(
- Handle<Module> module, Handle<String> name, MessageLocation loc,
- bool must_resolve, Module::ResolveSet* resolve_set) {
+ Handle<Module> module, Handle<String> module_specifier,
+ Handle<String> export_name, MessageLocation loc, bool must_resolve,
+ Module::ResolveSet* resolve_set) {
Isolate* isolate = module->GetIsolate();
- if (!name->Equals(isolate->heap()->default_string())) {
+ if (!export_name->Equals(isolate->heap()->default_string())) {
// Go through all star exports looking for the given name. If multiple star
// exports provide the name, make sure they all map it to the same cell.
Handle<Cell> unique_cell;
@@ -346,15 +383,15 @@ MaybeHandle<Cell> Module::ResolveExportUsingStarExports(
MessageLocation new_loc(script, entry->beg_pos(), entry->end_pos());
Handle<Cell> cell;
- if (ResolveImport(module, name, entry->module_request(), new_loc, false,
- resolve_set)
+ if (ResolveImport(module, export_name, entry->module_request(), new_loc,
+ false, resolve_set)
.ToHandle(&cell)) {
if (unique_cell.is_null()) unique_cell = cell;
if (*unique_cell != *cell) {
- return isolate->Throw<Cell>(
- isolate->factory()->NewSyntaxError(
- MessageTemplate::kAmbiguousExport, name),
- &loc);
+ return isolate->Throw<Cell>(isolate->factory()->NewSyntaxError(
+ MessageTemplate::kAmbiguousExport,
+ module_specifier, export_name),
+ &loc);
}
} else if (isolate->has_pending_exception()) {
return MaybeHandle<Cell>();
@@ -364,8 +401,8 @@ MaybeHandle<Cell> Module::ResolveExportUsingStarExports(
if (!unique_cell.is_null()) {
// Found a unique star export for this name.
Handle<ObjectHashTable> exports(module->exports(), isolate);
- DCHECK(exports->Lookup(name)->IsTheHole(isolate));
- exports = ObjectHashTable::Put(exports, name, unique_cell);
+ DCHECK(exports->Lookup(export_name)->IsTheHole(isolate));
+ exports = ObjectHashTable::Put(exports, export_name, unique_cell);
module->set_exports(*exports);
return unique_cell;
}
@@ -373,9 +410,10 @@ MaybeHandle<Cell> Module::ResolveExportUsingStarExports(
// Unresolvable.
if (must_resolve) {
- return isolate->Throw<Cell>(isolate->factory()->NewSyntaxError(
- MessageTemplate::kUnresolvableExport, name),
- &loc);
+ return isolate->Throw<Cell>(
+ isolate->factory()->NewSyntaxError(MessageTemplate::kUnresolvableExport,
+ module_specifier, export_name),
+ &loc);
}
return MaybeHandle<Cell>();
}
@@ -393,27 +431,24 @@ bool Module::Instantiate(Handle<Module> module, v8::Local<v8::Context> context,
}
#endif // DEBUG
- Isolate* isolate = module->GetIsolate();
- if (module->status() == kErrored) {
- isolate->Throw(module->GetException());
- return false;
- }
-
if (!PrepareInstantiate(module, context, callback)) {
+ ResetGraph(module);
return false;
}
+ Isolate* isolate = module->GetIsolate();
Zone zone(isolate->allocator(), ZONE_NAME);
ZoneForwardList<Handle<Module>> stack(&zone);
unsigned dfs_index = 0;
if (!FinishInstantiate(module, &stack, &dfs_index, &zone)) {
for (auto& descendant : stack) {
- descendant->RecordError();
+ Reset(descendant);
}
- DCHECK_EQ(module->GetException(), isolate->pending_exception());
+ DCHECK_EQ(module->status(), kUninstantiated);
return false;
}
- DCHECK(module->status() == kInstantiated || module->status() == kEvaluated);
+ DCHECK(module->status() == kInstantiated || module->status() == kEvaluated ||
+ module->status() == kErrored);
DCHECK(stack.empty());
return true;
}
@@ -421,7 +456,6 @@ bool Module::Instantiate(Handle<Module> module, v8::Local<v8::Context> context,
bool Module::PrepareInstantiate(Handle<Module> module,
v8::Local<v8::Context> context,
v8::Module::ResolveCallback callback) {
- DCHECK_NE(module->status(), kErrored);
DCHECK_NE(module->status(), kEvaluating);
DCHECK_NE(module->status(), kInstantiating);
if (module->status() >= kPreInstantiating) return true;
@@ -439,17 +473,9 @@ bool Module::PrepareInstantiate(Handle<Module> module,
v8::Utils::ToLocal(module))
.ToLocal(&api_requested_module)) {
isolate->PromoteScheduledException();
- module->RecordError();
return false;
}
Handle<Module> requested_module = Utils::OpenHandle(*api_requested_module);
- if (requested_module->status() == kErrored) {
- // TODO(neis): Move this into callback?
- isolate->Throw(requested_module->GetException());
- module->RecordError();
- DCHECK_EQ(module->GetException(), requested_module->GetException());
- return false;
- }
requested_modules->set(i, *requested_module);
}
@@ -458,8 +484,6 @@ bool Module::PrepareInstantiate(Handle<Module> module,
Handle<Module> requested_module(Module::cast(requested_modules->get(i)),
isolate);
if (!PrepareInstantiate(requested_module, context, callback)) {
- module->RecordError();
- DCHECK_EQ(module->GetException(), requested_module->GetException());
return false;
}
}
@@ -531,7 +555,6 @@ void Module::MaybeTransitionComponent(Handle<Module> module,
bool Module::FinishInstantiate(Handle<Module> module,
ZoneForwardList<Handle<Module>>* stack,
unsigned* dfs_index, Zone* zone) {
- DCHECK_NE(module->status(), kErrored);
DCHECK_NE(module->status(), kEvaluating);
if (module->status() >= kInstantiating) return true;
DCHECK_EQ(module->status(), kPreInstantiating);
@@ -560,7 +583,6 @@ bool Module::FinishInstantiate(Handle<Module> module,
return false;
}
- DCHECK_NE(requested_module->status(), kErrored);
DCHECK_NE(requested_module->status(), kEvaluating);
DCHECK_GE(requested_module->status(), kInstantiating);
SLOW_DCHECK(
@@ -606,8 +628,8 @@ bool Module::FinishInstantiate(Handle<Module> module,
if (name->IsUndefined(isolate)) continue; // Star export.
MessageLocation loc(script, entry->beg_pos(), entry->end_pos());
ResolveSet resolve_set(zone);
- if (ResolveExport(module, Handle<String>::cast(name), loc, true,
- &resolve_set)
+ if (ResolveExport(module, Handle<String>(), Handle<String>::cast(name), loc,
+ true, &resolve_set)
.is_null()) {
return false;
}
@@ -722,7 +744,6 @@ namespace {
void FetchStarExports(Handle<Module> module, Zone* zone,
UnorderedModuleSet* visited) {
- DCHECK_NE(module->status(), Module::kErrored);
DCHECK_GE(module->status(), Module::kInstantiating);
if (module->module_namespace()->IsJSModuleNamespace()) return; // Shortcut.
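
The module.cc changes above replace the old sticky kErrored state during instantiation: when FinishInstantiate fails, every module still on the DFS stack is reset to kUninstantiated so a later Instantiate call can start over. A minimal standalone sketch of that reset-on-failure pattern, using plain C++ containers rather than V8's zone types (Module, Prepare, and Finish here are illustrative stand-ins, not V8 API):

    #include <forward_list>
    #include <iostream>

    enum class Status { kUninstantiated, kPreInstantiating, kInstantiating, kInstantiated };

    struct Module {
      Status status = Status::kUninstantiated;
      bool Prepare() { status = Status::kPreInstantiating; return true; }
      bool Finish() { status = Status::kInstantiating; return false; }  // simulate failure
      void Reset() { status = Status::kUninstantiated; }  // forget partial state
    };

    bool Instantiate(Module& root, std::forward_list<Module*>& stack) {
      if (!root.Prepare()) return false;
      stack.push_front(&root);
      if (!root.Finish()) {
        for (Module* m : stack) m->Reset();  // leave everything retryable
        return false;
      }
      return true;
    }

    int main() {
      Module m;
      std::forward_list<Module*> stack;
      std::cout << (Instantiate(m, stack) ? "ok" : "reset") << "\n";
    }

The key design point is that failure leaves no partially instantiated modules behind, which is exactly what the new DCHECK_EQ(module->status(), kUninstantiated) in the diff asserts.
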
diff --git a/deps/v8/src/objects/module.h b/deps/v8/src/objects/module.h
index 7680f55313..fe374d3fc6 100644
--- a/deps/v8/src/objects/module.h
+++ b/deps/v8/src/objects/module.h
@@ -6,6 +6,7 @@
#define V8_OBJECTS_MODULE_H_
#include "src/objects.h"
+#include "src/objects/fixed-array.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
@@ -153,15 +154,17 @@ class Module : public Struct {
// exception (so check manually!).
class ResolveSet;
static MUST_USE_RESULT MaybeHandle<Cell> ResolveExport(
- Handle<Module> module, Handle<String> name, MessageLocation loc,
- bool must_resolve, ResolveSet* resolve_set);
+ Handle<Module> module, Handle<String> module_specifier,
+ Handle<String> export_name, MessageLocation loc, bool must_resolve,
+ ResolveSet* resolve_set);
static MUST_USE_RESULT MaybeHandle<Cell> ResolveImport(
Handle<Module> module, Handle<String> name, int module_request,
MessageLocation loc, bool must_resolve, ResolveSet* resolve_set);
static MUST_USE_RESULT MaybeHandle<Cell> ResolveExportUsingStarExports(
- Handle<Module> module, Handle<String> name, MessageLocation loc,
- bool must_resolve, ResolveSet* resolve_set);
+ Handle<Module> module, Handle<String> module_specifier,
+ Handle<String> export_name, MessageLocation loc, bool must_resolve,
+ ResolveSet* resolve_set);
static MUST_USE_RESULT bool PrepareInstantiate(
Handle<Module> module, v8::Local<v8::Context> context,
@@ -179,6 +182,11 @@ class Module : public Struct {
ZoneForwardList<Handle<Module>>* stack,
Status new_status);
+ // Set module's status back to kUninstantiated and reset other internal state.
+ // This is used when instantiation fails.
+ static void Reset(Handle<Module> module);
+ static void ResetGraph(Handle<Module> module);
+
// To set status to kErrored, RecordError should be used.
void SetStatus(Status status);
void RecordError();
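
In module.h above, ResolveExport and ResolveExportUsingStarExports gain a module_specifier parameter purely for diagnostics: the kCyclicModuleDependency, kAmbiguousExport, and kUnresolvableExport errors can now name the requesting specifier as well as the export. The cycle detection itself is the classic visited-set walk; a hedged sketch with standard containers in place of V8's zone-allocated UnorderedStringSet (Resolve is a made-up stand-in, not V8 API):

    #include <optional>
    #include <set>
    #include <string>

    // Returns the resolved binding, or nullopt when the name is already being
    // resolved higher up the call chain (a cyclic re-export).
    std::optional<std::string> Resolve(const std::string& specifier,
                                       const std::string& export_name,
                                       std::set<std::string>& resolving) {
      if (!resolving.insert(export_name).second) {
        return std::nullopt;  // cycle: a real error would cite specifier + name
      }
      // ... recurse into re-exporting modules here ...
      return specifier + ":" + export_name;
    }

    int main() {
      std::set<std::string> resolving;
      return Resolve("./dep", "x", resolving) ? 0 : 1;
    }
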
diff --git a/deps/v8/src/objects/object-macros.h b/deps/v8/src/objects/object-macros.h
index 5d367d351f..604942a272 100644
--- a/deps/v8/src/objects/object-macros.h
+++ b/deps/v8/src/objects/object-macros.h
@@ -54,8 +54,9 @@
#define ACCESSORS_CHECKED2(holder, name, type, offset, get_condition, \
set_condition) \
type* holder::name() const { \
+ type* value = type::cast(READ_FIELD(this, offset)); \
DCHECK(get_condition); \
- return type::cast(READ_FIELD(this, offset)); \
+ return value; \
} \
void holder::set_##name(type* value, WriteBarrierMode mode) { \
DCHECK(set_condition); \
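
The object-macros.h hunk reorders the generated getter so the field is loaded before get_condition runs, presumably so that a condition can inspect the freshly read value instead of re-reading the field. A simplified, self-contained rendering of the shape the macro expands to (Holder and slots are illustrative):

    #include <cassert>

    struct Holder {
      void* slots[4];
      void* context() const {
        void* value = slots[1];    // READ_FIELD(this, offset) comes first...
        assert(value != nullptr);  // ...so get_condition may refer to `value`
        return value;
      }
    };

    int main() {
      int x = 0;
      Holder h{{&x, &x, &x, &x}};
      return h.context() == &x ? 0 : 1;
    }
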
diff --git a/deps/v8/src/objects/scope-info.h b/deps/v8/src/objects/scope-info.h
index b03b1e831e..3a8459a204 100644
--- a/deps/v8/src/objects/scope-info.h
+++ b/deps/v8/src/objects/scope-info.h
@@ -7,6 +7,7 @@
#include "src/globals.h"
#include "src/objects.h"
+#include "src/objects/fixed-array.h"
#include "src/utils.h"
// Has to be the last include (doesn't have include guards):
@@ -306,12 +307,14 @@ class ScopeInfo : public FixedArray {
class HasSimpleParametersField
: public BitField<bool, AsmModuleField::kNext, 1> {};
class FunctionKindField
- : public BitField<FunctionKind, HasSimpleParametersField::kNext, 10> {};
+ : public BitField<FunctionKind, HasSimpleParametersField::kNext, 11> {};
class HasOuterScopeInfoField
: public BitField<bool, FunctionKindField::kNext, 1> {};
class IsDebugEvaluateScopeField
: public BitField<bool, HasOuterScopeInfoField::kNext, 1> {};
+ STATIC_ASSERT(kLastFunctionKind <= FunctionKindField::kMax);
+
// Properties of variables.
class VariableModeField : public BitField<VariableMode, 0, 3> {};
class InitFlagField : public BitField<InitializationFlag, 3, 1> {};
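
scope-info.h widens FunctionKindField from 10 to 11 bits and adds a STATIC_ASSERT that the widest FunctionKind enumerator still fits. The guard pattern is easy to reproduce in isolation; the sketch below uses a minimal BitField stand-in and a made-up kLastFunctionKind value:

    #include <cstdint>

    template <typename T, int shift, int size>
    struct BitField {
      static constexpr uint32_t kMax = (1u << size) - 1;  // widest storable value
      static constexpr uint32_t encode(T v) { return static_cast<uint32_t>(v) << shift; }
      static constexpr T decode(uint32_t bits) { return static_cast<T>((bits >> shift) & kMax); }
    };

    enum FunctionKind : uint32_t { kNormalFunction = 0, kLastFunctionKind = 1500 };  // illustrative
    using FunctionKindField = BitField<FunctionKind, 0, 11>;

    // The diff's guard: fail the build if the enum outgrows the field.
    static_assert(kLastFunctionKind <= FunctionKindField::kMax, "FunctionKindField too narrow");

A compile-time guard like this is what keeps the field from silently overflowing when FunctionKind grows.
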
diff --git a/deps/v8/src/objects/script-inl.h b/deps/v8/src/objects/script-inl.h
index 2544b4e20e..c5bd407628 100644
--- a/deps/v8/src/objects/script-inl.h
+++ b/deps/v8/src/objects/script-inl.h
@@ -26,7 +26,8 @@ ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, HeapObject, kWrapperOffset)
SMI_ACCESSORS(Script, type, kTypeOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
-ACCESSORS_CHECKED(Script, eval_from_shared, Object, kEvalFromSharedOffset,
+ACCESSORS_CHECKED(Script, eval_from_shared_or_wrapped_arguments, Object,
+ kEvalFromSharedOrWrappedArgumentsOffset,
this->type() != TYPE_WASM)
SMI_ACCESSORS_CHECKED(Script, eval_from_position, kEvalFromPositionOffset,
this->type() != TYPE_WASM)
@@ -35,9 +36,39 @@ SMI_ACCESSORS(Script, flags, kFlagsOffset)
ACCESSORS(Script, source_url, Object, kSourceUrlOffset)
ACCESSORS(Script, source_mapping_url, Object, kSourceMappingUrlOffset)
ACCESSORS(Script, host_defined_options, FixedArray, kHostDefinedOptionsOffset)
-ACCESSORS_CHECKED(Script, wasm_compiled_module, Object, kEvalFromSharedOffset,
+ACCESSORS_CHECKED(Script, wasm_compiled_module, Object,
+ kEvalFromSharedOrWrappedArgumentsOffset,
this->type() == TYPE_WASM)
+bool Script::is_wrapped() const {
+ return eval_from_shared_or_wrapped_arguments()->IsFixedArray();
+}
+
+bool Script::has_eval_from_shared() const {
+ return eval_from_shared_or_wrapped_arguments()->IsSharedFunctionInfo();
+}
+
+void Script::set_eval_from_shared(SharedFunctionInfo* shared,
+ WriteBarrierMode mode) {
+ DCHECK(!is_wrapped());
+ set_eval_from_shared_or_wrapped_arguments(shared, mode);
+}
+
+SharedFunctionInfo* Script::eval_from_shared() const {
+ DCHECK(has_eval_from_shared());
+ return SharedFunctionInfo::cast(eval_from_shared_or_wrapped_arguments());
+}
+
+void Script::set_wrapped_arguments(FixedArray* value, WriteBarrierMode mode) {
+ DCHECK(!has_eval_from_shared());
+ set_eval_from_shared_or_wrapped_arguments(value, mode);
+}
+
+FixedArray* Script::wrapped_arguments() const {
+ DCHECK(is_wrapped());
+ return FixedArray::cast(eval_from_shared_or_wrapped_arguments());
+}
+
Script::CompilationType Script::compilation_type() {
return BooleanBit::get(flags(), kCompilationTypeBit) ? COMPILATION_TYPE_EVAL
: COMPILATION_TYPE_HOST;
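
script-inl.h folds eval_from_shared and the new wrapped_arguments into one slot, eval_from_shared_or_wrapped_arguments, and tells them apart by the stored object's dynamic type. Outside V8's tagged heap the same idea is a discriminated union; a sketch with std::variant (all names here are illustrative, not V8 API):

    #include <cassert>
    #include <variant>
    #include <vector>

    struct SharedFunctionInfo {};               // stand-in for the eval-site info
    using WrappedArguments = std::vector<int>;  // stand-in for FixedArray

    struct Script {
      std::variant<std::monostate, SharedFunctionInfo, WrappedArguments> slot;
      bool is_wrapped() const { return std::holds_alternative<WrappedArguments>(slot); }
      bool has_eval_from_shared() const { return std::holds_alternative<SharedFunctionInfo>(slot); }
      void set_wrapped_arguments(WrappedArguments v) {
        assert(!has_eval_from_shared());  // mirrors the DCHECK in the diff
        slot = std::move(v);
      }
    };

    int main() {
      Script s;
      s.set_wrapped_arguments({1, 2, 3});
      return s.is_wrapped() ? 0 : 1;
    }
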
diff --git a/deps/v8/src/objects/script.h b/deps/v8/src/objects/script.h
index ae4a87914d..4d84be2262 100644
--- a/deps/v8/src/objects/script.h
+++ b/deps/v8/src/objects/script.h
@@ -6,6 +6,7 @@
#define V8_OBJECTS_SCRIPT_H_
#include "src/objects.h"
+#include "src/objects/fixed-array.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
@@ -62,9 +63,21 @@ class Script : public Struct {
// [line_ends]: FixedArray of line ends positions.
DECL_ACCESSORS(line_ends, Object)
+ DECL_ACCESSORS(eval_from_shared_or_wrapped_arguments, Object)
+
// [eval_from_shared]: for eval scripts the shared function info for the
// function from which eval was called.
- DECL_ACCESSORS(eval_from_shared, Object)
+ DECL_ACCESSORS(eval_from_shared, SharedFunctionInfo)
+
+ // [wrapped_arguments]: for the list of arguments in a wrapped script.
+ DECL_ACCESSORS(wrapped_arguments, FixedArray)
+
+ // Whether the script is implicitly wrapped in a function.
+ inline bool is_wrapped() const;
+
+ // Whether the eval_from_shared field is set with a shared function info
+ // for the eval site.
+ inline bool has_eval_from_shared() const;
// [eval_from_position]: the source position in the code for the function
// from which eval was called, as positive integer. Or the code offset in the
@@ -118,6 +131,9 @@ class Script : public Struct {
// Retrieve source position from where eval was called.
int GetEvalPosition();
+ // Check if the script contains any Asm modules.
+ bool ContainsAsmModule();
+
// Init line_ends array with source code positions of line ends.
static void InitLineEnds(Handle<Script> script);
@@ -186,9 +202,10 @@ class Script : public Struct {
static const int kTypeOffset = kWrapperOffset + kPointerSize;
static const int kLineEndsOffset = kTypeOffset + kPointerSize;
static const int kIdOffset = kLineEndsOffset + kPointerSize;
- static const int kEvalFromSharedOffset = kIdOffset + kPointerSize;
+ static const int kEvalFromSharedOrWrappedArgumentsOffset =
+ kIdOffset + kPointerSize;
static const int kEvalFromPositionOffset =
- kEvalFromSharedOffset + kPointerSize;
+ kEvalFromSharedOrWrappedArgumentsOffset + kPointerSize;
static const int kSharedFunctionInfosOffset =
kEvalFromPositionOffset + kPointerSize;
static const int kFlagsOffset = kSharedFunctionInfosOffset + kPointerSize;
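
The script.h layout block shows why the rename is cheap: each offset is defined in terms of the previous one, so renaming kEvalFromSharedOffset to kEvalFromSharedOrWrappedArgumentsOffset ripples through kEvalFromPositionOffset and everything after it with no manual renumbering. A tiny constexpr sketch of the chaining (the base offset of 0 is only for illustration; the real chain starts earlier in the class):

    #include <cstddef>

    constexpr size_t kPointerSize = sizeof(void*);
    constexpr size_t kIdOffset = 0;  // illustrative base
    constexpr size_t kEvalFromSharedOrWrappedArgumentsOffset = kIdOffset + kPointerSize;
    constexpr size_t kEvalFromPositionOffset =
        kEvalFromSharedOrWrappedArgumentsOffset + kPointerSize;

    static_assert(kEvalFromPositionOffset == 2 * kPointerSize, "chained layout holds");
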
diff --git a/deps/v8/src/objects/shared-function-info-inl.h b/deps/v8/src/objects/shared-function-info-inl.h
index 0c35933950..57a72754b5 100644
--- a/deps/v8/src/objects/shared-function-info-inl.h
+++ b/deps/v8/src/objects/shared-function-info-inl.h
@@ -80,6 +80,8 @@ AbstractCode* SharedFunctionInfo::abstract_code() {
}
}
+BIT_FIELD_ACCESSORS(SharedFunctionInfo, compiler_hints, is_wrapped,
+ SharedFunctionInfo::IsWrappedBit)
BIT_FIELD_ACCESSORS(SharedFunctionInfo, compiler_hints, allows_lazy_compilation,
SharedFunctionInfo::AllowLazyCompilationBit)
BIT_FIELD_ACCESSORS(SharedFunctionInfo, compiler_hints,
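
BIT_FIELD_ACCESSORS, used above to add is_wrapped, expands to a getter/setter pair over the packed compiler_hints word. Roughly, and with a made-up bit position (the macro below is a simplification, not V8's definition):

    #include <cstdint>
    #include <iostream>

    #define BIT_FIELD_ACCESSORS(name, mask)                                       \
      bool name() const { return (hints_ & (mask)) != 0; }                        \
      void set_##name(bool v) {                                                   \
        hints_ = v ? (hints_ | (mask)) : (hints_ & ~static_cast<uint32_t>(mask)); \
      }

    struct SharedFunctionInfoLike {
      uint32_t hints_ = 0;
      BIT_FIELD_ACCESSORS(is_wrapped, 1u << 2)  // bit position is illustrative
    };

    int main() {
      SharedFunctionInfoLike s;
      s.set_is_wrapped(true);
      std::cout << s.is_wrapped() << "\n";  // prints 1
    }
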
diff --git a/deps/v8/src/objects/shared-function-info.h b/deps/v8/src/objects/shared-function-info.h
index a43c2a12b7..8e996042c0 100644
--- a/deps/v8/src/objects/shared-function-info.h
+++ b/deps/v8/src/objects/shared-function-info.h
@@ -254,7 +254,7 @@ class SharedFunctionInfo : public HeapObject {
String* DebugName();
// The function cannot cause any side effects.
- bool HasNoSideEffect();
+ static bool HasNoSideEffect(Handle<SharedFunctionInfo> info);
// Used for flags such as --turbo-filter.
bool PassesFilter(const char* raw_filter);
@@ -288,6 +288,9 @@ class SharedFunctionInfo : public HeapObject {
inline LanguageMode language_mode();
inline void set_language_mode(LanguageMode language_mode);
+ // Indicates whether the source is implicitly wrapped in a function.
+ DECL_BOOLEAN_ACCESSORS(is_wrapped)
+
// True if the function has any duplicated parameter names.
DECL_BOOLEAN_ACCESSORS(has_duplicate_parameters)
@@ -336,8 +339,8 @@ class SharedFunctionInfo : public HeapObject {
// [source code]: Source code for the function.
bool HasSourceCode() const;
- Handle<Object> GetSourceCode();
- Handle<Object> GetSourceCodeHarmony();
+ static Handle<Object> GetSourceCode(Handle<SharedFunctionInfo> shared);
+ static Handle<Object> GetSourceCodeHarmony(Handle<SharedFunctionInfo> shared);
// Tells whether this function should be subject to debugging.
inline bool IsSubjectToDebugging();
@@ -465,22 +468,25 @@ class SharedFunctionInfo : public HeapObject {
#define COMPILER_HINTS_BIT_FIELDS(V, _) \
V(IsNativeBit, bool, 1, _) \
V(IsStrictBit, bool, 1, _) \
- V(FunctionKindBits, FunctionKind, 10, _) \
+ V(IsWrappedBit, bool, 1, _) \
+ V(FunctionKindBits, FunctionKind, 11, _) \
V(HasDuplicateParametersBit, bool, 1, _) \
V(AllowLazyCompilationBit, bool, 1, _) \
V(NeedsHomeObjectBit, bool, 1, _) \
V(IsDeclarationBit, bool, 1, _) \
V(IsAsmWasmBrokenBit, bool, 1, _) \
V(FunctionMapIndexBits, int, 5, _) \
- V(DisabledOptimizationReasonBits, BailoutReason, 7, _) \
+ V(DisabledOptimizationReasonBits, BailoutReason, 4, _) \
V(RequiresInstanceFieldsInitializer, bool, 1, _)
DEFINE_BIT_FIELDS(COMPILER_HINTS_BIT_FIELDS)
#undef COMPILER_HINTS_BIT_FIELDS
// Bailout reasons must fit in the DisabledOptimizationReason bitfield.
- STATIC_ASSERT(kLastErrorMessage <= DisabledOptimizationReasonBits::kMax);
+ STATIC_ASSERT(BailoutReason::kLastErrorMessage <=
+ DisabledOptimizationReasonBits::kMax);
+ STATIC_ASSERT(kLastFunctionKind <= FunctionKindBits::kMax);
// Masks for checking if certain FunctionKind bits are set without fully
// decoding the FunctionKind bit field.
static const int kClassConstructorMask = FunctionKind::kClassConstructor
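
The compiler_hints rebalancing above is a zero-sum bit budget. Before the patch the listed fields summed to 1+1+10+1+1+1+1+1+5+7+1 = 30 bits; the patch adds IsWrappedBit (+1) and widens FunctionKindBits to 11 (+1), paid for by shrinking DisabledOptimizationReasonBits from 7 to 4 bits (-3), for a new total of 29 bits, comfortably inside the 32-bit word. The shrink is safe only because of the accompanying STATIC_ASSERT that kLastErrorMessage fits in the 4-bit field. The bookkeeping as a compile-time check:

    // Field widths exactly as listed in COMPILER_HINTS_BIT_FIELDS above.
    constexpr int kCompilerHintsBits = 1 + 1 + 1 + 11 + 1 + 1 + 1 + 1 + 1 + 5 + 4 + 1;
    static_assert(kCompilerHintsBits == 29 && kCompilerHintsBits <= 32,
                  "compiler_hints still fits in one 32-bit word");
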
diff --git a/deps/v8/src/objects/string-inl.h b/deps/v8/src/objects/string-inl.h
index dd75210a54..9b64444de2 100644
--- a/deps/v8/src/objects/string-inl.h
+++ b/deps/v8/src/objects/string-inl.h
@@ -525,11 +525,42 @@ void ConsString::set_second(String* value, WriteBarrierMode mode) {
ACCESSORS(ThinString, actual, String, kActualOffset);
+HeapObject* ThinString::unchecked_actual() const {
+ return reinterpret_cast<HeapObject*>(READ_FIELD(this, kActualOffset));
+}
+
bool ExternalString::is_short() {
InstanceType type = map()->instance_type();
return (type & kShortExternalStringMask) == kShortExternalStringTag;
}
+Address ExternalString::resource_as_address() {
+ return *reinterpret_cast<Address*>(FIELD_ADDR(this, kResourceOffset));
+}
+
+void ExternalString::set_address_as_resource(Address address) {
+ DCHECK(IsAligned(reinterpret_cast<intptr_t>(address), kPointerSize));
+ *reinterpret_cast<Address*>(FIELD_ADDR(this, kResourceOffset)) = address;
+ if (IsExternalOneByteString()) {
+ ExternalOneByteString::cast(this)->update_data_cache();
+ } else {
+ ExternalTwoByteString::cast(this)->update_data_cache();
+ }
+}
+
+uint32_t ExternalString::resource_as_uint32() {
+ return static_cast<uint32_t>(
+ *reinterpret_cast<uintptr_t*>(FIELD_ADDR(this, kResourceOffset)));
+}
+
+void ExternalString::set_uint32_as_resource(uint32_t value) {
+ *reinterpret_cast<uintptr_t*>(FIELD_ADDR(this, kResourceOffset)) = value;
+ if (is_short()) return;
+ const char** data_field =
+ reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
+ *data_field = nullptr;
+}
+
const ExternalOneByteString::Resource* ExternalOneByteString::resource() {
return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}
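
The new ExternalString helpers in string-inl.h reinterpret the resource-pointer slot as a raw Address or uint32_t so the serializer can park an encoded value where the pointer normally lives (clearing the cached data pointer when it does). A standalone sketch of round-tripping an integer through a pointer-sized slot; it uses memcpy for well-defined behavior, whereas V8 casts its heap fields directly:

    #include <cstdint>
    #include <cstring>
    #include <iostream>

    struct ExternalStringLike {
      const void* resource;  // normally points at the external payload
    };

    // Stash an integer in the pointer-sized slot during serialization...
    void set_uint_as_resource(ExternalStringLike& s, uintptr_t value) {
      std::memcpy(&s.resource, &value, sizeof value);
    }

    // ...and recover it on the other side.
    uintptr_t resource_as_uint(const ExternalStringLike& s) {
      uintptr_t value;
      std::memcpy(&value, &s.resource, sizeof value);
      return value;
    }

    int main() {
      ExternalStringLike s{nullptr};
      set_uint_as_resource(s, 42);
      std::cout << resource_as_uint(s) << "\n";  // prints 42
    }
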
diff --git a/deps/v8/src/objects/string.h b/deps/v8/src/objects/string.h
index f21171d62f..066fc6d879 100644
--- a/deps/v8/src/objects/string.h
+++ b/deps/v8/src/objects/string.h
@@ -631,6 +631,7 @@ class ThinString : public String {
public:
// Actual string that this ThinString refers to.
inline String* actual() const;
+ inline HeapObject* unchecked_actual() const;
inline void set_actual(String* s,
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
@@ -718,6 +719,12 @@ class ExternalString : public String {
// Return whether external string is short (data pointer is not cached).
inline bool is_short();
+ // Used in the serializer/deserializer.
+ inline Address resource_as_address();
+ inline void set_address_as_resource(Address address);
+ inline uint32_t resource_as_uint32();
+ inline void set_uint32_as_resource(uint32_t value);
+
STATIC_ASSERT(kResourceOffset == Internals::kStringResourceOffset);
private: