Diffstat (limited to 'deps/v8/src/codegen/code-stub-assembler.h')
-rw-r--r--  deps/v8/src/codegen/code-stub-assembler.h  391
1 file changed, 200 insertions(+), 191 deletions(-)
diff --git a/deps/v8/src/codegen/code-stub-assembler.h b/deps/v8/src/codegen/code-stub-assembler.h
index 207eb509e1..00a84c3926 100644
--- a/deps/v8/src/codegen/code-stub-assembler.h
+++ b/deps/v8/src/codegen/code-stub-assembler.h
@@ -10,9 +10,8 @@
#include "src/base/macros.h"
#include "src/codegen/bailout-reason.h"
#include "src/common/globals.h"
+#include "src/common/message-template.h"
#include "src/compiler/code-assembler.h"
-#include "src/execution/frames.h"
-#include "src/execution/message-template.h"
#include "src/objects/arguments.h"
#include "src/objects/bigint.h"
#include "src/objects/objects.h"
@@ -39,7 +38,6 @@ enum class PrimitiveType { kBoolean, kNumber, kString, kSymbol };
PromiseSpeciesProtector) \
V(TypedArraySpeciesProtector, typed_array_species_protector, \
TypedArraySpeciesProtector) \
- V(RegExpSpeciesProtector, regexp_species_protector, RegExpSpeciesProtector)
#define HEAP_IMMUTABLE_IMMOVABLE_OBJECT_LIST(V) \
V(AccessorInfoMap, accessor_info_map, AccessorInfoMap) \
@@ -111,59 +109,45 @@ enum class PrimitiveType { kBoolean, kNumber, kString, kSymbol };
#endif
#ifdef DEBUG
-// Add stringified versions to the given values, except the first. That is,
-// transform
-// x, a, b, c, d, e, f
-// to
-// a, "a", b, "b", c, "c", d, "d", e, "e", f, "f"
-//
-// __VA_ARGS__ is ignored to allow the caller to pass through too many
-// parameters, and the first element is ignored to support having no extra
-// values without empty __VA_ARGS__ (which cause all sorts of problems with
-// extra commas).
-#define CSA_ASSERT_STRINGIFY_EXTRA_VALUES_5(_, v1, v2, v3, v4, v5, ...) \
- v1, #v1, v2, #v2, v3, #v3, v4, #v4, v5, #v5
-
-// Stringify the given variable number of arguments. The arguments are trimmed
-// to 5 if there are too many, and padded with nullptr if there are not enough.
-#define CSA_ASSERT_STRINGIFY_EXTRA_VALUES(...) \
- CSA_ASSERT_STRINGIFY_EXTRA_VALUES_5(__VA_ARGS__, nullptr, nullptr, nullptr, \
- nullptr, nullptr)
-
-#define CSA_ASSERT_GET_FIRST(x, ...) (x)
-#define CSA_ASSERT_GET_FIRST_STR(x, ...) #x
+// CSA_ASSERT_ARGS generates an
+// std::initializer_list<CodeStubAssembler::ExtraNode> from __VA_ARGS__. It
+// currently supports between 0 and 2 arguments.
+
+// clang-format off
+#define CSA_ASSERT_0_ARGS(...) {}
+#define CSA_ASSERT_1_ARG(a, ...) {{a, #a}}
+#define CSA_ASSERT_2_ARGS(a, b, ...) {{a, #a}, {b, #b}}
+// clang-format on
+#define SWITCH_CSA_ASSERT_ARGS(dummy, a, b, FUNC, ...) FUNC(a, b)
+#define CSA_ASSERT_ARGS(...) \
+ SWITCH_CSA_ASSERT_ARGS(dummy, ##__VA_ARGS__, CSA_ASSERT_2_ARGS, \
+ CSA_ASSERT_1_ARG, CSA_ASSERT_0_ARGS)
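As background, the argument-counting trick above relies on the GNU ##__VA_ARGS__ extension: the ## pasting drops the preceding comma when the variadic list is empty, so SWITCH_CSA_ASSERT_ARGS ends up selecting the expander that matches the number of extra values. A minimal, self-contained sketch of the same pattern (illustration only, names invented here; requires GCC/Clang for the comma-dropping extension):

// Same 0/1/2-argument dispatch as CSA_ASSERT_ARGS, outside the CSA context.
#include <cstdio>

#define DEMO_0_ARGS(...) 0
#define DEMO_1_ARG(a, ...) 1
#define DEMO_2_ARGS(a, b, ...) 2
#define DEMO_SWITCH(dummy, a, b, FUNC, ...) FUNC(a, b)
#define DEMO_COUNT(...) \
  DEMO_SWITCH(dummy, ##__VA_ARGS__, DEMO_2_ARGS, DEMO_1_ARG, DEMO_0_ARGS)

int main() {
  // Prints "0 1 2": with no extra arguments the selector falls through to
  // DEMO_0_ARGS, with one it picks DEMO_1_ARG, with two DEMO_2_ARGS.
  std::printf("%d %d %d\n", DEMO_COUNT(), DEMO_COUNT(x), DEMO_COUNT(x, y));
  return 0;
}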
// CSA_ASSERT(csa, <condition>, <extra values to print...>)
-// We have to jump through some hoops to allow <extra values to print...> to be
-// empty.
-#define CSA_ASSERT(csa, ...) \
- (csa)->Assert( \
- [&]() -> compiler::Node* { \
- return implicit_cast<compiler::SloppyTNode<Word32T>>( \
- EXPAND(CSA_ASSERT_GET_FIRST(__VA_ARGS__))); \
- }, \
- EXPAND(CSA_ASSERT_GET_FIRST_STR(__VA_ARGS__)), __FILE__, __LINE__, \
- CSA_ASSERT_STRINGIFY_EXTRA_VALUES(__VA_ARGS__))
+#define CSA_ASSERT(csa, condition_node, ...) \
+ (csa)->Assert( \
+ [&]() -> compiler::Node* { \
+ return implicit_cast<compiler::SloppyTNode<Word32T>>(condition_node); \
+ }, \
+ #condition_node, __FILE__, __LINE__, CSA_ASSERT_ARGS(__VA_ARGS__))
// CSA_ASSERT_BRANCH(csa, [](Label* ok, Label* not_ok) {...},
// <extra values to print...>)
-#define CSA_ASSERT_BRANCH(csa, ...) \
- (csa)->Assert(EXPAND(CSA_ASSERT_GET_FIRST(__VA_ARGS__)), \
- EXPAND(CSA_ASSERT_GET_FIRST_STR(__VA_ARGS__)), __FILE__, \
- __LINE__, CSA_ASSERT_STRINGIFY_EXTRA_VALUES(__VA_ARGS__))
-
-#define CSA_ASSERT_JS_ARGC_OP(csa, Op, op, expected) \
- (csa)->Assert( \
- [&]() -> compiler::Node* { \
- compiler::Node* const argc = \
- (csa)->Parameter(Descriptor::kJSActualArgumentsCount); \
- return (csa)->Op(argc, (csa)->Int32Constant(expected)); \
- }, \
- "argc " #op " " #expected, __FILE__, __LINE__, \
- SmiFromInt32((csa)->Parameter(Descriptor::kJSActualArgumentsCount)), \
- "argc")
+#define CSA_ASSERT_BRANCH(csa, gen, ...) \
+ (csa)->Assert(gen, #gen, __FILE__, __LINE__, CSA_ASSERT_ARGS(__VA_ARGS__))
+
+#define CSA_ASSERT_JS_ARGC_OP(csa, Op, op, expected) \
+ (csa)->Assert( \
+ [&]() -> compiler::Node* { \
+ compiler::Node* const argc = \
+ (csa)->Parameter(Descriptor::kJSActualArgumentsCount); \
+ return (csa)->Op(argc, (csa)->Int32Constant(expected)); \
+ }, \
+ "argc " #op " " #expected, __FILE__, __LINE__, \
+ {{SmiFromInt32((csa)->Parameter(Descriptor::kJSActualArgumentsCount)), \
+ "argc"}})
#define CSA_ASSERT_JS_ARGC_EQ(csa, expected) \
CSA_ASSERT_JS_ARGC_OP(csa, Word32Equal, ==, expected)
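With the new scheme, a call site simply lists the extra values to print after the condition; a hypothetical example (index and length are placeholder nodes, not part of this patch):

  // On failure this prints the stringified condition plus the
  // {index, "index"} and {length, "length"} pairs built by CSA_ASSERT_ARGS.
  CSA_ASSERT(this, UintPtrLessThan(index, length), index, length);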
@@ -490,21 +474,21 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
TNode<Int32T> SmiToInt32(SloppyTNode<Smi> value);
// Smi operations.
-#define SMI_ARITHMETIC_BINOP(SmiOpName, IntPtrOpName, Int32OpName) \
- TNode<Smi> SmiOpName(TNode<Smi> a, TNode<Smi> b) { \
- if (SmiValuesAre32Bits()) { \
- return BitcastWordToTaggedSigned( \
- IntPtrOpName(BitcastTaggedToWord(a), BitcastTaggedToWord(b))); \
- } else { \
- DCHECK(SmiValuesAre31Bits()); \
- if (kSystemPointerSize == kInt64Size) { \
- CSA_ASSERT(this, IsValidSmi(a)); \
- CSA_ASSERT(this, IsValidSmi(b)); \
- } \
- return BitcastWordToTaggedSigned(ChangeInt32ToIntPtr( \
- Int32OpName(TruncateIntPtrToInt32(BitcastTaggedToWord(a)), \
- TruncateIntPtrToInt32(BitcastTaggedToWord(b))))); \
- } \
+#define SMI_ARITHMETIC_BINOP(SmiOpName, IntPtrOpName, Int32OpName) \
+ TNode<Smi> SmiOpName(TNode<Smi> a, TNode<Smi> b) { \
+ if (SmiValuesAre32Bits()) { \
+ return BitcastWordToTaggedSigned(IntPtrOpName( \
+ BitcastTaggedSignedToWord(a), BitcastTaggedSignedToWord(b))); \
+ } else { \
+ DCHECK(SmiValuesAre31Bits()); \
+ if (kSystemPointerSize == kInt64Size) { \
+ CSA_ASSERT(this, IsValidSmi(a)); \
+ CSA_ASSERT(this, IsValidSmi(b)); \
+ } \
+ return BitcastWordToTaggedSigned(ChangeInt32ToIntPtr( \
+ Int32OpName(TruncateIntPtrToInt32(BitcastTaggedSignedToWord(a)), \
+ TruncateIntPtrToInt32(BitcastTaggedSignedToWord(b))))); \
+ } \
}
SMI_ARITHMETIC_BINOP(SmiAdd, IntPtrAdd, Int32Add)
SMI_ARITHMETIC_BINOP(SmiSub, IntPtrSub, Int32Sub)
@@ -523,19 +507,20 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
TNode<Smi> TrySmiSub(TNode<Smi> a, TNode<Smi> b, Label* if_overflow);
TNode<Smi> SmiShl(TNode<Smi> a, int shift) {
- return BitcastWordToTaggedSigned(WordShl(BitcastTaggedToWord(a), shift));
+ return BitcastWordToTaggedSigned(
+ WordShl(BitcastTaggedSignedToWord(a), shift));
}
TNode<Smi> SmiShr(TNode<Smi> a, int shift) {
return BitcastWordToTaggedSigned(
- WordAnd(WordShr(BitcastTaggedToWord(a), shift),
- BitcastTaggedToWord(SmiConstant(-1))));
+ WordAnd(WordShr(BitcastTaggedSignedToWord(a), shift),
+ BitcastTaggedSignedToWord(SmiConstant(-1))));
}
TNode<Smi> SmiSar(TNode<Smi> a, int shift) {
return BitcastWordToTaggedSigned(
- WordAnd(WordSar(BitcastTaggedToWord(a), shift),
- BitcastTaggedToWord(SmiConstant(-1))));
+ WordAnd(WordSar(BitcastTaggedSignedToWord(a), shift),
+ BitcastTaggedSignedToWord(SmiConstant(-1))));
}
Node* WordOrSmiShl(Node* a, int shift, ParameterMode mode) {
@@ -556,19 +541,20 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
}
}
-#define SMI_COMPARISON_OP(SmiOpName, IntPtrOpName, Int32OpName) \
- TNode<BoolT> SmiOpName(TNode<Smi> a, TNode<Smi> b) { \
- if (SmiValuesAre32Bits()) { \
- return IntPtrOpName(BitcastTaggedToWord(a), BitcastTaggedToWord(b)); \
- } else { \
- DCHECK(SmiValuesAre31Bits()); \
- if (kSystemPointerSize == kInt64Size) { \
- CSA_ASSERT(this, IsValidSmi(a)); \
- CSA_ASSERT(this, IsValidSmi(b)); \
- } \
- return Int32OpName(TruncateIntPtrToInt32(BitcastTaggedToWord(a)), \
- TruncateIntPtrToInt32(BitcastTaggedToWord(b))); \
- } \
+#define SMI_COMPARISON_OP(SmiOpName, IntPtrOpName, Int32OpName) \
+ TNode<BoolT> SmiOpName(TNode<Smi> a, TNode<Smi> b) { \
+ if (SmiValuesAre32Bits()) { \
+ return IntPtrOpName(BitcastTaggedSignedToWord(a), \
+ BitcastTaggedSignedToWord(b)); \
+ } else { \
+ DCHECK(SmiValuesAre31Bits()); \
+ if (kSystemPointerSize == kInt64Size) { \
+ CSA_ASSERT(this, IsValidSmi(a)); \
+ CSA_ASSERT(this, IsValidSmi(b)); \
+ } \
+ return Int32OpName(TruncateIntPtrToInt32(BitcastTaggedSignedToWord(a)), \
+ TruncateIntPtrToInt32(BitcastTaggedSignedToWord(b))); \
+ } \
}
SMI_COMPARISON_OP(SmiEqual, WordEqual, Word32Equal)
SMI_COMPARISON_OP(SmiNotEqual, WordNotEqual, Word32NotEqual)
@@ -626,43 +612,22 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
using BranchGenerator = std::function<void(Label*, Label*)>;
using NodeGenerator = std::function<Node*()>;
-
- void Assert(const BranchGenerator& branch, const char* message = nullptr,
- const char* file = nullptr, int line = 0,
- Node* extra_node1 = nullptr, const char* extra_node1_name = "",
- Node* extra_node2 = nullptr, const char* extra_node2_name = "",
- Node* extra_node3 = nullptr, const char* extra_node3_name = "",
- Node* extra_node4 = nullptr, const char* extra_node4_name = "",
- Node* extra_node5 = nullptr, const char* extra_node5_name = "");
- void Assert(const NodeGenerator& condition_body,
- const char* message = nullptr, const char* file = nullptr,
- int line = 0, Node* extra_node1 = nullptr,
- const char* extra_node1_name = "", Node* extra_node2 = nullptr,
- const char* extra_node2_name = "", Node* extra_node3 = nullptr,
- const char* extra_node3_name = "", Node* extra_node4 = nullptr,
- const char* extra_node4_name = "", Node* extra_node5 = nullptr,
- const char* extra_node5_name = "");
- void Check(const BranchGenerator& branch, const char* message = nullptr,
- const char* file = nullptr, int line = 0,
- Node* extra_node1 = nullptr, const char* extra_node1_name = "",
- Node* extra_node2 = nullptr, const char* extra_node2_name = "",
- Node* extra_node3 = nullptr, const char* extra_node3_name = "",
- Node* extra_node4 = nullptr, const char* extra_node4_name = "",
- Node* extra_node5 = nullptr, const char* extra_node5_name = "");
- void Check(const NodeGenerator& condition_body, const char* message = nullptr,
- const char* file = nullptr, int line = 0,
- Node* extra_node1 = nullptr, const char* extra_node1_name = "",
- Node* extra_node2 = nullptr, const char* extra_node2_name = "",
- Node* extra_node3 = nullptr, const char* extra_node3_name = "",
- Node* extra_node4 = nullptr, const char* extra_node4_name = "",
- Node* extra_node5 = nullptr, const char* extra_node5_name = "");
- void FailAssert(
- const char* message = nullptr, const char* file = nullptr, int line = 0,
- Node* extra_node1 = nullptr, const char* extra_node1_name = "",
- Node* extra_node2 = nullptr, const char* extra_node2_name = "",
- Node* extra_node3 = nullptr, const char* extra_node3_name = "",
- Node* extra_node4 = nullptr, const char* extra_node4_name = "",
- Node* extra_node5 = nullptr, const char* extra_node5_name = "");
+ using ExtraNode = std::pair<Node*, const char*>;
+
+ void Assert(const BranchGenerator& branch, const char* message,
+ const char* file, int line,
+ std::initializer_list<ExtraNode> extra_nodes = {});
+ void Assert(const NodeGenerator& condition_body, const char* message,
+ const char* file, int line,
+ std::initializer_list<ExtraNode> extra_nodes = {});
+ void Check(const BranchGenerator& branch, const char* message,
+ const char* file, int line,
+ std::initializer_list<ExtraNode> extra_nodes = {});
+ void Check(const NodeGenerator& condition_body, const char* message,
+ const char* file, int line,
+ std::initializer_list<ExtraNode> extra_nodes = {});
+ void FailAssert(const char* message, const char* file, int line,
+ std::initializer_list<ExtraNode> extra_nodes = {});
void FastCheck(TNode<BoolT> condition);
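The Assert/Check/FailAssert overloads now take the extra debug values as a std::initializer_list<ExtraNode> instead of five nullptr-defaulted node/name pairs. A hedged sketch of a direct (non-macro) call, with value standing in for some TNode<HeapObject> in scope; normally this is reached through CSA_ASSERT, which also stringifies the condition and fills in __FILE__/__LINE__:

  csa->Assert([&]() -> compiler::Node* { return csa->IsString(value); },
              "IsString(value)", __FILE__, __LINE__, {{value, "value"}});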
@@ -794,6 +759,12 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
// otherwise goes to {if_false}.
void BranchIfToBooleanIsTrue(Node* value, Label* if_true, Label* if_false);
+ // Branches to {if_false} if ToBoolean applied to {value} yields false,
+ // otherwise goes to {if_true}.
+ void BranchIfToBooleanIsFalse(Node* value, Label* if_false, Label* if_true) {
+ BranchIfToBooleanIsTrue(value, if_true, if_false);
+ }
+
void BranchIfJSReceiver(Node* object, Label* if_true, Label* if_false);
// Branches to {if_true} when --force-slow-path flag has been passed.
@@ -811,8 +782,10 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
MachineType type = MachineType::AnyTagged());
// Load an object pointer from a buffer that isn't in the heap.
- Node* LoadBufferObject(Node* buffer, int offset,
- MachineType type = MachineType::AnyTagged());
+ Node* LoadBufferObject(Node* buffer, int offset, MachineType type);
+ TNode<Object> LoadBufferObject(TNode<RawPtrT> buffer, int offset) {
+ return CAST(LoadBufferObject(buffer, offset, MachineType::AnyTagged()));
+ }
TNode<RawPtrT> LoadBufferPointer(TNode<RawPtrT> buffer, int offset) {
return UncheckedCast<RawPtrT>(
LoadBufferObject(buffer, offset, MachineType::Pointer()));
@@ -887,15 +860,19 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
std::is_convertible<TNode<T>, TNode<Object>>::value,
int>::type = 0>
TNode<T> LoadReference(Reference reference) {
- return CAST(LoadFromObject(MachineTypeOf<T>::value, reference.object,
- reference.offset));
+ TNode<IntPtrT> offset =
+ IntPtrSub(reference.offset, IntPtrConstant(kHeapObjectTag));
+ return CAST(
+ LoadFromObject(MachineTypeOf<T>::value, reference.object, offset));
}
template <class T, typename std::enable_if<
std::is_convertible<TNode<T>, TNode<UntaggedT>>::value,
int>::type = 0>
TNode<T> LoadReference(Reference reference) {
- return UncheckedCast<T>(LoadFromObject(MachineTypeOf<T>::value,
- reference.object, reference.offset));
+ TNode<IntPtrT> offset =
+ IntPtrSub(reference.offset, IntPtrConstant(kHeapObjectTag));
+ return UncheckedCast<T>(
+ LoadFromObject(MachineTypeOf<T>::value, reference.object, offset));
}
template <class T, typename std::enable_if<
std::is_convertible<TNode<T>, TNode<Object>>::value,
@@ -908,15 +885,18 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
} else if (std::is_same<T, Map>::value) {
write_barrier = StoreToObjectWriteBarrier::kMap;
}
- StoreToObject(rep, reference.object, reference.offset, value,
- write_barrier);
+ TNode<IntPtrT> offset =
+ IntPtrSub(reference.offset, IntPtrConstant(kHeapObjectTag));
+ StoreToObject(rep, reference.object, offset, value, write_barrier);
}
template <class T, typename std::enable_if<
std::is_convertible<TNode<T>, TNode<UntaggedT>>::value,
int>::type = 0>
void StoreReference(Reference reference, TNode<T> value) {
- StoreToObject(MachineRepresentationOf<T>::value, reference.object,
- reference.offset, value, StoreToObjectWriteBarrier::kNone);
+ TNode<IntPtrT> offset =
+ IntPtrSub(reference.offset, IntPtrConstant(kHeapObjectTag));
+ StoreToObject(MachineRepresentationOf<T>::value, reference.object, offset,
+ value, StoreToObjectWriteBarrier::kNone);
}
// Tag a smi and store it.
@@ -927,7 +907,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
// Load the Map of a HeapObject.
TNode<Map> LoadMap(SloppyTNode<HeapObject> object);
// Load the instance type of a HeapObject.
- TNode<Int32T> LoadInstanceType(SloppyTNode<HeapObject> object);
+ TNode<Uint16T> LoadInstanceType(SloppyTNode<HeapObject> object);
// Compare the instance type of the object against the provided one.
TNode<BoolT> HasInstanceType(SloppyTNode<HeapObject> object,
InstanceType type);
@@ -967,7 +947,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
// Load bit field 3 of a map.
TNode<Uint32T> LoadMapBitField3(SloppyTNode<Map> map);
// Load the instance type of a map.
- TNode<Int32T> LoadMapInstanceType(SloppyTNode<Map> map);
+ TNode<Uint16T> LoadMapInstanceType(SloppyTNode<Map> map);
// Load the ElementsKind of a map.
TNode<Int32T> LoadMapElementsKind(SloppyTNode<Map> map);
TNode<Int32T> LoadElementsKind(SloppyTNode<HeapObject> object);
@@ -1023,8 +1003,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
TNode<Uint32T> LoadStringLengthAsWord32(SloppyTNode<String> string);
// Loads a pointer to the sequential String char array.
Node* PointerToSeqStringData(Node* seq_string);
- // Load value field of a JSValue object.
- Node* LoadJSValueValue(Node* object);
+ // Load value field of a JSPrimitiveWrapper object.
+ Node* LoadJSPrimitiveWrapperValue(Node* object);
// Figures out whether the value of maybe_object is:
// - a SMI (jump to "if_smi", "extracted" will be the SMI value)
@@ -1076,8 +1056,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
// Array is any array-like type that has a fixed header followed by
// tagged elements.
- template <typename Array>
- TNode<MaybeObject> LoadArrayElement(
+ template <typename Array, typename T = MaybeObject>
+ TNode<T> LoadArrayElement(
TNode<Array> array, int array_header_size, Node* index,
int additional_offset = 0,
ParameterMode parameter_mode = INTPTR_PARAMETERS,
@@ -1232,15 +1212,23 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
TNode<Float64T> LoadDoubleWithHoleCheck(
SloppyTNode<Object> base, SloppyTNode<IntPtrT> offset, Label* if_hole,
MachineType machine_type = MachineType::Float64());
- Node* LoadFixedTypedArrayElementAsTagged(
- Node* data_pointer, Node* index_node, ElementsKind elements_kind,
+ TNode<Numeric> LoadFixedTypedArrayElementAsTagged(
+ TNode<RawPtrT> data_pointer, Node* index_node, ElementsKind elements_kind,
ParameterMode parameter_mode = INTPTR_PARAMETERS);
TNode<Numeric> LoadFixedTypedArrayElementAsTagged(
- TNode<WordT> data_pointer, TNode<Smi> index, TNode<Int32T> elements_kind);
+ TNode<RawPtrT> data_pointer, TNode<Smi> index_node,
+ ElementsKind elements_kind) {
+ return LoadFixedTypedArrayElementAsTagged(data_pointer, index_node,
+ elements_kind, SMI_PARAMETERS);
+ }
+ TNode<Numeric> LoadFixedTypedArrayElementAsTagged(
+ TNode<RawPtrT> data_pointer, TNode<Smi> index,
+ TNode<Int32T> elements_kind);
// Parts of the above, factored out for readability:
- Node* LoadFixedBigInt64ArrayElementAsTagged(Node* data_pointer, Node* offset);
- Node* LoadFixedBigUint64ArrayElementAsTagged(Node* data_pointer,
- Node* offset);
+ TNode<BigInt> LoadFixedBigInt64ArrayElementAsTagged(
+ SloppyTNode<RawPtrT> data_pointer, SloppyTNode<IntPtrT> offset);
+ TNode<BigInt> LoadFixedBigUint64ArrayElementAsTagged(
+ SloppyTNode<RawPtrT> data_pointer, SloppyTNode<IntPtrT> offset);
// 64-bit platforms only:
TNode<BigInt> BigIntFromInt64(TNode<IntPtrT> value);
TNode<BigInt> BigIntFromUint64(TNode<UintPtrT> value);
@@ -1250,10 +1238,9 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
void StoreJSTypedArrayElementFromTagged(TNode<Context> context,
TNode<JSTypedArray> typed_array,
- TNode<Object> index_node,
+ TNode<Smi> index_node,
TNode<Object> value,
- ElementsKind elements_kind,
- ParameterMode parameter_mode);
+ ElementsKind elements_kind);
// Context manipulation
TNode<Object> LoadContextElement(SloppyTNode<Context> context,
@@ -1534,10 +1521,15 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
// Like above, but allowing custom bitfield initialization.
TNode<BigInt> AllocateRawBigInt(TNode<IntPtrT> length);
void StoreBigIntBitfield(TNode<BigInt> bigint, TNode<Word32T> bitfield);
- void StoreBigIntDigit(TNode<BigInt> bigint, int digit_index,
+ void StoreBigIntDigit(TNode<BigInt> bigint, intptr_t digit_index,
+ TNode<UintPtrT> digit);
+ void StoreBigIntDigit(TNode<BigInt> bigint, TNode<IntPtrT> digit_index,
TNode<UintPtrT> digit);
+
TNode<Word32T> LoadBigIntBitfield(TNode<BigInt> bigint);
- TNode<UintPtrT> LoadBigIntDigit(TNode<BigInt> bigint, int digit_index);
+ TNode<UintPtrT> LoadBigIntDigit(TNode<BigInt> bigint, intptr_t digit_index);
+ TNode<UintPtrT> LoadBigIntDigit(TNode<BigInt> bigint,
+ TNode<IntPtrT> digit_index);
// Allocate a ByteArray with the given length.
TNode<ByteArray> AllocateByteArray(TNode<UintPtrT> length,
@@ -1573,9 +1565,9 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
TNode<NameDictionary> AllocateNameDictionary(int at_least_space_for);
TNode<NameDictionary> AllocateNameDictionary(
- TNode<IntPtrT> at_least_space_for);
+ TNode<IntPtrT> at_least_space_for, AllocationFlags = kNone);
TNode<NameDictionary> AllocateNameDictionaryWithCapacity(
- TNode<IntPtrT> capacity);
+ TNode<IntPtrT> capacity, AllocationFlags = kNone);
TNode<NameDictionary> CopyNameDictionary(TNode<NameDictionary> dictionary,
Label* large_object_fallback);
@@ -1604,9 +1596,9 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
void InitializeStructBody(Node* object, Node* map, Node* size,
int start_offset = Struct::kHeaderSize);
- Node* AllocateJSObjectFromMap(
- Node* map, Node* properties = nullptr, Node* elements = nullptr,
- AllocationFlags flags = kNone,
+ TNode<JSObject> AllocateJSObjectFromMap(
+ SloppyTNode<Map> map, SloppyTNode<HeapObject> properties = nullptr,
+ SloppyTNode<FixedArray> elements = nullptr, AllocationFlags flags = kNone,
SlackTrackingMode slack_tracking_mode = kNoSlackTracking);
void InitializeJSObjectFromMap(
@@ -1696,6 +1688,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
fixed_array_map);
}
+ TNode<Map> GetStructMap(InstanceType instance_type);
+
TNode<FixedArray> AllocateUninitializedFixedArray(intptr_t capacity) {
return UncheckedCast<FixedArray>(AllocateFixedArray(
PACKED_ELEMENTS, IntPtrConstant(capacity), AllocationFlag::kNone));
@@ -1745,7 +1739,9 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
TNode<Object> object,
IterationKind mode);
- Node* AllocateJSIteratorResult(Node* context, Node* value, Node* done);
+ TNode<JSObject> AllocateJSIteratorResult(SloppyTNode<Context> context,
+ SloppyTNode<Object> value,
+ SloppyTNode<Oddball> done);
Node* AllocateJSIteratorResultForEntry(Node* context, Node* key, Node* value);
TNode<JSReceiver> ArraySpeciesCreate(TNode<Context> context,
@@ -1934,6 +1930,15 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
SMI_PARAMETERS);
}
+ TNode<FixedArray> ExtractFixedArray(
+ TNode<FixedArray> source, TNode<IntPtrT> first, TNode<IntPtrT> count,
+ TNode<IntPtrT> capacity,
+ ExtractFixedArrayFlags extract_flags =
+ ExtractFixedArrayFlag::kAllFixedArrays) {
+ return CAST(ExtractFixedArray(source, first, count, capacity, extract_flags,
+ INTPTR_PARAMETERS));
+ }
+
// Copy a portion of an existing FixedArray or FixedDoubleArray into a new
// FixedArray, including special appropriate handling for COW arrays.
// * |source| is either a FixedArray or FixedDoubleArray from which to copy
@@ -2043,6 +2048,10 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
TNode<Smi> CalculateNewElementsCapacity(TNode<Smi> old_capacity) {
return CAST(CalculateNewElementsCapacity(old_capacity, SMI_PARAMETERS));
}
+ TNode<IntPtrT> CalculateNewElementsCapacity(TNode<IntPtrT> old_capacity) {
+ return UncheckedCast<IntPtrT>(
+ CalculateNewElementsCapacity(old_capacity, INTPTR_PARAMETERS));
+ }
// Tries to grow the |elements| array of given |object| to store the |key|
// or bails out if the growing gap is too big. Returns new elements.
@@ -2086,19 +2095,21 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
Label* if_bigint, Variable* var_bigint, Variable* var_feedback);
// Truncate the floating point value of a HeapNumber to an Int32.
- Node* TruncateHeapNumberValueToWord32(Node* object);
+ TNode<Int32T> TruncateHeapNumberValueToWord32(TNode<HeapNumber> object);
// Conversions.
- void TryHeapNumberToSmi(TNode<HeapNumber> number, TVariable<Smi>& output,
+ void TryHeapNumberToSmi(TNode<HeapNumber> number,
+ TVariable<Smi>& output, // NOLINT(runtime/references)
Label* if_smi);
- void TryFloat64ToSmi(TNode<Float64T> number, TVariable<Smi>& output,
+ void TryFloat64ToSmi(TNode<Float64T> number,
+ TVariable<Smi>& output, // NOLINT(runtime/references)
Label* if_smi);
TNode<Number> ChangeFloat64ToTagged(SloppyTNode<Float64T> value);
TNode<Number> ChangeInt32ToTagged(SloppyTNode<Int32T> value);
TNode<Number> ChangeUint32ToTagged(SloppyTNode<Uint32T> value);
TNode<Number> ChangeUintPtrToTagged(TNode<UintPtrT> value);
TNode<Uint32T> ChangeNumberToUint32(TNode<Number> value);
- TNode<Float64T> ChangeNumberToFloat64(SloppyTNode<Number> value);
+ TNode<Float64T> ChangeNumberToFloat64(TNode<Number> value);
TNode<UintPtrT> TryNumberToUintPtr(TNode<Number> value, Label* if_negative);
TNode<UintPtrT> ChangeNonnegativeNumberToUintPtr(TNode<Number> value) {
return TryNumberToUintPtr(value, nullptr);
@@ -2145,10 +2156,11 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
}
// Throws a TypeError for {method_name} if {value} is neither of the given
- // {primitive_type} nor a JSValue wrapping a value of {primitive_type}, or
- // returns the {value} (or wrapped value) otherwise.
- Node* ToThisValue(Node* context, Node* value, PrimitiveType primitive_type,
- char const* method_name);
+ // {primitive_type} nor a JSPrimitiveWrapper wrapping a value of
+ // {primitive_type}, or returns the {value} (or wrapped value) otherwise.
+ TNode<Object> ToThisValue(TNode<Context> context, TNode<Object> value,
+ PrimitiveType primitive_type,
+ char const* method_name);
// Throws a TypeError for {method_name} if {value} is not of the given
// instance type. Returns {value}'s map.
@@ -2231,6 +2243,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
TNode<BoolT> IsJSFunction(SloppyTNode<HeapObject> object);
TNode<BoolT> IsJSGeneratorObject(SloppyTNode<HeapObject> object);
TNode<BoolT> IsJSGlobalProxyInstanceType(SloppyTNode<Int32T> instance_type);
+ TNode<BoolT> IsJSGlobalProxyMap(SloppyTNode<Map> map);
TNode<BoolT> IsJSGlobalProxy(SloppyTNode<HeapObject> object);
TNode<BoolT> IsJSObjectInstanceType(SloppyTNode<Int32T> instance_type);
TNode<BoolT> IsJSObjectMap(SloppyTNode<Map> map);
@@ -2246,9 +2259,10 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
TNode<BoolT> IsJSTypedArrayInstanceType(SloppyTNode<Int32T> instance_type);
TNode<BoolT> IsJSTypedArrayMap(SloppyTNode<Map> map);
TNode<BoolT> IsJSTypedArray(SloppyTNode<HeapObject> object);
- TNode<BoolT> IsJSValueInstanceType(SloppyTNode<Int32T> instance_type);
- TNode<BoolT> IsJSValueMap(SloppyTNode<Map> map);
- TNode<BoolT> IsJSValue(SloppyTNode<HeapObject> object);
+ TNode<BoolT> IsJSPrimitiveWrapperInstanceType(
+ SloppyTNode<Int32T> instance_type);
+ TNode<BoolT> IsJSPrimitiveWrapperMap(SloppyTNode<Map> map);
+ TNode<BoolT> IsJSPrimitiveWrapper(SloppyTNode<HeapObject> object);
TNode<BoolT> IsMap(SloppyTNode<HeapObject> object);
TNode<BoolT> IsMutableHeapNumber(SloppyTNode<HeapObject> object);
TNode<BoolT> IsName(SloppyTNode<HeapObject> object);
@@ -2260,6 +2274,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
TNode<BoolT> IsOneByteStringInstanceType(SloppyTNode<Int32T> instance_type);
TNode<BoolT> IsPrimitiveInstanceType(SloppyTNode<Int32T> instance_type);
TNode<BoolT> IsPrivateSymbol(SloppyTNode<HeapObject> object);
+ TNode<BoolT> IsPrivateName(SloppyTNode<Symbol> symbol);
TNode<BoolT> IsPromiseCapability(SloppyTNode<HeapObject> object);
TNode<BoolT> IsPropertyArray(SloppyTNode<HeapObject> object);
TNode<BoolT> IsPropertyCell(SloppyTNode<HeapObject> object);
@@ -2305,7 +2320,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
TNode<BoolT> IsPromiseThenProtectorCellInvalid();
TNode<BoolT> IsArraySpeciesProtectorCellInvalid();
TNode<BoolT> IsTypedArraySpeciesProtectorCellInvalid();
- TNode<BoolT> IsRegExpSpeciesProtectorCellInvalid();
+ TNode<BoolT> IsRegExpSpeciesProtectorCellInvalid(
+ TNode<Context> native_context);
TNode<BoolT> IsPromiseSpeciesProtectorCellInvalid();
TNode<BoolT> IsMockArrayBufferAllocatorFlag() {
@@ -2355,7 +2371,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
return Word32Equal(a, b);
}
bool ElementsKindEqual(ElementsKind a, ElementsKind b) { return a == b; }
- Node* IsFastElementsKind(Node* elements_kind);
+ TNode<BoolT> IsFastElementsKind(TNode<Int32T> elements_kind);
bool IsFastElementsKind(ElementsKind kind) {
return v8::internal::IsFastElementsKind(kind);
}
@@ -2366,12 +2382,12 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
bool IsDoubleElementsKind(ElementsKind kind) {
return v8::internal::IsDoubleElementsKind(kind);
}
- Node* IsFastSmiOrTaggedElementsKind(Node* elements_kind);
- Node* IsFastSmiElementsKind(Node* elements_kind);
- Node* IsHoleyFastElementsKind(Node* elements_kind);
- Node* IsHoleyFastElementsKindForRead(Node* elements_kind);
- Node* IsElementsKindGreaterThan(Node* target_kind,
- ElementsKind reference_kind);
+ TNode<BoolT> IsFastSmiOrTaggedElementsKind(TNode<Int32T> elements_kind);
+ TNode<BoolT> IsFastSmiElementsKind(SloppyTNode<Int32T> elements_kind);
+ TNode<BoolT> IsHoleyFastElementsKind(TNode<Int32T> elements_kind);
+ TNode<BoolT> IsHoleyFastElementsKindForRead(TNode<Int32T> elements_kind);
+ TNode<BoolT> IsElementsKindGreaterThan(TNode<Int32T> target_kind,
+ ElementsKind reference_kind);
TNode<BoolT> IsElementsKindLessThanOrEqual(TNode<Int32T> target_kind,
ElementsKind reference_kind);
// Check if reference_kind_a <= target_kind <= reference_kind_b
@@ -2413,8 +2429,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
Node* DerefIndirectString(TNode<String> string, TNode<Int32T> instance_type,
Label* cannot_deref);
- TNode<String> StringFromSingleCodePoint(TNode<Int32T> codepoint,
- UnicodeEncoding encoding);
+ TNode<String> StringFromSingleUTF16EncodedCodePoint(TNode<Int32T> codepoint);
// Type conversion helpers.
enum class BigIntHandling { kConvertToNumber, kThrow };
@@ -2578,7 +2593,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
TNode<BoolT> IsSetSmi(SloppyTNode<Smi> smi, int untagged_mask) {
intptr_t mask_word = bit_cast<intptr_t>(Smi::FromInt(untagged_mask));
return WordNotEqual(
- WordAnd(BitcastTaggedToWord(smi), IntPtrConstant(mask_word)),
+ WordAnd(BitcastTaggedSignedToWord(smi), IntPtrConstant(mask_word)),
IntPtrConstant(0));
}
@@ -2950,11 +2965,11 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
// If it can't handle the {receiver}/{key} case then control goes
// to {if_bailout}.
// If {if_proxy} is nullptr, proxies go to if_bailout.
- void TryPrototypeChainLookup(Node* receiver, Node* key,
+ void TryPrototypeChainLookup(Node* receiver, Node* object, Node* key,
const LookupInHolder& lookup_property_in_holder,
const LookupInHolder& lookup_element_in_holder,
Label* if_end, Label* if_bailout,
- Label* if_proxy = nullptr);
+ Label* if_proxy);
// Instanceof helpers.
// Returns true if {object} has {prototype} somewhere in its prototype
@@ -3055,7 +3070,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
void EmitElementStore(Node* object, Node* key, Node* value,
ElementsKind elements_kind,
KeyedAccessStoreMode store_mode, Label* bailout,
- Node* context);
+ Node* context,
+ Variable* maybe_converted_value = nullptr);
Node* CheckForCapacityGrow(Node* object, Node* elements, ElementsKind kind,
Node* length, Node* key, ParameterMode mode,
@@ -3204,8 +3220,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
Node* Equal(Node* lhs, Node* rhs, Node* context,
Variable* var_type_feedback = nullptr);
- Node* StrictEqual(Node* lhs, Node* rhs,
- Variable* var_type_feedback = nullptr);
+ TNode<Oddball> StrictEqual(SloppyTNode<Object> lhs, SloppyTNode<Object> rhs,
+ Variable* var_type_feedback = nullptr);
// ECMA#sec-samevalue
// Similar to StrictEqual except that NaNs are treated as equal and minus zero
@@ -3248,13 +3264,11 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
// Debug helpers
Node* IsDebugActive();
- TNode<BoolT> IsRuntimeCallStatsEnabled();
-
// JSArrayBuffer helpers
TNode<Uint32T> LoadJSArrayBufferBitField(TNode<JSArrayBuffer> array_buffer);
TNode<RawPtrT> LoadJSArrayBufferBackingStore(
TNode<JSArrayBuffer> array_buffer);
- Node* IsDetachedBuffer(Node* buffer);
+ TNode<BoolT> IsDetachedBuffer(TNode<JSArrayBuffer> buffer);
void ThrowIfArrayBufferIsDetached(SloppyTNode<Context> context,
TNode<JSArrayBuffer> array_buffer,
const char* method_name);
@@ -3301,12 +3315,6 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
Node* IsPromiseHookEnabledOrHasAsyncEventDelegate();
Node* IsPromiseHookEnabledOrDebugIsActiveOrHasAsyncEventDelegate();
- // Helpers for StackFrame markers.
- Node* MarkerIsFrameType(Node* marker_or_function,
- StackFrame::Type frame_type);
- Node* MarkerIsNotFrameType(Node* marker_or_function,
- StackFrame::Type frame_type);
-
// for..in helpers
void CheckPrototypeEnumCache(Node* receiver, Node* receiver_map,
Label* if_fast, Label* if_slow);
@@ -3589,9 +3597,10 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
private:
// Low-level accessors for Descriptor arrays.
- TNode<MaybeObject> LoadDescriptorArrayElement(TNode<DescriptorArray> object,
- Node* index,
- int additional_offset = 0);
+ template <typename T>
+ TNode<T> LoadDescriptorArrayElement(TNode<DescriptorArray> object,
+ TNode<IntPtrT> index,
+ int additional_offset);
};
class V8_EXPORT_PRIVATE CodeStubArguments {