Diffstat (limited to 'deps/v8/src/interpreter/interpreter-assembler.h')
-rw-r--r--   deps/v8/src/interpreter/interpreter-assembler.h   177
1 file changed, 110 insertions, 67 deletions
diff --git a/deps/v8/src/interpreter/interpreter-assembler.h b/deps/v8/src/interpreter/interpreter-assembler.h
index 63d1709145..cb622d0b2d 100644
--- a/deps/v8/src/interpreter/interpreter-assembler.h
+++ b/deps/v8/src/interpreter/interpreter-assembler.h
@@ -56,9 +56,6 @@ class V8_EXPORT_PRIVATE InterpreterAssembler : public CodeStubAssembler {
// Returns the smi immediate for bytecode operand |operand_index| in the
// current bytecode.
compiler::Node* BytecodeOperandImmSmi(int operand_index);
- // Returns the word-size sign-extended register index for bytecode operand
- // |operand_index| in the current bytecode.
- compiler::Node* BytecodeOperandReg(int operand_index);
// Returns the 32-bit unsigned runtime id immediate for bytecode operand
// |operand_index| in the current bytecode.
compiler::Node* BytecodeOperandRuntimeId(int operand_index);
@@ -86,31 +83,58 @@ class V8_EXPORT_PRIVATE InterpreterAssembler : public CodeStubAssembler {
void GotoIfHasContextExtensionUpToDepth(compiler::Node* context,
compiler::Node* depth, Label* target);
+ // A RegListNodePair provides an abstraction over lists of registers.
+ class RegListNodePair {
+ public:
+ RegListNodePair(Node* base_reg_location, Node* reg_count)
+ : base_reg_location_(base_reg_location), reg_count_(reg_count) {}
+
+ compiler::Node* reg_count() const { return reg_count_; }
+ compiler::Node* base_reg_location() const { return base_reg_location_; }
+
+ private:
+ compiler::Node* base_reg_location_;
+ compiler::Node* reg_count_;
+ };
+
// Backup/restore register file to/from a fixed array of the correct length.
compiler::Node* ExportRegisterFile(compiler::Node* array,
- compiler::Node* register_count);
+ const RegListNodePair& registers);
compiler::Node* ImportRegisterFile(compiler::Node* array,
- compiler::Node* register_count);
+ const RegListNodePair& registers);
// Loads from and stores to the interpreter register file.
compiler::Node* LoadRegister(Register reg);
- compiler::Node* LoadRegister(compiler::Node* reg_index);
compiler::Node* LoadAndUntagRegister(Register reg);
- compiler::Node* StoreRegister(compiler::Node* value, Register reg);
- compiler::Node* StoreRegister(compiler::Node* value,
- compiler::Node* reg_index);
- compiler::Node* StoreAndTagRegister(compiler::Node* value, Register reg);
-
- // Returns the next consecutive register.
- compiler::Node* NextRegister(compiler::Node* reg_index);
-
- // Returns the location in memory of the register |reg_index| in the
- // interpreter register file.
- compiler::Node* RegisterLocation(compiler::Node* reg_index);
-
+ compiler::Node* LoadRegisterAtOperandIndex(int operand_index);
+ std::pair<compiler::Node*, compiler::Node*> LoadRegisterPairAtOperandIndex(
+ int operand_index);
+ void StoreRegister(compiler::Node* value, Register reg);
+ void StoreAndTagRegister(compiler::Node* value, Register reg);
+ void StoreRegisterAtOperandIndex(compiler::Node* value, int operand_index);
+ void StoreRegisterPairAtOperandIndex(compiler::Node* value1,
+ compiler::Node* value2,
+ int operand_index);
+ void StoreRegisterTripleAtOperandIndex(compiler::Node* value1,
+ compiler::Node* value2,
+ compiler::Node* value3,
+ int operand_index);
+
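Taken together, the operand-indexed load/store helpers above let a handler move values between registers without ever touching raw register index nodes. A minimal sketch of a Mov-style handler body, assuming it lives in a class deriving from InterpreterAssembler (as V8's generated bytecode handlers do) and that operand 0 is the source register and operand 1 the destination; the handler itself is hypothetical, not part of this patch:

// Hypothetical Mov-like handler body (operand layout assumed).
void DoMovSketch() {
  compiler::Node* value = LoadRegisterAtOperandIndex(0);  // read source register
  StoreRegisterAtOperandIndex(value, 1);                  // write destination register
  Dispatch();                                             // continue with the next bytecode
}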
+ RegListNodePair GetRegisterListAtOperandIndex(int operand_index);
+ Node* LoadRegisterFromRegisterList(const RegListNodePair& reg_list,
+ int index);
+ Node* RegisterLocationInRegisterList(const RegListNodePair& reg_list,
+ int index);
+
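A RegListNodePair is normally obtained from GetRegisterListAtOperandIndex and then either handed to a call helper whole or read element by element. A hedged sketch of element-wise access, assuming operand 0 of the current bytecode encodes a register list (an assumption for illustration):

// Inside an InterpreterAssembler-derived handler generator (sketch).
RegListNodePair list = GetRegisterListAtOperandIndex(0);
compiler::Node* count = list.reg_count();                             // number of registers in the list
compiler::Node* first = LoadRegisterFromRegisterList(list, 0);        // value of the first register
compiler::Node* second_loc = RegisterLocationInRegisterList(list, 1); // location of the second register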
+ // Load constant at the index specified in operand |operand_index| from the
+ // constant pool.
+ compiler::Node* LoadConstantPoolEntryAtOperandIndex(int operand_index);
+ // Load and untag constant at the index specified in operand |operand_index|
+ // from the constant pool.
+ compiler::Node* LoadAndUntagConstantPoolEntryAtOperandIndex(
+ int operand_index);
// Load constant at |index| in the constant pool.
compiler::Node* LoadConstantPoolEntry(compiler::Node* index);
-
// Load and untag constant at |index| in the constant pool.
compiler::Node* LoadAndUntagConstantPoolEntry(compiler::Node* index);
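The operand-indexed variants fold the index decode and the pool load into one call. A hedged sketch of an LdaConstant-style handler body, assuming operand 0 holds a constant pool index and that the header's existing SetAccumulator helper is available (both assumptions for illustration):

// Inside an InterpreterAssembler-derived handler generator (sketch).
compiler::Node* constant = LoadConstantPoolEntryAtOperandIndex(0);
SetAccumulator(constant);  // place the loaded constant in the accumulator
Dispatch();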
@@ -135,12 +159,11 @@ class V8_EXPORT_PRIVATE InterpreterAssembler : public CodeStubAssembler {
compiler::Node* feedback_vector,
compiler::Node* slot_id);
- // Call JSFunction or Callable |function| with |arg_count| arguments (not
- // including receiver) and the first argument located at |first_arg|, possibly
+ // Call JSFunction or Callable |function| with |args| arguments, possibly
// including the receiver depending on |receiver_mode|. After the call returns,
// directly dispatches to the next bytecode.
void CallJSAndDispatch(compiler::Node* function, compiler::Node* context,
- compiler::Node* first_arg, compiler::Node* arg_count,
+ const RegListNodePair& args,
ConvertReceiverMode receiver_mode);
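With the register-list abstraction, a Call-style handler no longer threads a first-argument location and an argument count through separately. A hedged sketch, assuming operand 0 is the callee register, operand 1 the argument register list, and that the header's existing GetContext helper is available (assumptions for illustration):

// Inside an InterpreterAssembler-derived handler generator (sketch).
compiler::Node* function = LoadRegisterAtOperandIndex(0);
compiler::Node* context = GetContext();
RegListNodePair args = GetRegisterListAtOperandIndex(1);
// Performs the call and then dispatches to the next bytecode.
CallJSAndDispatch(function, context, args, ConvertReceiverMode::kAny);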
// Call JSFunction or Callable |function| with |arg_count| arguments (not
@@ -151,46 +174,41 @@ class V8_EXPORT_PRIVATE InterpreterAssembler : public CodeStubAssembler {
void CallJSAndDispatch(Node* function, Node* context, Node* arg_count,
ConvertReceiverMode receiver_mode, TArgs... args);
- // Call JSFunction or Callable |function| with |arg_count|
- // arguments (not including receiver) and the first argument
- // located at |first_arg|, and the final argument being spread. After the call
- // returns directly dispatches to the next bytecode.
+ // Call JSFunction or Callable |function| with |args|
+ // arguments (not including receiver), and the final argument being spread.
+ // After the call returns, directly dispatches to the next bytecode.
void CallJSWithSpreadAndDispatch(compiler::Node* function,
compiler::Node* context,
- compiler::Node* first_arg,
- compiler::Node* arg_count,
+ const RegListNodePair& args,
compiler::Node* slot_id,
compiler::Node* feedback_vector);
- // Call constructor |target| with |arg_count| arguments (not
- // including receiver) and the first argument located at
- // |first_arg|. The |new_target| is the same as the
- // |target| for the new keyword, but differs for the super
- // keyword.
+ // Call constructor |target| with |args| arguments (not including receiver).
+ // The |new_target| is the same as the |target| for the new keyword, but
+ // differs for the super keyword.
compiler::Node* Construct(compiler::Node* target, compiler::Node* context,
compiler::Node* new_target,
- compiler::Node* first_arg,
- compiler::Node* arg_count, compiler::Node* slot_id,
+ const RegListNodePair& args,
+ compiler::Node* slot_id,
compiler::Node* feedback_vector);
- // Call constructor |target| with |arg_count| arguments (not including
- // receiver) and the first argument located at |first_arg|. The last argument
- // is always a spread. The |new_target| is the same as the |target| for
- // the new keyword, but differs for the super keyword.
+ // Call constructor |target| with |args| arguments (not including
+ // receiver). The last argument is always a spread. The |new_target| is the
+ // same as the |target| for the new keyword, but differs for the super
+ // keyword.
compiler::Node* ConstructWithSpread(compiler::Node* target,
compiler::Node* context,
compiler::Node* new_target,
- compiler::Node* first_arg,
- compiler::Node* arg_count,
+ const RegListNodePair& args,
compiler::Node* slot_id,
compiler::Node* feedback_vector);
- // Call runtime function with |arg_count| arguments and the first argument
- // located at |first_arg|.
+ // Call runtime function with |args| arguments, which will return |return_size|
+ // values.
compiler::Node* CallRuntimeN(compiler::Node* function_id,
compiler::Node* context,
- compiler::Node* first_arg,
- compiler::Node* arg_count, int return_size = 1);
+ const RegListNodePair& args,
+ int return_size = 1);
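CallRuntimeN takes its argument registers as a pair as well, and |return_size| allows calling runtime functions that return more than one value. A hedged sketch modelled on a runtime-call-for-pair handler, assuming operand 0 is the runtime id, operand 1 the argument list, operand 2 the output register pair, and that CodeStubAssembler's Projection helper is available (assumptions for illustration):

// Inside an InterpreterAssembler-derived handler generator (sketch).
compiler::Node* function_id = BytecodeOperandRuntimeId(0);
RegListNodePair args = GetRegisterListAtOperandIndex(1);
compiler::Node* context = GetContext();
// Runtime call that returns two values; unpack them with Projection.
compiler::Node* result_pair = CallRuntimeN(function_id, context, args, 2);
compiler::Node* result0 = Projection(0, result_pair);
compiler::Node* result1 = Projection(1, result_pair);
StoreRegisterPairAtOperandIndex(result0, result1, 2);  // operand 2: output register pair (assumed)
Dispatch();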
// Jump forward relative to the current bytecode by the |jump_offset|.
compiler::Node* Jump(compiler::Node* jump_offset);
@@ -217,14 +235,14 @@ class V8_EXPORT_PRIVATE InterpreterAssembler : public CodeStubAssembler {
// Dispatch to the bytecode.
compiler::Node* Dispatch();
- // Dispatch to bytecode handler.
- compiler::Node* DispatchToBytecodeHandler(compiler::Node* handler) {
- return DispatchToBytecodeHandler(handler, BytecodeOffset());
- }
-
// Dispatch bytecode as wide operand variant.
void DispatchWide(OperandScale operand_scale);
+ // Dispatch to |target_bytecode| at |new_bytecode_offset|.
+ // |target_bytecode| should be equivalent to loading from the offset.
+ compiler::Node* DispatchToBytecode(compiler::Node* target_bytecode,
+ compiler::Node* new_bytecode_offset);
+
// Abort with the given abort reason.
void Abort(AbortReason abort_reason);
void AbortIfWordNotEqual(compiler::Node* lhs, compiler::Node* rhs,
@@ -264,6 +282,18 @@ class V8_EXPORT_PRIVATE InterpreterAssembler : public CodeStubAssembler {
// interpreted.
compiler::Node* GetInterpretedFramePointer();
+ // Operations on registers.
+ compiler::Node* RegisterLocation(Register reg);
+ compiler::Node* RegisterLocation(compiler::Node* reg_index);
+ compiler::Node* NextRegister(compiler::Node* reg_index);
+ compiler::Node* LoadRegister(Node* reg_index);
+ void StoreRegister(compiler::Node* value, compiler::Node* reg_index);
+
+ // Poison |value| on speculative paths.
+ compiler::Node* PoisonOnSpeculationTagged(Node* value);
+ compiler::Node* PoisonOnSpeculationWord(Node* value);
+ compiler::Node* PoisonOnSpeculationInt32(Node* value);
+
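These helpers exist to blunt Spectre-style speculative reads: values loaded under a possibly mispredicted dispatch are masked with |speculation_poison_| so they become harmless on the wrong path. The header only declares them; the following is a minimal plain-C++ illustration of the masking idea under that assumption, not V8's actual implementation:

#include <cstdint>

// Conceptually, the poison mask is all ones on the correctly predicted path
// and all zeroes on a mispredicted one, so ANDing neutralizes the value.
uint64_t PoisonOnSpeculationSketch(uint64_t value, uint64_t poison_mask) {
  return value & poison_mask;  // value survives only when poison_mask == ~0ull
}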
// Saves and restores interpreter bytecode offset to the interpreter stack
// frame when performing a call.
void CallPrologue();
@@ -291,16 +321,21 @@ class V8_EXPORT_PRIVATE InterpreterAssembler : public CodeStubAssembler {
// The |result_type| determines the size and signedness of the
// value read. This method should only be used on architectures that
// do not support unaligned memory accesses.
- compiler::Node* BytecodeOperandReadUnaligned(int relative_offset,
- MachineType result_type);
-
- // Returns zero- or sign-extended to word32 value of the operand.
- compiler::Node* BytecodeOperandUnsignedByte(int operand_index);
- compiler::Node* BytecodeOperandSignedByte(int operand_index);
- compiler::Node* BytecodeOperandUnsignedShort(int operand_index);
- compiler::Node* BytecodeOperandSignedShort(int operand_index);
- compiler::Node* BytecodeOperandUnsignedQuad(int operand_index);
- compiler::Node* BytecodeOperandSignedQuad(int operand_index);
+ compiler::Node* BytecodeOperandReadUnalignedUnpoisoned(
+ int relative_offset, MachineType result_type);
+
+ // Returns zero- or sign-extended to word32 value of the operand. Values are
+ // not poisoned on speculation - should be used with care.
+ compiler::Node* BytecodeOperandUnsignedByteUnpoisoned(int operand_index);
+ compiler::Node* BytecodeOperandSignedByteUnpoisoned(int operand_index);
+ compiler::Node* BytecodeOperandUnsignedShortUnpoisoned(int operand_index);
+ compiler::Node* BytecodeOperandSignedShortUnpoisoned(int operand_index);
+ compiler::Node* BytecodeOperandUnsignedQuadUnpoisoned(int operand_index);
+ compiler::Node* BytecodeOperandSignedQuadUnpoisoned(int operand_index);
+ compiler::Node* BytecodeSignedOperandUnpoisoned(int operand_index,
+ OperandSize operand_size);
+ compiler::Node* BytecodeUnsignedOperandUnpoisoned(int operand_index,
+ OperandSize operand_size);
// Returns zero- or sign-extended to word32 value of the operand of
// given size.
@@ -309,6 +344,15 @@ class V8_EXPORT_PRIVATE InterpreterAssembler : public CodeStubAssembler {
compiler::Node* BytecodeUnsignedOperand(int operand_index,
OperandSize operand_size);
+ // Returns the word-size sign-extended register index for bytecode operand
+ // |operand_index| in the current bytecode. Value is not poisoned on
+ // speculation since the value loaded from the register is poisoned instead.
+ compiler::Node* BytecodeOperandRegUnpoisoned(int operand_index);
+
+ // Returns the word zero-extended index immediate for bytecode operand
+ // |operand_index| in the current bytecode, for use when loading a constant
+ // pool entry.
+ compiler::Node* BytecodeOperandConstantPoolIdxUnpoisoned(int operand_index);
+
// Jump relative to the current bytecode by the |jump_offset|. If |backward|,
// then jump backward (subtract the offset), otherwise jump forward (add the
// offset). Helper function for Jump and JumpBackward.
@@ -344,18 +388,15 @@ class V8_EXPORT_PRIVATE InterpreterAssembler : public CodeStubAssembler {
// next dispatch offset.
void InlineStar();
- // Dispatch to |target_bytecode| at |new_bytecode_offset|.
- // |target_bytecode| should be equivalent to loading from the offset.
- compiler::Node* DispatchToBytecode(compiler::Node* target_bytecode,
- compiler::Node* new_bytecode_offset);
-
// Dispatch to the bytecode handler with code offset |handler|.
compiler::Node* DispatchToBytecodeHandler(compiler::Node* handler,
- compiler::Node* bytecode_offset);
+ compiler::Node* bytecode_offset,
+ compiler::Node* target_bytecode);
// Dispatch to the bytecode handler with code entry point |handler_entry|.
compiler::Node* DispatchToBytecodeHandlerEntry(
- compiler::Node* handler_entry, compiler::Node* bytecode_offset);
+ compiler::Node* handler_entry, compiler::Node* bytecode_offset,
+ compiler::Node* target_bytecode);
int CurrentBytecodeSize() const;
@@ -373,6 +414,8 @@ class V8_EXPORT_PRIVATE InterpreterAssembler : public CodeStubAssembler {
bool reloaded_frame_ptr_;
bool bytecode_array_valid_;
+ Node* speculation_poison_;
+
bool disable_stack_check_across_call_;
compiler::Node* stack_pointer_before_call_;