 deps/v8/src/compiler/code-generator.h | 57 +++++++++++++++++++++++++++++++---
 1 file changed, 54 insertions(+), 3 deletions(-)
diff --git a/deps/v8/src/compiler/code-generator.h b/deps/v8/src/compiler/code-generator.h
index 425ea2ebf2..a91ae0212a 100644
--- a/deps/v8/src/compiler/code-generator.h
+++ b/deps/v8/src/compiler/code-generator.h
@@ -86,7 +86,8 @@ class CodeGenerator final : public GapResolver::Assembler {
int start_source_position,
JumpOptimizationInfo* jump_opt,
std::vector<trap_handler::ProtectedInstructionData>*
- protected_instructions);
+ protected_instructions,
+ LoadPoisoning load_poisoning);
// Generate native code. After calling AssembleCode, call FinalizeCode to
// produce the actual code object. If an error occurs during either phase,
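The constructor now receives the poisoning mode and stores it (see the new
load_poisoning_ field near the bottom of the class). A minimal sketch of what
such a two-state switch could look like; the real enum is defined elsewhere in
the compiler, so its exact shape here is an assumption:

    // Hypothetical sketch -- the actual definition lives elsewhere in V8.
    // A two-state switch is assumed: loads are either masked or left alone.
    enum class LoadPoisoning { kDontPoison, kDoPoison };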
@@ -95,7 +96,6 @@ class CodeGenerator final : public GapResolver::Assembler {
Handle<Code> FinalizeCode();
Handle<ByteArray> GetSourcePositionTable();
- MaybeHandle<HandlerTable> GetHandlerTable() const;
InstructionSequence* code() const { return code_; }
FrameAccessState* frame_access_state() const { return frame_access_state_; }
@@ -122,6 +122,7 @@ class CodeGenerator final : public GapResolver::Assembler {
Zone* zone() const { return zone_; }
TurboAssembler* tasm() { return &tasm_; }
size_t GetSafepointTableOffset() const { return safepoints_.GetCodeOffset(); }
+ size_t GetHandlerTableOffset() const { return handler_table_offset_; }
private:
GapResolver* resolver() { return &resolver_; }
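Together with the removal of GetHandlerTable() above, this accessor suggests
the handler table is now emitted into the instruction stream itself, with only
its start offset recorded. A hedged sketch of the assumed consumer side (not
the actual FinalizeCode logic):

    // Hypothetical sketch: with the table inline, a later phase can locate
    // it from the code object's instruction start plus the recorded offset.
    Address handler_table_start =
        code->instruction_start() + code_generator->GetHandlerTableOffset();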
@@ -150,11 +151,25 @@ class CodeGenerator final : public GapResolver::Assembler {
// Assemble instructions for the specified block.
CodeGenResult AssembleBlock(const InstructionBlock* block);
+ // Inserts a mask update at the beginning of an instruction block if the
+ // predecessor block ends with a masking branch.
+ void TryInsertBranchPoisoning(const InstructionBlock* block);
+
+ // Initializes the masking register.
+ // Eventually, this should always be threaded through from the caller
+ // (in the prologue) or from a callee (after a call).
+ void InitializePoisonForLoadsIfNeeded();
+
// Assemble code for the specified instruction.
CodeGenResult AssembleInstruction(Instruction* instr,
const InstructionBlock* block);
void AssembleGaps(Instruction* instr);
+ // Computes branch info from the given instruction. If the branch is
+ // redundant, returns a valid rpo number that points to the target basic
+ // block.
+ RpoNumber ComputeBranchInfo(BranchInfo* branch, Instruction* instr);
+
// Returns true if an instruction is a tail call that needs to adjust the stack
// pointer before execution. The stack slot index to the empty slot above the
// adjusted stack pointer is returned in |slot|.
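The poison mask these helpers maintain is the core of the mitigation: it stays
all-ones along the architecturally taken path and collapses to all-zeros on
any path the CPU reaches only by misspeculation, so loads masked with it
cannot leak secrets. A minimal C++ illustration of the idea (an exposition
aid, not V8's code; the real updates are emitted as branchless machine code):

    #include <cstdint>

    // Mask a loaded value: on a misspeculated path mask == 0, so the load
    // result is forced to zero instead of secret data.
    uint64_t PoisonedLoad(const uint64_t* addr, uint64_t mask) {
      return *addr & mask;
    }

    // A masking branch folds its condition into the mask. Real backends do
    // this with a conditional move, so the mask is already correct while
    // the branch outcome is still being speculated.
    uint64_t UpdateMaskOnTakenPath(uint64_t mask, bool condition_held) {
      return condition_held ? mask : uint64_t{0};
    }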
@@ -179,12 +194,26 @@ class CodeGenerator final : public GapResolver::Assembler {
void AssembleArchLookupSwitch(Instruction* instr);
void AssembleArchTableSwitch(Instruction* instr);
+ // Generates code that checks whether the {kJavaScriptCallCodeStartRegister}
+ // contains the expected pointer to the start of the instruction stream.
+ void AssembleCodeStartRegisterCheck();
+
+ void AssembleBranchPoisoning(FlagsCondition condition, Instruction* instr);
+
// When entering code that is marked for deoptimization, rather than
// continuing with its execution, we jump to lazily compiled code. We need to
// do this because this code has already been deoptimized and needs to be
// unlinked from the JS functions referring to it.
void BailoutIfDeoptimized();
+ // Generates a mask which can be used to poison values when we detect
+ // the code is executing speculatively.
+ void GenerateSpeculationPoison();
+
+ // Generates code to poison the stack pointer and implicit register arguments
+ // like the context register and the function register.
+ void AssembleRegisterArgumentPoisoning();
+
// Generates an architecture-specific, descriptor-specific prologue
// to set up a stack frame.
void AssembleConstructFrame();
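The mask itself is derived from the code-start check described above:
kJavaScriptCallCodeStartRegister matches the actual code start on a legitimate
entry and mismatches when the CPU speculated into the wrong code object. A
hedged sketch of that derivation (GenerateSpeculationPoison emits the real,
architecture-specific version):

    #include <cstdint>

    // All-ones when expected == actual (legitimate entry), all-zeros
    // otherwise. Branchless: (diff | -diff) has its top bit set exactly
    // when diff != 0, so no branch exists for the CPU to mispredict.
    uint64_t ComputePoisonMask(uint64_t expected_code_start,
                               uint64_t actual_code_start) {
      uint64_t diff = expected_code_start ^ actual_code_start;
      return ((diff | (0 - diff)) >> 63) - 1;
    }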
@@ -224,6 +253,26 @@ class CodeGenerator final : public GapResolver::Assembler {
PushTypeFlags push_type,
ZoneVector<MoveOperands*>* pushes);
+ class MoveType {
+ public:
+ enum Type {
+ kRegisterToRegister,
+ kRegisterToStack,
+ kStackToRegister,
+ kStackToStack,
+ kConstantToRegister,
+ kConstantToStack
+ };
+
+ // Detect what type of move or swap needs to be performed. Note that these
+ // functions do not take into account the representation (Tagged, FP,
+ // etc.).
+
+ static Type InferMove(InstructionOperand* source,
+ InstructionOperand* destination);
+ static Type InferSwap(InstructionOperand* source,
+ InstructionOperand* destination);
+ };
// Called before a tail call |instr|'s gap moves are assembled and allows
// gap-specific pre-processing, e.g. adjustment of the sp for tail calls that
// need it before gap moves or conversion of certain gap moves into pushes.
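Classifying the operand pair up front lets each backend's AssembleMove
collapse into a single switch instead of nested operand checks. A sketch of
the intended dispatch; the Emit* helpers are illustrative placeholders for
per-architecture emission, not V8 API:

    // Hypothetical dispatch -- Emit* names are illustrative only.
    void AssembleMoveSketch(InstructionOperand* source,
                            InstructionOperand* destination) {
      switch (MoveType::InferMove(source, destination)) {
        case MoveType::kRegisterToRegister:
          EmitRegMove(source, destination);
          break;
        case MoveType::kRegisterToStack:
          EmitStore(source, destination);
          break;
        case MoveType::kStackToRegister:
          EmitLoad(source, destination);
          break;
        case MoveType::kStackToStack:
          EmitMemMove(source, destination);  // typically via a scratch reg
          break;
        case MoveType::kConstantToRegister:
          EmitMaterialize(source, destination);
          break;
        case MoveType::kConstantToStack:
          EmitMaterializeAndStore(source, destination);
          break;
      }
    }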
@@ -346,6 +395,7 @@ class CodeGenerator final : public GapResolver::Assembler {
ZoneDeque<DeoptimizationLiteral> deoptimization_literals_;
size_t inlined_function_count_;
TranslationBuffer translations_;
+ int handler_table_offset_;
int last_lazy_deopt_pc_;
// kArchCallCFunction could be reached either:
@@ -368,10 +418,11 @@ class CodeGenerator final : public GapResolver::Assembler {
SourcePositionTableBuilder source_position_table_builder_;
std::vector<trap_handler::ProtectedInstructionData>* protected_instructions_;
CodeGenResult result_;
+ LoadPoisoning load_poisoning_;
};
} // namespace compiler
} // namespace internal
} // namespace v8
-#endif // V8_COMPILER_CODE_GENERATOR_H
+#endif // V8_COMPILER_CODE_GENERATOR_H_