diff options
Diffstat (limited to 'deps/v8/src/compiler/ppc/code-generator-ppc.cc')
-rw-r--r-- | deps/v8/src/compiler/ppc/code-generator-ppc.cc | 194 |
1 file changed, 140 insertions, 54 deletions
diff --git a/deps/v8/src/compiler/ppc/code-generator-ppc.cc b/deps/v8/src/compiler/ppc/code-generator-ppc.cc index 7fc537784c..6bdf8fa974 100644 --- a/deps/v8/src/compiler/ppc/code-generator-ppc.cc +++ b/deps/v8/src/compiler/ppc/code-generator-ppc.cc @@ -34,7 +34,9 @@ class PPCOperandConverter final : public InstructionOperandConverter { RCBit OutputRCBit() const { switch (instr_->flags_mode()) { case kFlags_branch: + case kFlags_branch_and_poison: case kFlags_deoptimize: + case kFlags_deoptimize_and_poison: case kFlags_set: case kFlags_trap: return SetRC; @@ -270,6 +272,16 @@ Condition FlagsConditionToCondition(FlagsCondition condition, ArchOpcode op) { UNREACHABLE(); } +void EmitWordLoadPoisoningIfNeeded(CodeGenerator* codegen, Instruction* instr, + PPCOperandConverter& i) { + const MemoryAccessMode access_mode = + static_cast<MemoryAccessMode>(MiscField::decode(instr->opcode())); + if (access_mode == kMemoryAccessPoisoned) { + Register value = i.OutputRegister(); + codegen->tasm()->and_(value, value, kSpeculationPoisonRegister); + } +} + } // namespace #define ASSEMBLE_FLOAT_UNOP_RC(asm_instr, round) \ @@ -779,21 +791,40 @@ void CodeGenerator::AssembleTailCallAfterGap(Instruction* instr, first_unused_stack_slot); } +// Check that {kJavaScriptCallCodeStartRegister} is correct. +void CodeGenerator::AssembleCodeStartRegisterCheck() { + Register scratch = kScratchReg; + + Label current_pc; + __ mov_label_addr(scratch, ¤t_pc); + + __ bind(¤t_pc); + __ subi(scratch, scratch, Operand(__ pc_offset())); + __ cmp(scratch, kJavaScriptCallCodeStartRegister); + __ Assert(eq, AbortReason::kWrongFunctionCodeStart); +} + // Check if the code object is marked for deoptimization. If it is, then it // jumps to the CompileLazyDeoptimizedCode builtin. In order to do this we need // to: -// 1. load the address of the current instruction; -// 2. read from memory the word that contains that bit, which can be found in +// 1. 
read from memory the word that contains that bit, which can be found in // the flags in the referenced {CodeDataContainer} object; -// 3. test kMarkedForDeoptimizationBit in those flags; and -// 4. if it is not zero then it jumps to the builtin. +// 2. test kMarkedForDeoptimizationBit in those flags; and +// 3. if it is not zero then it jumps to the builtin. void CodeGenerator::BailoutIfDeoptimized() { - Label current; - __ mov_label_addr(r11, ¤t); - int pc_offset = __ pc_offset(); - __ bind(¤t); - int offset = Code::kCodeDataContainerOffset - (Code::kHeaderSize + pc_offset); - __ LoadP(r11, MemOperand(r11, offset)); + if (FLAG_debug_code) { + // Check that {kJavaScriptCallCodeStartRegister} is correct. + Label current_pc; + __ mov_label_addr(ip, ¤t_pc); + + __ bind(¤t_pc); + __ subi(ip, ip, Operand(__ pc_offset())); + __ cmp(ip, kJavaScriptCallCodeStartRegister); + __ Assert(eq, AbortReason::kWrongFunctionCodeStart); + } + + int offset = Code::kCodeDataContainerOffset - Code::kHeaderSize; + __ LoadP(r11, MemOperand(kJavaScriptCallCodeStartRegister, offset)); __ LoadWordArith( r11, FieldMemOperand(r11, CodeDataContainer::kKindSpecificFlagsOffset)); __ TestBit(r11, Code::kMarkedForDeoptimizationBit); @@ -802,6 +833,37 @@ void CodeGenerator::BailoutIfDeoptimized() { __ Jump(code, RelocInfo::CODE_TARGET, ne, cr0); } +void CodeGenerator::GenerateSpeculationPoison() { + Register scratch = kScratchReg; + + Label current_pc; + __ mov_label_addr(scratch, ¤t_pc); + + __ bind(¤t_pc); + __ subi(scratch, scratch, Operand(__ pc_offset())); + + // Calculate a mask which has all bits set in the normal case, but has all + // bits cleared if we are speculatively executing the wrong PC. 
+ // difference = (current - expected) | (expected - current) + // poison = ~(difference >> (kBitsPerPointer - 1)) + __ mr(kSpeculationPoisonRegister, scratch); + __ sub(kSpeculationPoisonRegister, kSpeculationPoisonRegister, + kJavaScriptCallCodeStartRegister); + __ sub(kJavaScriptCallCodeStartRegister, kJavaScriptCallCodeStartRegister, + scratch); + __ orx(kSpeculationPoisonRegister, kSpeculationPoisonRegister, + kJavaScriptCallCodeStartRegister); + __ ShiftRightArithImm(kSpeculationPoisonRegister, kSpeculationPoisonRegister, + kBitsPerPointer - 1); + __ notx(kSpeculationPoisonRegister, kSpeculationPoisonRegister); +} + +void CodeGenerator::AssembleRegisterArgumentPoisoning() { + __ and_(kJSFunctionRegister, kJSFunctionRegister, kSpeculationPoisonRegister); + __ and_(kContextRegister, kContextRegister, kSpeculationPoisonRegister); + __ and_(sp, sp, kSpeculationPoisonRegister); +} + // Assembles an instruction after register allocation, producing machine code. CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( Instruction* instr) { @@ -909,9 +971,10 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( __ cmp(cp, kScratchReg); __ Assert(eq, AbortReason::kWrongFunctionContext); } - __ LoadP(ip, FieldMemOperand(func, JSFunction::kCodeOffset)); - __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); - __ Call(ip); + static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch"); + __ LoadP(r5, FieldMemOperand(func, JSFunction::kCodeOffset)); + __ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag)); + __ Call(r5); RecordCallPosition(instr); DCHECK_EQ(LeaveRC, i.OutputRCBit()); frame_access_state()->ClearSPDelta(); @@ -1808,26 +1871,33 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( #endif case kPPC_LoadWordU8: ASSEMBLE_LOAD_INTEGER(lbz, lbzx); + EmitWordLoadPoisoningIfNeeded(this, instr, i); break; case kPPC_LoadWordS8: ASSEMBLE_LOAD_INTEGER(lbz, lbzx); __ extsb(i.OutputRegister(), 
i.OutputRegister()); + EmitWordLoadPoisoningIfNeeded(this, instr, i); break; case kPPC_LoadWordU16: ASSEMBLE_LOAD_INTEGER(lhz, lhzx); + EmitWordLoadPoisoningIfNeeded(this, instr, i); break; case kPPC_LoadWordS16: ASSEMBLE_LOAD_INTEGER(lha, lhax); + EmitWordLoadPoisoningIfNeeded(this, instr, i); break; case kPPC_LoadWordU32: ASSEMBLE_LOAD_INTEGER(lwz, lwzx); + EmitWordLoadPoisoningIfNeeded(this, instr, i); break; case kPPC_LoadWordS32: ASSEMBLE_LOAD_INTEGER(lwa, lwax); + EmitWordLoadPoisoningIfNeeded(this, instr, i); break; #if V8_TARGET_ARCH_PPC64 case kPPC_LoadWord64: ASSEMBLE_LOAD_INTEGER(ld, ldx); + EmitWordLoadPoisoningIfNeeded(this, instr, i); break; #endif case kPPC_LoadFloat32: @@ -1856,47 +1926,47 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( case kPPC_StoreDouble: ASSEMBLE_STORE_DOUBLE(); break; - case kAtomicLoadInt8: + case kWord32AtomicLoadInt8: ASSEMBLE_ATOMIC_LOAD_INTEGER(lbz, lbzx); __ extsb(i.OutputRegister(), i.OutputRegister()); break; - case kAtomicLoadUint8: + case kWord32AtomicLoadUint8: ASSEMBLE_ATOMIC_LOAD_INTEGER(lbz, lbzx); break; - case kAtomicLoadInt16: + case kWord32AtomicLoadInt16: ASSEMBLE_ATOMIC_LOAD_INTEGER(lha, lhax); break; - case kAtomicLoadUint16: + case kWord32AtomicLoadUint16: ASSEMBLE_ATOMIC_LOAD_INTEGER(lhz, lhzx); break; - case kAtomicLoadWord32: + case kWord32AtomicLoadWord32: ASSEMBLE_ATOMIC_LOAD_INTEGER(lwz, lwzx); break; - case kAtomicStoreWord8: + case kWord32AtomicStoreWord8: ASSEMBLE_ATOMIC_STORE_INTEGER(stb, stbx); break; - case kAtomicStoreWord16: + case kWord32AtomicStoreWord16: ASSEMBLE_ATOMIC_STORE_INTEGER(sth, sthx); break; - case kAtomicStoreWord32: + case kWord32AtomicStoreWord32: ASSEMBLE_ATOMIC_STORE_INTEGER(stw, stwx); break; - case kAtomicExchangeInt8: + case kWord32AtomicExchangeInt8: ASSEMBLE_ATOMIC_EXCHANGE_INTEGER(lbarx, stbcx); __ extsb(i.OutputRegister(0), i.OutputRegister(0)); break; - case kAtomicExchangeUint8: + case kWord32AtomicExchangeUint8: 
ASSEMBLE_ATOMIC_EXCHANGE_INTEGER(lbarx, stbcx); break; - case kAtomicExchangeInt16: + case kWord32AtomicExchangeInt16: ASSEMBLE_ATOMIC_EXCHANGE_INTEGER(lharx, sthcx); __ extsh(i.OutputRegister(0), i.OutputRegister(0)); break; - case kAtomicExchangeUint16: + case kWord32AtomicExchangeUint16: ASSEMBLE_ATOMIC_EXCHANGE_INTEGER(lharx, sthcx); break; - case kAtomicExchangeWord32: + case kWord32AtomicExchangeWord32: ASSEMBLE_ATOMIC_EXCHANGE_INTEGER(lwarx, stwcx); break; default: @@ -1931,6 +2001,20 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) { if (!branch->fallthru) __ b(flabel); // no fallthru to flabel. } +void CodeGenerator::AssembleBranchPoisoning(FlagsCondition condition, + Instruction* instr) { + // TODO(John) Handle float comparisons (kUnordered[Not]Equal). + if (condition == kUnorderedEqual || condition == kUnorderedNotEqual) { + return; + } + + ArchOpcode op = instr->arch_opcode(); + condition = NegateFlagsCondition(condition); + __ li(kScratchReg, Operand::Zero()); + __ isel(FlagsConditionToCondition(condition, op), kSpeculationPoisonRegister, + kScratchReg, kSpeculationPoisonRegister, cr0); +} + void CodeGenerator::AssembleArchDeoptBranch(Instruction* instr, BranchInfo* branch) { AssembleArchBranch(instr, branch); @@ -1978,8 +2062,9 @@ void CodeGenerator::AssembleArchTrap(Instruction* instr, __ isolate()), 0); __ LeaveFrame(StackFrame::WASM_COMPILED); - CallDescriptor* descriptor = gen_->linkage()->GetIncomingDescriptor(); - int pop_count = static_cast<int>(descriptor->StackParameterCount()); + auto call_descriptor = gen_->linkage()->GetIncomingDescriptor(); + int pop_count = + static_cast<int>(call_descriptor->StackParameterCount()); __ Drop(pop_count); __ Ret(); } else { @@ -2109,8 +2194,8 @@ void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) { } void CodeGenerator::FinishFrame(Frame* frame) { - CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); - const RegList double_saves = 
descriptor->CalleeSavedFPRegisters(); + auto call_descriptor = linkage()->GetIncomingDescriptor(); + const RegList double_saves = call_descriptor->CalleeSavedFPRegisters(); // Save callee-saved Double registers. if (double_saves != 0) { @@ -2121,10 +2206,10 @@ void CodeGenerator::FinishFrame(Frame* frame) { (kDoubleSize / kPointerSize)); } // Save callee-saved registers. - const RegList saves = - FLAG_enable_embedded_constant_pool - ? descriptor->CalleeSavedRegisters() & ~kConstantPoolRegister.bit() - : descriptor->CalleeSavedRegisters(); + const RegList saves = FLAG_enable_embedded_constant_pool + ? call_descriptor->CalleeSavedRegisters() & + ~kConstantPoolRegister.bit() + : call_descriptor->CalleeSavedRegisters(); if (saves != 0) { // register save area does not include the fp or constant pool pointer. const int num_saves = @@ -2135,9 +2220,9 @@ void CodeGenerator::FinishFrame(Frame* frame) { } void CodeGenerator::AssembleConstructFrame() { - CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); + auto call_descriptor = linkage()->GetIncomingDescriptor(); if (frame_access_state()->has_frame()) { - if (descriptor->IsCFunctionCall()) { + if (call_descriptor->IsCFunctionCall()) { __ function_descriptor(); __ mflr(r0); if (FLAG_enable_embedded_constant_pool) { @@ -2148,9 +2233,9 @@ void CodeGenerator::AssembleConstructFrame() { __ Push(r0, fp); __ mr(fp, sp); } - } else if (descriptor->IsJSFunctionCall()) { - __ Prologue(ip); - if (descriptor->PushArgumentCount()) { + } else if (call_descriptor->IsJSFunctionCall()) { + __ Prologue(); + if (call_descriptor->PushArgumentCount()) { __ Push(kJavaScriptCallArgCountRegister); } } else { @@ -2161,8 +2246,8 @@ void CodeGenerator::AssembleConstructFrame() { } } - int shrink_slots = - frame()->GetTotalFrameSlotCount() - descriptor->CalculateFixedFrameSize(); + int shrink_slots = frame()->GetTotalFrameSlotCount() - + call_descriptor->CalculateFixedFrameSize(); if (info()->is_osr()) { // TurboFan OSR-compiled 
functions cannot be entered directly. __ Abort(AbortReason::kShouldNotDirectlyEnterOsrFunction); @@ -2174,9 +2259,10 @@ void CodeGenerator::AssembleConstructFrame() { if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --"); osr_pc_offset_ = __ pc_offset(); shrink_slots -= osr_helper()->UnoptimizedFrameSlots(); + InitializePoisonForLoadsIfNeeded(); } - const RegList double_saves = descriptor->CalleeSavedFPRegisters(); + const RegList double_saves = call_descriptor->CalleeSavedFPRegisters(); if (shrink_slots > 0) { __ Add(sp, sp, -shrink_slots * kPointerSize, r0); } @@ -2189,10 +2275,10 @@ void CodeGenerator::AssembleConstructFrame() { } // Save callee-saved registers. - const RegList saves = - FLAG_enable_embedded_constant_pool - ? descriptor->CalleeSavedRegisters() & ~kConstantPoolRegister.bit() - : descriptor->CalleeSavedRegisters(); + const RegList saves = FLAG_enable_embedded_constant_pool + ? call_descriptor->CalleeSavedRegisters() & + ~kConstantPoolRegister.bit() + : call_descriptor->CalleeSavedRegisters(); if (saves != 0) { __ MultiPush(saves); // register save area does not include the fp or constant pool pointer. @@ -2200,26 +2286,26 @@ void CodeGenerator::AssembleConstructFrame() { } void CodeGenerator::AssembleReturn(InstructionOperand* pop) { - CallDescriptor* descriptor = linkage()->GetIncomingDescriptor(); - int pop_count = static_cast<int>(descriptor->StackParameterCount()); + auto call_descriptor = linkage()->GetIncomingDescriptor(); + int pop_count = static_cast<int>(call_descriptor->StackParameterCount()); // Restore registers. - const RegList saves = - FLAG_enable_embedded_constant_pool - ? descriptor->CalleeSavedRegisters() & ~kConstantPoolRegister.bit() - : descriptor->CalleeSavedRegisters(); + const RegList saves = FLAG_enable_embedded_constant_pool + ? 
call_descriptor->CalleeSavedRegisters() & + ~kConstantPoolRegister.bit() + : call_descriptor->CalleeSavedRegisters(); if (saves != 0) { __ MultiPop(saves); } // Restore double registers. - const RegList double_saves = descriptor->CalleeSavedFPRegisters(); + const RegList double_saves = call_descriptor->CalleeSavedFPRegisters(); if (double_saves != 0) { __ MultiPopDoubles(double_saves); } PPCOperandConverter g(this, nullptr); - if (descriptor->IsCFunctionCall()) { + if (call_descriptor->IsCFunctionCall()) { AssembleDeconstructFrame(); } else if (frame_access_state()->has_frame()) { // Canonicalize JSFunction return sites for now unless they have an variable |