Diffstat (limited to 'deps/v8/src/compiler/x64/code-generator-x64.cc')
-rw-r--r--  deps/v8/src/compiler/x64/code-generator-x64.cc  58
1 file changed, 50 insertions(+), 8 deletions(-)
diff --git a/deps/v8/src/compiler/x64/code-generator-x64.cc b/deps/v8/src/compiler/x64/code-generator-x64.cc
index 3ad07ad7de..4c991718f8 100644
--- a/deps/v8/src/compiler/x64/code-generator-x64.cc
+++ b/deps/v8/src/compiler/x64/code-generator-x64.cc
@@ -1171,6 +1171,34 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
__ movsd(operand, i.InputDoubleRegister(index));
}
break;
+ case kX64BitcastFI:
+ if (instr->InputAt(0)->IsDoubleStackSlot()) {
+ __ movl(i.OutputRegister(), i.InputOperand(0));
+ } else {
+ __ movd(i.OutputRegister(), i.InputDoubleRegister(0));
+ }
+ break;
+ case kX64BitcastDL:
+ if (instr->InputAt(0)->IsDoubleStackSlot()) {
+ __ movq(i.OutputRegister(), i.InputOperand(0));
+ } else {
+ __ movq(i.OutputRegister(), i.InputDoubleRegister(0));
+ }
+ break;
+ case kX64BitcastIF:
+ if (instr->InputAt(0)->IsRegister()) {
+ __ movd(i.OutputDoubleRegister(), i.InputRegister(0));
+ } else {
+ __ movss(i.OutputDoubleRegister(), i.InputOperand(0));
+ }
+ break;
+ case kX64BitcastLD:
+ if (instr->InputAt(0)->IsRegister()) {
+ __ movq(i.OutputDoubleRegister(), i.InputRegister(0));
+ } else {
+ __ movsd(i.OutputDoubleRegister(), i.InputOperand(0));
+ }
+ break;
case kX64Lea32: {
AddressingMode mode = AddressingModeField::decode(instr->opcode());
// Shorten "leal" to "addl", "subl" or "shll" if the register allocation
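
The four new kX64Bitcast* opcodes above reinterpret raw register bits without any numeric conversion: movd transfers 32 bits between a general-purpose and an XMM register (float <-> int32), movq transfers 64 bits (double <-> int64), and the movl/movss/movsd variants handle stack-slot operands. A minimal sketch of the semantics, using hypothetical memcpy-based helpers rather than V8 code (the generated code is the single move instruction):

#include <cstdint>
#include <cstring>

// Hypothetical helpers illustrating the bit-preserving semantics.
static inline uint32_t BitcastFloatToInt32(float f) {  // kX64BitcastFI (movd)
  uint32_t bits;
  std::memcpy(&bits, &f, sizeof bits);  // copy raw bits, no conversion
  return bits;
}

static inline double BitcastInt64ToDouble(uint64_t bits) {  // kX64BitcastLD (movq)
  double d;
  std::memcpy(&d, &bits, sizeof d);
  return d;
}
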
@@ -1271,6 +1299,9 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
case kCheckedLoadWord32:
ASSEMBLE_CHECKED_LOAD_INTEGER(movl);
break;
+ case kCheckedLoadWord64:
+ ASSEMBLE_CHECKED_LOAD_INTEGER(movq);
+ break;
case kCheckedLoadFloat32:
ASSEMBLE_CHECKED_LOAD_FLOAT(movss);
break;
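
kCheckedLoadWord64 rounds out the checked-load opcodes for 64-bit integers. The ASSEMBLE_CHECKED_LOAD_INTEGER macro (defined earlier in this file) wraps the movq in a bounds check, so an out-of-bounds index takes an out-of-line path instead of reading past the buffer. A hedged C++ sketch of the observable behavior, assuming the integer out-of-line path produces zero; the generated code uses a cmp plus a conditional jump to a stub, not a C++ branch:

#include <cstdint>
#include <cstring>

static uint64_t CheckedLoadWord64(const uint8_t* buffer, size_t offset,
                                  size_t length) {
  if (offset > length || length - offset < sizeof(uint64_t)) {
    return 0;  // assumed out-of-line result for a failed bounds check
  }
  uint64_t value;
  std::memcpy(&value, buffer + offset, sizeof value);  // the movq
  return value;
}
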
@@ -1286,6 +1317,9 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
case kCheckedStoreWord32:
ASSEMBLE_CHECKED_STORE_INTEGER(movl);
break;
+ case kCheckedStoreWord64:
+ ASSEMBLE_CHECKED_STORE_INTEGER(movq);
+ break;
case kCheckedStoreFloat32:
ASSEMBLE_CHECKED_STORE_FLOAT(movss);
break;
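
The store side mirrors this: kCheckedStoreWord64 bounds-checks before the movq, and a failed check skips the store entirely. Sketched under the same assumptions as the load above:

static void CheckedStoreWord64(uint8_t* buffer, size_t offset,
                               size_t length, uint64_t value) {
  if (offset > length || length - offset < sizeof(uint64_t)) {
    return;  // assumed behavior: out-of-bounds stores are dropped
  }
  std::memcpy(buffer + offset, &value, sizeof value);  // the movq
}
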
@@ -1349,6 +1383,9 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
case kNotOverflow:
__ j(no_overflow, tlabel);
break;
+ default:
+ UNREACHABLE();
+ break;
}
if (!branch->fallthru) __ jmp(flabel, flabel_distance);
}
@@ -1418,6 +1455,9 @@ void CodeGenerator::AssembleArchBoolean(Instruction* instr,
case kNotOverflow:
cc = no_overflow;
break;
+ default:
+ UNREACHABLE();
+ break;
}
__ bind(&check);
__ setcc(cc, reg);
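
Both flag-dispatch switches (branch assembly above and boolean materialization here) gain a default: UNREACHABLE(); arm, so an unhandled FlagsCondition fails loudly rather than falling through with an uninitialized condition code. The pattern in isolation, with a stand-in for V8's UNREACHABLE macro (the real macro also reports the source location):

#include <cstdio>
#include <cstdlib>

#define UNREACHABLE_STUB()                       \
  do {                                           \
    std::fprintf(stderr, "unreachable code\n");  \
    std::abort();                                \
  } while (false)

enum class FlagsCondition { kOverflow, kNotOverflow /*, ... */ };

static const char* ConditionName(FlagsCondition cc) {
  switch (cc) {
    case FlagsCondition::kOverflow:    return "overflow";
    case FlagsCondition::kNotOverflow: return "no_overflow";
    default: UNREACHABLE_STUB();  // any newly added value must be handled
  }
}
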
@@ -1760,15 +1800,17 @@ void CodeGenerator::AddNopForSmiCodeInlining() { __ nop(); }
void CodeGenerator::EnsureSpaceForLazyDeopt() {
+ if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
+ return;
+ }
+
int space_needed = Deoptimizer::patch_size();
- if (!info()->IsStub()) {
- // Ensure that we have enough space after the previous lazy-bailout
- // instruction for patching the code here.
- int current_pc = masm()->pc_offset();
- if (current_pc < last_lazy_deopt_pc_ + space_needed) {
- int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
- __ Nop(padding_size);
- }
+ // Ensure that we have enough space after the previous lazy-bailout
+ // instruction for patching the code here.
+ int current_pc = masm()->pc_offset();
+ if (current_pc < last_lazy_deopt_pc_ + space_needed) {
+ int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
+ __ Nop(padding_size);
}
}
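
The rewrite hoists the guard into an early return and moves the policy from !info()->IsStub() onto CompilationInfo::ShouldEnsureSpaceForLazyDeopt(). The padding arithmetic itself is unchanged: if fewer than Deoptimizer::patch_size() bytes have been emitted since the last lazy-bailout site, nops fill the gap so the code there can be patched later. The computation in isolation, with illustrative names:

// Illustrative helper: number of nop bytes needed so that at least
// patch_size bytes separate the last lazy-deopt site from the current
// emission offset.
static int LazyDeoptPaddingBytes(int current_pc, int last_lazy_deopt_pc,
                                 int patch_size) {
  int required_pc = last_lazy_deopt_pc + patch_size;
  return current_pc < required_pc ? required_pc - current_pc : 0;
}

// Example: LazyDeoptPaddingBytes(100, 96, 10) == 6, so __ Nop(6) is emitted.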