Diffstat (limited to 'deps/v8/src/crankshaft/x87/lithium-codegen-x87.cc')
-rw-r--r--  deps/v8/src/crankshaft/x87/lithium-codegen-x87.cc | 478
1 file changed, 181 insertions(+), 297 deletions(-)
diff --git a/deps/v8/src/crankshaft/x87/lithium-codegen-x87.cc b/deps/v8/src/crankshaft/x87/lithium-codegen-x87.cc
index f80e0768a6..1ca3a99271 100644
--- a/deps/v8/src/crankshaft/x87/lithium-codegen-x87.cc
+++ b/deps/v8/src/crankshaft/x87/lithium-codegen-x87.cc
@@ -57,13 +57,6 @@ bool LCodeGen::GenerateCode() {
// the frame (that is done in GeneratePrologue).
FrameScope frame_scope(masm_, StackFrame::MANUAL);
- support_aligned_spilled_doubles_ = info()->IsOptimizing();
-
- dynamic_frame_alignment_ = info()->IsOptimizing() &&
- ((chunk()->num_double_slots() > 2 &&
- !chunk()->graph()->is_recursive()) ||
- !info()->osr_ast_id().IsNone());
-
return GeneratePrologue() &&
GenerateBody() &&
GenerateDeferredCode() &&
@@ -98,31 +91,6 @@ bool LCodeGen::GeneratePrologue() {
if (info()->IsOptimizing()) {
ProfileEntryHookStub::MaybeCallEntryHook(masm_);
-
- if (support_aligned_spilled_doubles_ && dynamic_frame_alignment_) {
- // Move state of dynamic frame alignment into edx.
- __ Move(edx, Immediate(kNoAlignmentPadding));
-
- Label do_not_pad, align_loop;
- STATIC_ASSERT(kDoubleSize == 2 * kPointerSize);
- // Align esp + 4 to a multiple of 2 * kPointerSize.
- __ test(esp, Immediate(kPointerSize));
- __ j(not_zero, &do_not_pad, Label::kNear);
- __ push(Immediate(0));
- __ mov(ebx, esp);
- __ mov(edx, Immediate(kAlignmentPaddingPushed));
- // Copy arguments, receiver, and return address.
- __ mov(ecx, Immediate(scope()->num_parameters() + 2));
-
- __ bind(&align_loop);
- __ mov(eax, Operand(ebx, 1 * kPointerSize));
- __ mov(Operand(ebx, 0), eax);
- __ add(Operand(ebx), Immediate(kPointerSize));
- __ dec(ecx);
- __ j(not_zero, &align_loop, Label::kNear);
- __ mov(Operand(ebx, 0), Immediate(kAlignmentZapValue));
- __ bind(&do_not_pad);
- }
}
info()->set_prologue_offset(masm_->pc_offset());
@@ -130,61 +98,29 @@ bool LCodeGen::GeneratePrologue() {
DCHECK(!frame_is_built_);
frame_is_built_ = true;
if (info()->IsStub()) {
- __ StubPrologue();
+ __ StubPrologue(StackFrame::STUB);
} else {
__ Prologue(info()->GeneratePreagedPrologue());
}
}
- if (info()->IsOptimizing() &&
- dynamic_frame_alignment_ &&
- FLAG_debug_code) {
- __ test(esp, Immediate(kPointerSize));
- __ Assert(zero, kFrameIsExpectedToBeAligned);
- }
-
// Reserve space for the stack slots needed by the code.
int slots = GetStackSlotCount();
DCHECK(slots != 0 || !info()->IsOptimizing());
if (slots > 0) {
- if (slots == 1) {
- if (dynamic_frame_alignment_) {
- __ push(edx);
- } else {
- __ push(Immediate(kNoAlignmentPadding));
- }
- } else {
- if (FLAG_debug_code) {
- __ sub(Operand(esp), Immediate(slots * kPointerSize));
+ __ sub(Operand(esp), Immediate(slots * kPointerSize));
#ifdef _MSC_VER
- MakeSureStackPagesMapped(slots * kPointerSize);
+ MakeSureStackPagesMapped(slots * kPointerSize);
#endif
- __ push(eax);
- __ mov(Operand(eax), Immediate(slots));
- Label loop;
- __ bind(&loop);
- __ mov(MemOperand(esp, eax, times_4, 0),
- Immediate(kSlotsZapValue));
- __ dec(eax);
- __ j(not_zero, &loop);
- __ pop(eax);
- } else {
- __ sub(Operand(esp), Immediate(slots * kPointerSize));
-#ifdef _MSC_VER
- MakeSureStackPagesMapped(slots * kPointerSize);
-#endif
- }
-
- if (support_aligned_spilled_doubles_) {
- Comment(";;; Store dynamic frame alignment tag for spilled doubles");
- // Store dynamic frame alignment state in the first local.
- int offset = JavaScriptFrameConstants::kDynamicAlignmentStateOffset;
- if (dynamic_frame_alignment_) {
- __ mov(Operand(ebp, offset), edx);
- } else {
- __ mov(Operand(ebp, offset), Immediate(kNoAlignmentPadding));
- }
- }
+ if (FLAG_debug_code) {
+ __ push(eax);
+ __ mov(Operand(eax), Immediate(slots));
+ Label loop;
+ __ bind(&loop);
+ __ mov(MemOperand(esp, eax, times_4, 0), Immediate(kSlotsZapValue));
+ __ dec(eax);
+ __ j(not_zero, &loop);
+ __ pop(eax);
}
}
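
The surviving debug-mode loop above fills every freshly reserved stack slot with a zap pattern so stale reads show up immediately. A minimal standalone sketch of the same idea (the constant here is illustrative, not V8's actual kSlotsZapValue):

    #include <cstdint>
    #include <vector>

    constexpr uint32_t kSlotsZapValue = 0xdeadbeef;  // illustrative only

    // Equivalent of the emitted loop: eax counts from `slots` down to 1
    // and stores the zap value at [esp + eax * kPointerSize], i.e.
    // exactly the words reserved by `sub esp, slots * kPointerSize`.
    void ZapReservedSlots(std::vector<uint32_t>& stack_words, int slots) {
      for (int i = slots; i >= 1; --i) stack_words[i] = kSlotsZapValue;
    }
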
@@ -265,50 +201,11 @@ void LCodeGen::GenerateOsrPrologue() {
osr_pc_offset_ = masm()->pc_offset();
- // Move state of dynamic frame alignment into edx.
- __ Move(edx, Immediate(kNoAlignmentPadding));
-
- if (support_aligned_spilled_doubles_ && dynamic_frame_alignment_) {
- Label do_not_pad, align_loop;
- // Align ebp + 4 to a multiple of 2 * kPointerSize.
- __ test(ebp, Immediate(kPointerSize));
- __ j(zero, &do_not_pad, Label::kNear);
- __ push(Immediate(0));
- __ mov(ebx, esp);
- __ mov(edx, Immediate(kAlignmentPaddingPushed));
-
- // Move all parts of the frame over one word. The frame consists of:
- // unoptimized frame slots, alignment state, context, frame pointer, return
- // address, receiver, and the arguments.
- __ mov(ecx, Immediate(scope()->num_parameters() +
- 5 + graph()->osr()->UnoptimizedFrameSlots()));
-
- __ bind(&align_loop);
- __ mov(eax, Operand(ebx, 1 * kPointerSize));
- __ mov(Operand(ebx, 0), eax);
- __ add(Operand(ebx), Immediate(kPointerSize));
- __ dec(ecx);
- __ j(not_zero, &align_loop, Label::kNear);
- __ mov(Operand(ebx, 0), Immediate(kAlignmentZapValue));
- __ sub(Operand(ebp), Immediate(kPointerSize));
- __ bind(&do_not_pad);
- }
-
- // Save the first local, which is overwritten by the alignment state.
- Operand alignment_loc = MemOperand(ebp, -3 * kPointerSize);
- __ push(alignment_loc);
-
- // Set the dynamic frame alignment state.
- __ mov(alignment_loc, edx);
-
// Adjust the frame size, subsuming the unoptimized frame into the
// optimized frame.
int slots = GetStackSlotCount() - graph()->osr()->UnoptimizedFrameSlots();
- DCHECK(slots >= 1);
- __ sub(esp, Immediate((slots - 1) * kPointerSize));
-
- // Initailize FPU state.
- __ fninit();
+ DCHECK(slots >= 0);
+ __ sub(esp, Immediate(slots * kPointerSize));
}
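
The hunk above also relaxes the OSR frame-size invariant: with the alignment-state slot gone, the optimized frame no longer reserves an extra word. A hedged sketch of the arithmetic (the function name is hypothetical):

    // Words still to reserve when entering via OSR: the unoptimized
    // frame is already on the stack and is subsumed into the optimized
    // frame, so only the difference is allocated.
    int ExtraOsrSlots(int optimized_slots, int unoptimized_frame_slots) {
      int slots = optimized_slots - unoptimized_frame_slots;
      // Previously DCHECK(slots >= 1): slot 0 held the dynamic
      // alignment state. Zero is now a legal result.
      return slots;  // emitted as: sub esp, slots * kPointerSize
    }
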
@@ -376,32 +273,24 @@ bool LCodeGen::GenerateJumpTable() {
}
if (needs_frame.is_linked()) {
__ bind(&needs_frame);
-
/* stack layout
- 4: entry address
- 3: return address <-- esp
- 2: garbage
+ 3: entry address
+ 2: return address <-- esp
1: garbage
0: garbage
*/
- __ sub(esp, Immediate(kPointerSize)); // Reserve space for stub marker.
- __ push(MemOperand(esp, kPointerSize)); // Copy return address.
- __ push(MemOperand(esp, 3 * kPointerSize)); // Copy entry address.
+ __ push(MemOperand(esp, 0)); // Copy return address.
+ __ push(MemOperand(esp, 2 * kPointerSize)); // Copy entry address.
/* stack layout
4: entry address
3: return address
- 2: garbage
1: return address
0: entry address <-- esp
*/
- __ mov(MemOperand(esp, 4 * kPointerSize), ebp); // Save ebp.
-
- // Copy context.
- __ mov(ebp, MemOperand(ebp, StandardFrameConstants::kContextOffset));
- __ mov(MemOperand(esp, 3 * kPointerSize), ebp);
+ __ mov(MemOperand(esp, 3 * kPointerSize), ebp); // Save ebp.
// Fill ebp with the right stack frame address.
- __ lea(ebp, MemOperand(esp, 4 * kPointerSize));
+ __ lea(ebp, MemOperand(esp, 3 * kPointerSize));
// This variant of deopt can only be used with stubs. Since we don't
// have a function pointer to install in the stack frame that we're
@@ -411,8 +300,7 @@ bool LCodeGen::GenerateJumpTable() {
Immediate(Smi::FromInt(StackFrame::STUB)));
/* stack layout
- 4: old ebp
- 3: context pointer
+ 3: old ebp
2: stub marker
1: return address
0: entry address <-- esp
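
The resulting frame is one word smaller than before: the context slot between the stub marker and the saved ebp is gone. A compile-checked mirror of the new layout, assuming ia32's 4-byte words (the struct is illustrative, not a V8 type):

    #include <cstdint>

    struct StubDeoptFrame {     // lowest address first
      uint32_t entry_address;   // [esp + 0]
      uint32_t return_address;  // [esp + 4]
      uint32_t stub_marker;     // [esp + 8], Smi(StackFrame::STUB)
      uint32_t saved_ebp;       // [esp + 12], ebp = esp + 12 via lea
    };
    static_assert(sizeof(StubDeoptFrame) == 16,
                  "four words; the old context slot no longer exists");
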
@@ -449,9 +337,8 @@ bool LCodeGen::GenerateDeferredCode() {
frame_is_built_ = true;
// Build the frame in such a way that esi isn't trashed.
__ push(ebp); // Caller's frame pointer.
- __ push(Operand(ebp, StandardFrameConstants::kContextOffset));
__ push(Immediate(Smi::FromInt(StackFrame::STUB)));
- __ lea(ebp, Operand(esp, 2 * kPointerSize));
+ __ lea(ebp, Operand(esp, TypedFrameConstants::kFixedFrameSizeFromFp));
Comment(";;; Deferred code");
}
code->Generate();
@@ -2240,15 +2127,16 @@ void LCodeGen::DoBranch(LBranch* instr) {
__ cmp(FieldOperand(reg, String::kLengthOffset), Immediate(0));
EmitBranch(instr, not_equal);
} else {
- ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
- if (expected.IsEmpty()) expected = ToBooleanStub::Types::Generic();
+ ToBooleanICStub::Types expected =
+ instr->hydrogen()->expected_input_types();
+ if (expected.IsEmpty()) expected = ToBooleanICStub::Types::Generic();
- if (expected.Contains(ToBooleanStub::UNDEFINED)) {
+ if (expected.Contains(ToBooleanICStub::UNDEFINED)) {
// undefined -> false.
__ cmp(reg, factory()->undefined_value());
__ j(equal, instr->FalseLabel(chunk_));
}
- if (expected.Contains(ToBooleanStub::BOOLEAN)) {
+ if (expected.Contains(ToBooleanICStub::BOOLEAN)) {
// true -> true.
__ cmp(reg, factory()->true_value());
__ j(equal, instr->TrueLabel(chunk_));
@@ -2256,13 +2144,13 @@ void LCodeGen::DoBranch(LBranch* instr) {
__ cmp(reg, factory()->false_value());
__ j(equal, instr->FalseLabel(chunk_));
}
- if (expected.Contains(ToBooleanStub::NULL_TYPE)) {
+ if (expected.Contains(ToBooleanICStub::NULL_TYPE)) {
// 'null' -> false.
__ cmp(reg, factory()->null_value());
__ j(equal, instr->FalseLabel(chunk_));
}
- if (expected.Contains(ToBooleanStub::SMI)) {
+ if (expected.Contains(ToBooleanICStub::SMI)) {
// Smis: 0 -> false, all other -> true.
__ test(reg, Operand(reg));
__ j(equal, instr->FalseLabel(chunk_));
@@ -2282,18 +2170,18 @@ void LCodeGen::DoBranch(LBranch* instr) {
if (expected.CanBeUndetectable()) {
// Undetectable -> false.
__ test_b(FieldOperand(map, Map::kBitFieldOffset),
- 1 << Map::kIsUndetectable);
+ Immediate(1 << Map::kIsUndetectable));
__ j(not_zero, instr->FalseLabel(chunk_));
}
}
- if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) {
+ if (expected.Contains(ToBooleanICStub::SPEC_OBJECT)) {
// spec object -> true.
__ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
__ j(above_equal, instr->TrueLabel(chunk_));
}
- if (expected.Contains(ToBooleanStub::STRING)) {
+ if (expected.Contains(ToBooleanICStub::STRING)) {
// String value -> false iff empty.
Label not_string;
__ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
@@ -2304,19 +2192,19 @@ void LCodeGen::DoBranch(LBranch* instr) {
__ bind(&not_string);
}
- if (expected.Contains(ToBooleanStub::SYMBOL)) {
+ if (expected.Contains(ToBooleanICStub::SYMBOL)) {
// Symbol value -> true.
__ CmpInstanceType(map, SYMBOL_TYPE);
__ j(equal, instr->TrueLabel(chunk_));
}
- if (expected.Contains(ToBooleanStub::SIMD_VALUE)) {
+ if (expected.Contains(ToBooleanICStub::SIMD_VALUE)) {
// SIMD value -> true.
__ CmpInstanceType(map, SIMD128_VALUE_TYPE);
__ j(equal, instr->TrueLabel(chunk_));
}
- if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) {
+ if (expected.Contains(ToBooleanICStub::HEAP_NUMBER)) {
// heap number -> false iff +0, -0, or NaN.
Label not_heap_number;
__ cmp(FieldOperand(reg, HeapObject::kMapOffset),
@@ -2524,7 +2412,7 @@ void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
}
__ mov(temp, FieldOperand(input, HeapObject::kMapOffset));
__ test_b(FieldOperand(temp, Map::kBitFieldOffset),
- 1 << Map::kIsUndetectable);
+ Immediate(1 << Map::kIsUndetectable));
EmitBranch(instr, not_zero);
}
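
Two mechanical renames dominate the DoBranch hunks above: ToBooleanStub becomes ToBooleanICStub, and raw byte masks passed to test_b are now wrapped in Immediate(). The control flow itself is a per-type early-out dispatch; a rough C++ model, with an illustrative type set rather than V8's exact enum:

    #include <bitset>

    enum Type { UNDEFINED, BOOLEAN, NULL_TYPE, SMI, SPEC_OBJECT, STRING,
                SYMBOL, SIMD_VALUE, HEAP_NUMBER, NUM_TYPES };
    using Expected = std::bitset<NUM_TYPES>;

    // Each type the IC has observed gets its own branch to the true or
    // false label; a value of an unobserved type deoptimizes. An empty
    // set is widened to Generic(), i.e. checks for everything.
    bool EmitsCheckFor(const Expected& expected, Type t) {
      return expected.none() || expected.test(t);
    }
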
@@ -2554,11 +2442,10 @@ void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
DCHECK(ToRegister(instr->left()).is(edx));
DCHECK(ToRegister(instr->right()).is(eax));
- Handle<Code> code = CodeFactory::StringCompare(isolate()).code();
+ Handle<Code> code = CodeFactory::StringCompare(isolate(), instr->op()).code();
CallCode(code, RelocInfo::CODE_TARGET, instr);
- __ test(eax, eax);
-
- EmitBranch(instr, ComputeCompareCondition(instr->op()));
+ __ CompareRoot(eax, Heap::kTrueValueRootIndex);
+ EmitBranch(instr, equal);
}
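
The string-compare change swaps calling conventions: the old code object left an integer flag in eax that had to be re-tested against the comparison op, while the new CodeFactory::StringCompare(isolate(), op) stub materializes the boolean result itself. Modeled in plain C++ (the enum is a stand-in):

    enum class StubResult { kTrueValue, kFalseValue };

    // Old: branch on ComputeCompareCondition(op) after `test eax, eax`.
    // New: a single root comparison against the canonical true value.
    bool BranchTaken(StubResult result_in_eax) {
      return result_in_eax == StubResult::kTrueValue;  // EmitBranch(equal)
    }
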
@@ -2717,7 +2604,7 @@ void LCodeGen::DoHasInPrototypeChainAndBranch(
// Deoptimize if the object needs to be access checked.
__ test_b(FieldOperand(object_map, Map::kBitFieldOffset),
- 1 << Map::kIsAccessCheckNeeded);
+ Immediate(1 << Map::kIsAccessCheckNeeded));
DeoptimizeIf(not_zero, instr, Deoptimizer::kAccessCheck);
// Deoptimize for proxies.
__ CmpInstanceType(object_map, JS_PROXY_TYPE);
@@ -2750,18 +2637,11 @@ void LCodeGen::DoCmpT(LCmpT* instr) {
__ bind(&done);
}
-
-void LCodeGen::EmitReturn(LReturn* instr, bool dynamic_frame_alignment) {
- int extra_value_count = dynamic_frame_alignment ? 2 : 1;
+void LCodeGen::EmitReturn(LReturn* instr) {
+ int extra_value_count = 1;
if (instr->has_constant_parameter_count()) {
int parameter_count = ToInteger32(instr->constant_parameter_count());
- if (dynamic_frame_alignment && FLAG_debug_code) {
- __ cmp(Operand(esp,
- (parameter_count + extra_value_count) * kPointerSize),
- Immediate(kAlignmentZapValue));
- __ Assert(equal, kExpectedAlignmentMarker);
- }
__ Ret((parameter_count + extra_value_count) * kPointerSize, ecx);
} else {
DCHECK(info()->IsStub()); // Functions would need to drop one more value.
@@ -2769,19 +2649,9 @@ void LCodeGen::EmitReturn(LReturn* instr, bool dynamic_frame_alignment) {
// The argument count parameter is a smi
__ SmiUntag(reg);
Register return_addr_reg = reg.is(ecx) ? ebx : ecx;
- if (dynamic_frame_alignment && FLAG_debug_code) {
- DCHECK(extra_value_count == 2);
- __ cmp(Operand(esp, reg, times_pointer_size,
- extra_value_count * kPointerSize),
- Immediate(kAlignmentZapValue));
- __ Assert(equal, kExpectedAlignmentMarker);
- }
// emit code to restore stack based on instr->parameter_count()
__ pop(return_addr_reg); // save return address
- if (dynamic_frame_alignment) {
- __ inc(reg); // 1 more for alignment
- }
__ shl(reg, kPointerSizeLog2);
__ add(esp, reg);
__ jmp(return_addr_reg);
@@ -2799,25 +2669,12 @@ void LCodeGen::DoReturn(LReturn* instr) {
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
__ CallRuntime(Runtime::kTraceExit);
}
- if (dynamic_frame_alignment_) {
- // Fetch the state of the dynamic frame alignment.
- __ mov(edx, Operand(ebp,
- JavaScriptFrameConstants::kDynamicAlignmentStateOffset));
- }
if (NeedsEagerFrame()) {
__ mov(esp, ebp);
__ pop(ebp);
}
- if (dynamic_frame_alignment_) {
- Label no_padding;
- __ cmp(edx, Immediate(kNoAlignmentPadding));
- __ j(equal, &no_padding, Label::kNear);
- EmitReturn(instr, true);
- __ bind(&no_padding);
- }
-
- EmitReturn(instr, false);
+ EmitReturn(instr);
}
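
With the alignment state gone, the epilogue is branch-free: DoReturn tears down the frame and EmitReturn always drops the same amount. A sketch of the byte count (kPointerSize fixed at 4 for ia32):

    // Bytes dropped by `Ret((parameter_count + extra) * kPointerSize, ecx)`
    // after the return address has been popped into a scratch register.
    int ReturnDropBytes(int parameter_count) {
      constexpr int kPointerSize = 4;
      // One extra stack value besides the parameters (the receiver for
      // JS frames); the old padding variant used 2 here.
      constexpr int extra_value_count = 1;
      return (parameter_count + extra_value_count) * kPointerSize;
    }
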
@@ -3218,11 +3075,12 @@ void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
if (instr->hydrogen()->from_inlined()) {
__ lea(result, Operand(esp, -2 * kPointerSize));
- } else {
+ } else if (instr->hydrogen()->arguments_adaptor()) {
// Check for arguments adapter frame.
Label done, adapted;
__ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
- __ mov(result, Operand(result, StandardFrameConstants::kContextOffset));
+ __ mov(result,
+ Operand(result, CommonFrameConstants::kContextOrFrameTypeOffset));
__ cmp(Operand(result),
Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
__ j(equal, &adapted, Label::kNear);
@@ -3238,6 +3096,8 @@ void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
// Result is the frame pointer for the frame if not adapted and for the real
// frame below the adaptor frame if adapted.
__ bind(&done);
+ } else {
+ __ mov(result, Operand(ebp));
}
}
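
The rewritten DoArgumentsElements is now a three-way choice, and the adaptor check reads the new CommonFrameConstants::kContextOrFrameTypeOffset slot instead of assuming a context is stored there. A plain-C++ model (helper and enum are illustrative):

    #include <cstdint>

    enum class FrameKind { kInlined, kAdapted, kNormal };

    // The result register ends up holding a frame pointer: the caller's
    // fp when an arguments-adaptor frame is found, the current ebp in
    // the new `else` fast path that skips the check entirely.
    uintptr_t ArgumentsFramePointer(FrameKind kind, uintptr_t ebp,
                                    uintptr_t caller_fp, uintptr_t esp) {
      switch (kind) {
        case FrameKind::kInlined: return esp - 2 * 4;  // synthetic slot
        case FrameKind::kAdapted: return caller_fp;
        default:                  return ebp;
      }
    }
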
@@ -3272,6 +3132,7 @@ void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
// object as a receiver to normal functions. Values have to be
// passed unchanged to builtins and strict-mode functions.
Label receiver_ok, global_object;
+ Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear;
Register scratch = ToRegister(instr->temp());
if (!instr->hydrogen()->known_function()) {
@@ -3280,20 +3141,20 @@ void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
__ mov(scratch,
FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
__ test_b(FieldOperand(scratch, SharedFunctionInfo::kStrictModeByteOffset),
- 1 << SharedFunctionInfo::kStrictModeBitWithinByte);
- __ j(not_equal, &receiver_ok);
+ Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
+ __ j(not_equal, &receiver_ok, dist);
// Do not transform the receiver to object for builtins.
__ test_b(FieldOperand(scratch, SharedFunctionInfo::kNativeByteOffset),
- 1 << SharedFunctionInfo::kNativeBitWithinByte);
- __ j(not_equal, &receiver_ok);
+ Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
+ __ j(not_equal, &receiver_ok, dist);
}
// Normal function. Replace undefined or null with global receiver.
__ cmp(receiver, factory()->null_value());
- __ j(equal, &global_object);
+ __ j(equal, &global_object, Label::kNear);
__ cmp(receiver, factory()->undefined_value());
- __ j(equal, &global_object);
+ __ j(equal, &global_object, Label::kNear);
// The receiver should be a JS object.
__ test(receiver, Immediate(kSmiTagMask));
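
The label-distance tweak exists because --deopt-every-n-times can insert enough code between the jump and the bind that a near jump cannot reach. The wrapping rule itself is unchanged; a compact restatement in plain C++ (names are stand-ins):

    enum class Receiver { kUndefined, kNull, kOther };

    // Strict-mode and native (builtin) functions receive the value
    // unchanged; sloppy functions get undefined/null replaced by the
    // global proxy.
    bool ReplaceWithGlobalProxy(bool is_strict, bool is_native, Receiver r) {
      if (is_strict || is_native) return false;
      return r == Receiver::kUndefined || r == Receiver::kNull;
    }
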
@@ -3341,13 +3202,25 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
// Invoke the function.
__ bind(&invoke);
+
+ InvokeFlag flag = CALL_FUNCTION;
+ if (instr->hydrogen()->tail_call_mode() == TailCallMode::kAllow) {
+ DCHECK(!info()->saves_caller_doubles());
+ // TODO(ishell): drop current frame before pushing arguments to the stack.
+ flag = JUMP_FUNCTION;
+ ParameterCount actual(eax);
+ // It is safe to use ebx, ecx and edx as scratch registers here given that
+ // 1) we are not going to return to caller function anyway,
+ // 2) ebx (expected arguments count) and edx (new.target) will be
+ // initialized below.
+ PrepareForTailCall(actual, ebx, ecx, edx);
+ }
+
DCHECK(instr->HasPointerMap());
LPointerMap* pointers = instr->pointer_map();
- SafepointGenerator safepoint_generator(
- this, pointers, Safepoint::kLazyDeopt);
+ SafepointGenerator safepoint_generator(this, pointers, Safepoint::kLazyDeopt);
ParameterCount actual(eax);
- __ InvokeFunction(function, no_reg, actual, CALL_FUNCTION,
- safepoint_generator);
+ __ InvokeFunction(function, no_reg, actual, flag, safepoint_generator);
}
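
Tail calls reuse the caller's frame, so DoApplyArguments must fix up the stack before jumping; ebx/ecx/edx are expendable as scratch because control never returns here and the invoke sequence reinitializes them. The decision reduces to (helper is hypothetical, the flag names are V8's):

    enum class InvokeFlag { CALL_FUNCTION, JUMP_FUNCTION };

    // JUMP_FUNCTION is only valid after PrepareForTailCall has dropped
    // the current frame.
    InvokeFlag FlagFor(bool tail_call_allowed) {
      return tail_call_allowed ? InvokeFlag::JUMP_FUNCTION
                               : InvokeFlag::CALL_FUNCTION;
    }
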
@@ -3391,10 +3264,9 @@ void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
CallRuntime(Runtime::kDeclareGlobals, instr);
}
-
void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
int formal_parameter_count, int arity,
- LInstruction* instr) {
+ bool is_tail_call, LInstruction* instr) {
bool dont_adapt_arguments =
formal_parameter_count == SharedFunctionInfo::kDontAdaptArgumentsSentinel;
bool can_invoke_directly =
@@ -3410,21 +3282,38 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
__ mov(edx, factory()->undefined_value());
__ mov(eax, arity);
+ bool is_self_call = function.is_identical_to(info()->closure());
+
// Invoke function directly.
- if (function.is_identical_to(info()->closure())) {
- __ CallSelf();
+ if (is_self_call) {
+ Handle<Code> self(reinterpret_cast<Code**>(__ CodeObject().location()));
+ if (is_tail_call) {
+ __ Jump(self, RelocInfo::CODE_TARGET);
+ } else {
+ __ Call(self, RelocInfo::CODE_TARGET);
+ }
} else {
- __ call(FieldOperand(function_reg, JSFunction::kCodeEntryOffset));
+ Operand target = FieldOperand(function_reg, JSFunction::kCodeEntryOffset);
+ if (is_tail_call) {
+ __ jmp(target);
+ } else {
+ __ call(target);
+ }
+ }
+
+ if (!is_tail_call) {
+ // Set up deoptimization.
+ RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
}
- RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
} else {
// We need to adapt arguments.
LPointerMap* pointers = instr->pointer_map();
SafepointGenerator generator(
this, pointers, Safepoint::kLazyDeopt);
- ParameterCount count(arity);
+ ParameterCount actual(arity);
ParameterCount expected(formal_parameter_count);
- __ InvokeFunction(function_reg, expected, count, CALL_FUNCTION, generator);
+ InvokeFlag flag = is_tail_call ? JUMP_FUNCTION : CALL_FUNCTION;
+ __ InvokeFunction(function_reg, expected, actual, flag, generator);
}
}
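
CallKnownFunction now threads is_tail_call through all three invocation paths: self-call, direct call, and the argument-adapting path. Note the safepoint asymmetry, sketched here with an illustrative helper:

    // A lazy-deopt safepoint is recorded only for direct non-tail
    // calls; tail calls never return here, and the InvokeFunction path
    // registers its safepoint through the SafepointGenerator instead.
    bool RecordsSimpleSafepoint(bool can_invoke_directly, bool is_tail_call) {
      return can_invoke_directly && !is_tail_call;
    }
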
@@ -3466,35 +3355,6 @@ void LCodeGen::DoCallWithDescriptor(LCallWithDescriptor* instr) {
}
-void LCodeGen::DoCallJSFunction(LCallJSFunction* instr) {
- DCHECK(ToRegister(instr->function()).is(edi));
- DCHECK(ToRegister(instr->result()).is(eax));
-
- // Change context.
- __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
-
- // Always initialize new target and number of actual arguments.
- __ mov(edx, factory()->undefined_value());
- __ mov(eax, instr->arity());
-
- bool is_self_call = false;
- if (instr->hydrogen()->function()->IsConstant()) {
- HConstant* fun_const = HConstant::cast(instr->hydrogen()->function());
- Handle<JSFunction> jsfun =
- Handle<JSFunction>::cast(fun_const->handle(isolate()));
- is_self_call = jsfun.is_identical_to(info()->closure());
- }
-
- if (is_self_call) {
- __ CallSelf();
- } else {
- __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset));
- }
-
- RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
-}
-
-
void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) {
Register input_reg = ToRegister(instr->value());
__ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
@@ -3502,8 +3362,19 @@ void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) {
DeoptimizeIf(not_equal, instr, Deoptimizer::kNotAHeapNumber);
Label slow, allocated, done;
- Register tmp = input_reg.is(eax) ? ecx : eax;
- Register tmp2 = tmp.is(ecx) ? edx : input_reg.is(ecx) ? edx : ecx;
+ uint32_t available_regs = eax.bit() | ecx.bit() | edx.bit() | ebx.bit();
+ available_regs &= ~input_reg.bit();
+ if (instr->context()->IsRegister()) {
+ // Make sure that the context isn't overwritten in the AllocateHeapNumber
+ // macro below.
+ available_regs &= ~ToRegister(instr->context()).bit();
+ }
+
+ Register tmp =
+ Register::from_code(base::bits::CountTrailingZeros32(available_regs));
+ available_regs &= ~tmp.bit();
+ Register tmp2 =
+ Register::from_code(base::bits::CountTrailingZeros32(available_regs));
// Preserve the value of all registers.
PushSafepointRegistersScope scope(this);
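
The new scratch-register selection replaces a chain of conditionals with bit arithmetic over an availability mask: start from {eax, ecx, edx, ebx}, knock out the input (and, when present, the context) register, then take the two lowest set bits. A standalone equivalent using the GCC/Clang builtin in place of base::bits::CountTrailingZeros32:

    #include <cstdint>

    // Claims and returns the lowest-numbered register still in the
    // mask. The mask must be non-empty (ctz of 0 is undefined).
    int PickScratch(uint32_t& available_regs) {
      int code = __builtin_ctz(available_regs);
      available_regs &= available_regs - 1;  // clear that bit
      return code;
    }

    // E.g. input = edx (code 2), context = ecx (code 1):
    //   mask = 0b1111 & ~0b0100 & ~0b0010 = 0b1001
    //   -> tmp = eax (code 0), tmp2 = ebx (code 3).
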
@@ -3621,6 +3492,7 @@ void LCodeGen::DoMathFloor(LMathFloor* instr) {
__ sub(esp, Immediate(kPointerSize));
__ fist_s(Operand(esp, 0));
__ pop(output_reg);
+ __ X87SetRC(0x0000);
__ X87CheckIA();
DeoptimizeIf(equal, instr, Deoptimizer::kOverflow);
__ fnclex();
@@ -3653,6 +3525,8 @@ void LCodeGen::DoMathRound(LMathRound* instr) {
// Clear exception bits.
__ fnclex();
__ fistp_s(MemOperand(esp, 0));
+ // Restore round mode.
+ __ X87SetRC(0x0000);
// Check overflow.
__ X87CheckIA();
__ pop(result);
@@ -3687,6 +3561,8 @@ void LCodeGen::DoMathRound(LMathRound* instr) {
// Clear exception bits.
__ fnclex();
__ fistp_s(MemOperand(esp, 0));
+ // Restore round mode.
+ __ X87SetRC(0x0000);
// Check overflow.
__ X87CheckIA();
__ pop(result);
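
Earlier in each of these functions (outside the visible context) the x87 rounding control is switched away from the default, so the added X87SetRC(0x0000) restores round-to-nearest before the overflow check. RC occupies bits 10-11 of the control word; encodings per the Intel SDM, with an illustrative helper:

    #include <cstdint>

    constexpr uint16_t kX87RCMask      = 0x0C00;  // bits 10-11
    constexpr uint16_t kRoundToNearest = 0x0000;
    constexpr uint16_t kRoundDown      = 0x0400;  // used while flooring

    uint16_t SetRoundingControl(uint16_t control_word, uint16_t rc) {
      return static_cast<uint16_t>((control_word & ~kX87RCMask) | rc);
    }
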
@@ -3927,54 +3803,78 @@ void LCodeGen::DoMathExp(LMathExp* instr) {
X87CommitWrite(result_reg);
}
-
-void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
- DCHECK(ToRegister(instr->context()).is(esi));
- DCHECK(ToRegister(instr->function()).is(edi));
- DCHECK(instr->HasPointerMap());
-
- Handle<JSFunction> known_function = instr->hydrogen()->known_function();
- if (known_function.is_null()) {
- LPointerMap* pointers = instr->pointer_map();
- SafepointGenerator generator(
- this, pointers, Safepoint::kLazyDeopt);
- ParameterCount count(instr->arity());
- __ InvokeFunction(edi, no_reg, count, CALL_FUNCTION, generator);
+void LCodeGen::PrepareForTailCall(const ParameterCount& actual,
+ Register scratch1, Register scratch2,
+ Register scratch3) {
+#if DEBUG
+ if (actual.is_reg()) {
+ DCHECK(!AreAliased(actual.reg(), scratch1, scratch2, scratch3));
} else {
- CallKnownFunction(known_function,
- instr->hydrogen()->formal_parameter_count(),
- instr->arity(), instr);
+ DCHECK(!AreAliased(scratch1, scratch2, scratch3));
}
-}
+#endif
+ if (FLAG_code_comments) {
+ if (actual.is_reg()) {
+ Comment(";;; PrepareForTailCall, actual: %s {", actual.reg().ToString());
+ } else {
+ Comment(";;; PrepareForTailCall, actual: %d {", actual.immediate());
+ }
+ }
+
+ // Check if next frame is an arguments adaptor frame.
+ Register caller_args_count_reg = scratch1;
+ Label no_arguments_adaptor, formal_parameter_count_loaded;
+ __ mov(scratch2, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
+ __ cmp(Operand(scratch2, StandardFrameConstants::kContextOffset),
+ Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+ __ j(not_equal, &no_arguments_adaptor, Label::kNear);
+ // Drop current frame and load arguments count from arguments adaptor frame.
+ __ mov(ebp, scratch2);
+ __ mov(caller_args_count_reg,
+ Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset));
+ __ SmiUntag(caller_args_count_reg);
+ __ jmp(&formal_parameter_count_loaded, Label::kNear);
-void LCodeGen::DoCallFunction(LCallFunction* instr) {
- HCallFunction* hinstr = instr->hydrogen();
+ __ bind(&no_arguments_adaptor);
+ // Load caller's formal parameter count.
+ __ mov(caller_args_count_reg,
+ Immediate(info()->literal()->parameter_count()));
+
+ __ bind(&formal_parameter_count_loaded);
+ __ PrepareForTailCall(actual, caller_args_count_reg, scratch2, scratch3,
+ ReturnAddressState::kNotOnStack, 0);
+ Comment(";;; }");
+}
+
+void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
+ HInvokeFunction* hinstr = instr->hydrogen();
DCHECK(ToRegister(instr->context()).is(esi));
DCHECK(ToRegister(instr->function()).is(edi));
- DCHECK(ToRegister(instr->result()).is(eax));
-
- int arity = instr->arity();
- ConvertReceiverMode mode = hinstr->convert_mode();
- if (hinstr->HasVectorAndSlot()) {
- Register slot_register = ToRegister(instr->temp_slot());
- Register vector_register = ToRegister(instr->temp_vector());
- DCHECK(slot_register.is(edx));
- DCHECK(vector_register.is(ebx));
+ DCHECK(instr->HasPointerMap());
- AllowDeferredHandleDereference vector_structure_check;
- Handle<TypeFeedbackVector> vector = hinstr->feedback_vector();
- int index = vector->GetIndex(hinstr->slot());
+ bool is_tail_call = hinstr->tail_call_mode() == TailCallMode::kAllow;
- __ mov(vector_register, vector);
- __ mov(slot_register, Immediate(Smi::FromInt(index)));
+ if (is_tail_call) {
+ DCHECK(!info()->saves_caller_doubles());
+ ParameterCount actual(instr->arity());
+ // It is safe to use ebx, ecx and edx as scratch registers here given that
+ // 1) we are not going to return to caller function anyway,
+ // 2) ebx (expected arguments count) and edx (new.target) will be
+ // initialized below.
+ PrepareForTailCall(actual, ebx, ecx, edx);
+ }
- Handle<Code> ic =
- CodeFactory::CallICInOptimizedCode(isolate(), arity, mode).code();
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
+ Handle<JSFunction> known_function = hinstr->known_function();
+ if (known_function.is_null()) {
+ LPointerMap* pointers = instr->pointer_map();
+ SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
+ ParameterCount actual(instr->arity());
+ InvokeFlag flag = is_tail_call ? JUMP_FUNCTION : CALL_FUNCTION;
+ __ InvokeFunction(edi, no_reg, actual, flag, generator);
} else {
- __ Set(eax, arity);
- CallCode(isolate()->builtins()->Call(mode), RelocInfo::CODE_TARGET, instr);
+ CallKnownFunction(known_function, hinstr->formal_parameter_count(),
+ instr->arity(), is_tail_call, instr);
}
}
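
The heart of PrepareForTailCall is choosing the caller's argument count: if the frame above is an arguments adaptor, the true count is the adaptor's Smi-tagged length and both frames are dropped together; otherwise the function's declared parameter count is used. In plain C++ (Smi untagging shown for 32-bit, where the tag is one bit):

    #include <cstdint>

    int CallerArgumentCount(bool has_adaptor_frame,
                            int32_t adaptor_length_smi,
                            int formal_parameter_count) {
      if (has_adaptor_frame) {
        return adaptor_length_smi >> 1;  // SmiUntag: drop the tag bit
      }
      return formal_parameter_count;  // info()->literal()->parameter_count()
    }
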
@@ -5181,7 +5081,7 @@ void LCodeGen::DoCheckArrayBufferNotNeutered(
__ mov(scratch, FieldOperand(view, JSArrayBufferView::kBufferOffset));
__ test_b(FieldOperand(scratch, JSArrayBuffer::kBitFieldOffset),
- 1 << JSArrayBuffer::WasNeutered::kShift);
+ Immediate(1 << JSArrayBuffer::WasNeutered::kShift));
DeoptimizeIf(not_zero, instr, Deoptimizer::kOutOfBounds);
}
@@ -5197,8 +5097,7 @@ void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
InstanceType last;
instr->hydrogen()->GetCheckInterval(&first, &last);
- __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
- static_cast<int8_t>(first));
+ __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset), Immediate(first));
// If there is only one type in the interval check for equality.
if (first == last) {
@@ -5207,8 +5106,7 @@ void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
DeoptimizeIf(below, instr, Deoptimizer::kWrongInstanceType);
// Omit check for the last type.
if (last != LAST_TYPE) {
- __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
- static_cast<int8_t>(last));
+ __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset), Immediate(last));
DeoptimizeIf(above, instr, Deoptimizer::kWrongInstanceType);
}
}
@@ -5219,7 +5117,7 @@ void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
if (base::bits::IsPowerOfTwo32(mask)) {
DCHECK(tag == 0 || base::bits::IsPowerOfTwo32(tag));
- __ test_b(FieldOperand(temp, Map::kInstanceTypeOffset), mask);
+ __ test_b(FieldOperand(temp, Map::kInstanceTypeOffset), Immediate(mask));
DeoptimizeIf(tag == 0 ? not_zero : zero, instr,
Deoptimizer::kWrongInstanceType);
} else {
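
DoCheckInstanceType keeps its two strategies; only the assembler calls now take Immediate operands. When the mask is a power of two, the tag must be zero or equal to the mask, so a single test_b settles the check. A standalone restatement:

    #include <cstdint>

    bool IsPowerOfTwo32(uint32_t x) { return x != 0 && (x & (x - 1)) == 0; }

    // True when the instance type passes the check (i.e. no deopt).
    bool InstanceTypeOk(uint8_t type, uint8_t mask, uint8_t tag) {
      if (IsPowerOfTwo32(mask)) {
        // test_b; deopt on not_zero when tag == 0, on zero otherwise.
        return tag == 0 ? (type & mask) == 0 : (type & mask) != 0;
      }
      return (type & mask) == tag;  // general path: and + cmp
    }
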
@@ -5589,13 +5487,6 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
}
-void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
- DCHECK(ToRegister(instr->value()).is(eax));
- __ push(eax);
- CallRuntime(Runtime::kToFastProperties, 1, instr);
-}
-
-
void LCodeGen::DoTypeof(LTypeof* instr) {
DCHECK(ToRegister(instr->context()).is(esi));
DCHECK(ToRegister(instr->value()).is(ebx));
@@ -5662,7 +5553,7 @@ Condition LCodeGen::EmitTypeofIs(LTypeofIsAndBranch* instr, Register input) {
// Check for undetectable objects => true.
__ mov(input, FieldOperand(input, HeapObject::kMapOffset));
__ test_b(FieldOperand(input, Map::kBitFieldOffset),
- 1 << Map::kIsUndetectable);
+ Immediate(1 << Map::kIsUndetectable));
final_branch_condition = not_zero;
} else if (String::Equals(type_name, factory()->function_string())) {
@@ -5683,7 +5574,7 @@ Condition LCodeGen::EmitTypeofIs(LTypeofIsAndBranch* instr, Register input) {
__ j(below, false_label, false_distance);
// Check for callable or undetectable objects => false.
__ test_b(FieldOperand(input, Map::kBitFieldOffset),
- (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
+ Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
final_branch_condition = zero;
// clang-format off
@@ -5947,13 +5838,6 @@ void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
__ bind(&done);
}
-
-void LCodeGen::DoStoreFrameContext(LStoreFrameContext* instr) {
- Register context = ToRegister(instr->context());
- __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), context);
-}
-
-
#undef __
} // namespace internal