Diffstat (limited to 'deps/v8/src/builtins/ia32/builtins-ia32.cc')
-rw-r--r--  deps/v8/src/builtins/ia32/builtins-ia32.cc | 536
1 file changed, 312 insertions, 224 deletions
diff --git a/deps/v8/src/builtins/ia32/builtins-ia32.cc b/deps/v8/src/builtins/ia32/builtins-ia32.cc
index 00e9e720ae..bcffedfef2 100644
--- a/deps/v8/src/builtins/ia32/builtins-ia32.cc
+++ b/deps/v8/src/builtins/ia32/builtins-ia32.cc
@@ -112,16 +112,12 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
namespace {
-void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
- bool create_implicit_receiver,
- bool check_derived_construct) {
- Label post_instantiation_deopt_entry;
-
+void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax: number of arguments
- // -- esi: context
// -- edi: constructor function
// -- edx: new target
+ // -- esi: context
// -----------------------------------
// Enter a construct frame.
@@ -132,41 +128,10 @@ void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
__ SmiTag(eax);
__ push(esi);
__ push(eax);
-
- if (create_implicit_receiver) {
- // Allocate the new receiver object.
- __ Push(edi);
- __ Push(edx);
- __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
- RelocInfo::CODE_TARGET);
- __ mov(ebx, eax);
- __ Pop(edx);
- __ Pop(edi);
-
- // ----------- S t a t e -------------
- // -- edi: constructor function
- // -- ebx: newly allocated object
- // -- edx: new target
- // -----------------------------------
-
- // Retrieve smi-tagged arguments count from the stack.
- __ mov(eax, Operand(esp, 0));
- }
-
__ SmiUntag(eax);
- if (create_implicit_receiver) {
- // Push the allocated receiver to the stack. We need two copies
- // because we may have to return the original one and the calling
- // conventions dictate that the called function pops the receiver.
- __ push(ebx);
- __ push(ebx);
- } else {
- __ PushRoot(Heap::kTheHoleValueRootIndex);
- }
-
- // Deoptimizer re-enters stub code here.
- __ bind(&post_instantiation_deopt_entry);
+ // The receiver for the builtin/api call.
+ __ PushRoot(Heap::kTheHoleValueRootIndex);
// Set up pointer to last argument.
__ lea(ebx, Operand(ebp, StandardFrameConstants::kCallerSPOffset));
@@ -174,6 +139,16 @@ void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
// Copy arguments and receiver to the expression stack.
Label loop, entry;
__ mov(ecx, eax);
+ // ----------- S t a t e -------------
+ // -- eax: number of arguments (untagged)
+ // -- edi: constructor function
+ // -- edx: new target
+ // -- ebx: pointer to last argument
+ // -- ecx: counter
+ // -- sp[0*kPointerSize]: the hole (receiver)
+ // -- sp[1*kPointerSize]: number of arguments (tagged)
+ // -- sp[2*kPointerSize]: context
+ // -----------------------------------
__ jmp(&entry);
__ bind(&loop);
__ push(Operand(ebx, ecx, times_4, 0));
@@ -182,122 +157,223 @@ void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
__ j(greater_equal, &loop);
// Call the function.
+ // eax: number of arguments (untagged)
+ // edi: constructor function
+ // edx: new target
ParameterCount actual(eax);
__ InvokeFunction(edi, edx, actual, CALL_FUNCTION,
CheckDebugStepCallWrapper());
- // Store offset of return address for deoptimizer.
- if (create_implicit_receiver && !is_api_function) {
- masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
- masm->pc_offset());
- }
-
// Restore context from the frame.
__ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
-
- if (create_implicit_receiver) {
- // If the result is an object (in the ECMA sense), we should get rid
- // of the receiver and use the result.
- Label use_receiver, exit;
-
- // If the result is a smi, it is *not* an object in the ECMA sense.
- __ JumpIfSmi(eax, &use_receiver, Label::kNear);
-
- // If the type of the result (stored in its map) is less than
- // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
- __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
- __ j(above_equal, &exit, Label::kNear);
-
- // Throw away the result of the constructor invocation and use the
- // on-stack receiver as the result.
- __ bind(&use_receiver);
- __ mov(eax, Operand(esp, 0));
-
- // Restore the arguments count and leave the construct frame. The
- // arguments count is stored below the receiver.
- __ bind(&exit);
- __ mov(ebx, Operand(esp, 1 * kPointerSize));
- } else {
- __ mov(ebx, Operand(esp, 0));
- }
-
+ // Restore smi-tagged arguments count from the frame.
+ __ mov(ebx, Operand(ebp, ConstructFrameConstants::kLengthOffset));
// Leave construct frame.
}
- // ES6 9.2.2. Step 13+
- // Check that the result is not a Smi, indicating that the constructor result
- // from a derived class is neither undefined nor an Object.
- if (check_derived_construct) {
- Label do_throw, dont_throw;
- __ JumpIfSmi(eax, &do_throw, Label::kNear);
- STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
- __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
- __ j(above_equal, &dont_throw, Label::kNear);
- __ bind(&do_throw);
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
- }
- __ bind(&dont_throw);
- }
-
// Remove caller arguments from the stack and return.
STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
__ pop(ecx);
__ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)); // 1 ~ receiver
__ push(ecx);
- if (create_implicit_receiver) {
- __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1);
- }
__ ret(0);
+}
+
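The helper above leans on ia32 smi tagging for its epilogue: the argument count saved in the frame is a smi (the value shifted left by one), so lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)) scales the tagged count by 2 to drop count * kPointerSize bytes of arguments, plus one slot for the receiver. A standalone C++ sketch of that arithmetic, with invented names; this models the stub and is not V8 code:

// Hypothetical model of the smi-scaled stack drop above; not V8 code.
#include <cassert>
#include <cstdint>

constexpr int32_t kPointerSize = 4;  // ia32
constexpr int32_t kSmiTagSize = 1;

// Smi-tag an integer: shift left by one, tag bit is 0
// (STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0) in the stub).
int32_t SmiTag(int32_t value) { return value << kSmiTagSize; }

// Bytes dropped by lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)):
// the smi-tagged count scaled by 2 equals count * kPointerSize, plus one
// pointer for the receiver ("1 ~ receiver").
int32_t CallerArgumentBytes(int32_t smi_tagged_argc) {
  return smi_tagged_argc * 2 + 1 * kPointerSize;
}

int main() {
  // Three arguments: 3 * 4 argument bytes plus 4 bytes of receiver.
  assert(CallerArgumentBytes(SmiTag(3)) == 16);
  return 0;
}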
+// The construct stub for ES5 constructor functions and ES6 class constructors.
+void Generate_JSConstructStubGeneric(MacroAssembler* masm,
+ bool restrict_constructor_return) {
+ // ----------- S t a t e -------------
+ // -- eax: number of arguments (untagged)
+ // -- edi: constructor function
+ // -- edx: new target
+ // -- esi: context
+ // -- sp[...]: constructor arguments
+ // -----------------------------------
- // Store offset of trampoline address for deoptimizer. This is the bailout
- // point after the receiver instantiation but before the function invocation.
- // We need to restore some registers in order to continue the above code.
- if (create_implicit_receiver && !is_api_function) {
+ // Enter a construct frame.
+ {
+ FrameScope scope(masm, StackFrame::CONSTRUCT);
+ Label post_instantiation_deopt_entry, not_create_implicit_receiver;
+
+ // Preserve the incoming parameters on the stack.
+ __ mov(ecx, eax);
+ __ SmiTag(ecx);
+ __ Push(esi);
+ __ Push(ecx);
+ __ Push(edi);
+ __ Push(edx);
+
+ // ----------- S t a t e -------------
+ // -- sp[0*kPointerSize]: new target
+ // -- edi and sp[1*kPointerSize]: constructor function
+ // -- sp[2*kPointerSize]: argument count
+ // -- sp[3*kPointerSize]: context
+ // -----------------------------------
+
+ __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
+ __ test_b(FieldOperand(ebx, SharedFunctionInfo::kFunctionKindByteOffset),
+ Immediate(SharedFunctionInfo::kDerivedConstructorBitsWithinByte));
+ __ j(not_zero, &not_create_implicit_receiver);
+
+ // If not derived class constructor: Allocate the new receiver object.
+ __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1);
+ __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
+ RelocInfo::CODE_TARGET);
+ __ jmp(&post_instantiation_deopt_entry, Label::kNear);
+
+ // Else: use TheHoleValue as the receiver for the constructor call.
+ __ bind(&not_create_implicit_receiver);
+ __ LoadRoot(eax, Heap::kTheHoleValueRootIndex);
+
+ // ----------- S t a t e -------------
+ // -- eax: implicit receiver
+ // -- Slot 3 / sp[0*kPointerSize]: new target
+ // -- Slot 2 / sp[1*kPointerSize]: constructor function
+ // -- Slot 1 / sp[2*kPointerSize]: number of arguments (tagged)
+ // -- Slot 0 / sp[3*kPointerSize]: context
+ // -----------------------------------
+ // Deoptimizer enters here.
masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
masm->pc_offset());
+ __ bind(&post_instantiation_deopt_entry);
+
+ // Restore new target.
+ __ Pop(edx);
+
+ // Push the allocated receiver to the stack. We need two copies
+ // because we may have to return the original one and the calling
+ // conventions dictate that the called function pops the receiver.
+ __ Push(eax);
+ __ Push(eax);
// ----------- S t a t e -------------
- // -- eax : newly allocated object
- // -- esp[0] : constructor function
+ // -- edx: new target
+ // -- sp[0*kPointerSize]: implicit receiver
+ // -- sp[1*kPointerSize]: implicit receiver
+ // -- sp[2*kPointerSize]: constructor function
+ // -- sp[3*kPointerSize]: number of arguments (tagged)
+ // -- sp[4*kPointerSize]: context
// -----------------------------------
- __ pop(edi);
- __ push(eax);
- __ push(eax);
-
- // Retrieve smi-tagged arguments count from the stack.
+ // Restore constructor function and argument count.
+ __ mov(edi, Operand(ebp, ConstructFrameConstants::kConstructorOffset));
__ mov(eax, Operand(ebp, ConstructFrameConstants::kLengthOffset));
__ SmiUntag(eax);
- // Retrieve the new target value from the stack. This was placed into the
- // frame description in place of the receiver by the optimizing compiler.
- __ mov(edx, Operand(ebp, eax, times_pointer_size,
- StandardFrameConstants::kCallerSPOffset));
+ // Set up pointer to last argument.
+ __ lea(ebx, Operand(ebp, StandardFrameConstants::kCallerSPOffset));
+
+ // Copy arguments and receiver to the expression stack.
+ Label loop, entry;
+ __ mov(ecx, eax);
+ // ----------- S t a t e -------------
+ // -- eax: number of arguments (untagged)
+ // -- edx: new target
+ // -- ebx: pointer to last argument
+ // -- ecx: counter (untagged)
+ // -- sp[0*kPointerSize]: implicit receiver
+ // -- sp[1*kPointerSize]: implicit receiver
+ // -- edi and sp[2*kPointerSize]: constructor function
+ // -- sp[3*kPointerSize]: number of arguments (tagged)
+ // -- sp[4*kPointerSize]: context
+ // -----------------------------------
+ __ jmp(&entry, Label::kNear);
+ __ bind(&loop);
+ __ Push(Operand(ebx, ecx, times_pointer_size, 0));
+ __ bind(&entry);
+ __ dec(ecx);
+ __ j(greater_equal, &loop);
+
+ // Call the function.
+ ParameterCount actual(eax);
+ __ InvokeFunction(edi, edx, actual, CALL_FUNCTION,
+ CheckDebugStepCallWrapper());
+
+ // ----------- S t a t e -------------
+ // -- eax: constructor result
+ // -- sp[0*kPointerSize]: implicit receiver
+ // -- sp[1*kPointerSize]: constructor function
+ // -- sp[2*kPointerSize]: number of arguments
+ // -- sp[3*kPointerSize]: context
+ // -----------------------------------
+
+ // Store offset of return address for deoptimizer.
+ masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
+ masm->pc_offset());
+
+ // Restore context from the frame.
+ __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
+
+ // If the result is an object (in the ECMA sense), we should get rid
+ // of the receiver and use the result; see ECMA-262 section 13.2.2-7
+ // on page 74.
+ Label use_receiver, do_throw, other_result, leave_frame;
+
+ // If the result is undefined, we jump out to using the implicit receiver.
+ __ JumpIfRoot(eax, Heap::kUndefinedValueRootIndex, &use_receiver,
+ Label::kNear);
- // Continue with constructor function invocation.
- __ jmp(&post_instantiation_deopt_entry);
+ // Otherwise we do a smi check and fall through to check if the return value
+ // is a valid receiver.
+
+ // If the result is a smi, it is *not* an object in the ECMA sense.
+ __ JumpIfSmi(eax, &other_result, Label::kNear);
+
+ // If the type of the result (stored in its map) is less than
+ // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
+ STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
+ __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
+ __ j(above_equal, &leave_frame, Label::kNear);
+
+ __ bind(&other_result);
+ // The result is now neither undefined nor an object.
+ if (restrict_constructor_return) {
+ // Throw if the constructor function is a class constructor.
+ __ mov(ebx, Operand(ebp, ConstructFrameConstants::kConstructorOffset));
+ __ mov(ebx, FieldOperand(ebx, JSFunction::kSharedFunctionInfoOffset));
+ __ test_b(FieldOperand(ebx, SharedFunctionInfo::kFunctionKindByteOffset),
+ Immediate(SharedFunctionInfo::kClassConstructorBitsWithinByte));
+ __ j(Condition::zero, &use_receiver, Label::kNear);
+ } else {
+ __ jmp(&use_receiver, Label::kNear);
+ }
+
+ __ bind(&do_throw);
+ __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
+
+ // Throw away the result of the constructor invocation and use the
+ // on-stack receiver as the result.
+ __ bind(&use_receiver);
+ __ mov(eax, Operand(esp, 0 * kPointerSize));
+ __ JumpIfRoot(eax, Heap::kTheHoleValueRootIndex, &do_throw);
+
+ __ bind(&leave_frame);
+ // Restore smi-tagged arguments count from the frame.
+ __ mov(ebx, Operand(ebp, ConstructFrameConstants::kLengthOffset));
+ // Leave construct frame.
}
+ // Remove caller arguments from the stack and return.
+ STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
+ __ pop(ecx);
+ __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)); // 1 ~ receiver
+ __ push(ecx);
+ __ ret(0);
}
-
} // namespace
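Taken together, the generic stub implements the [[Construct]] result semantics of ES6 9.2.2: an object result wins, undefined falls back to the implicit receiver, and any other result either throws (a restricted return from a class constructor) or also falls back to the receiver, throwing if the receiver is still the hole because a derived constructor never called super(). A hedged, standalone C++ model of that decision tree; all types and names are invented, and the real logic is the generated machine code above:

// Invented types modelling the use_receiver/do_throw/leave_frame paths.
#include <stdexcept>

enum class Kind { kUndefined, kTheHole, kSmi, kJSReceiver, kOther };
struct Value { Kind kind; };

Value ConstructStubResult(Value result, Value stack_receiver,
                          bool restrict_constructor_return,
                          bool is_class_constructor) {
  // leave_frame: a JS receiver returned from the constructor wins.
  if (result.kind == Kind::kJSReceiver) return result;
  // other_result: a non-undefined, non-receiver result throws when returns
  // are restricted and the function is a class constructor.
  if (result.kind != Kind::kUndefined && restrict_constructor_return &&
      is_class_constructor) {
    throw std::runtime_error("TypeError: constructor returned non-object");
  }
  // use_receiver: fall back to the implicit receiver saved on the stack.
  // TheHole there means a derived constructor never called super().
  if (stack_receiver.kind == Kind::kTheHole) {
    throw std::runtime_error("TypeError: constructor returned non-object");
  }
  return stack_receiver;
}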
-void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false, true, false);
+void Builtins::Generate_JSConstructStubGenericRestrictedReturn(
+ MacroAssembler* masm) {
+ return Generate_JSConstructStubGeneric(masm, true);
+}
+void Builtins::Generate_JSConstructStubGenericUnrestrictedReturn(
+ MacroAssembler* masm) {
+ return Generate_JSConstructStubGeneric(masm, false);
}
-
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, true, false, false);
+ Generate_JSBuiltinsConstructStubHelper(masm);
}
-
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false, false, false);
-}
-
-void Builtins::Generate_JSBuiltinsConstructStubForDerived(
- MacroAssembler* masm) {
- Generate_JSConstructStubHelper(masm, false, false, true);
+ Generate_JSBuiltinsConstructStubHelper(masm);
}
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
@@ -541,6 +617,37 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
__ jmp(&stepping_prepared);
}
+static void ReplaceClosureEntryWithOptimizedCode(
+ MacroAssembler* masm, Register optimized_code_entry, Register closure,
+ Register scratch1, Register scratch2, Register scratch3) {
+ Register native_context = scratch1;
+
+ // Store the optimized code in the closure.
+ __ lea(optimized_code_entry,
+ FieldOperand(optimized_code_entry, Code::kHeaderSize));
+ __ mov(FieldOperand(closure, JSFunction::kCodeEntryOffset),
+ optimized_code_entry);
+ __ RecordWriteCodeEntryField(closure, optimized_code_entry, scratch2);
+
+ // Link the closure into the optimized function list.
+ __ mov(native_context, NativeContextOperand());
+ __ mov(scratch3,
+ ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
+ __ mov(FieldOperand(closure, JSFunction::kNextFunctionLinkOffset), scratch3);
+ __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, scratch3,
+ scratch2, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ const int function_list_offset =
+ Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
+ __ mov(ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST),
+ closure);
+ // Save closure before the write barrier.
+ __ mov(scratch3, closure);
+ __ RecordWriteContextSlot(native_context, function_list_offset, closure,
+ scratch2, kDontSaveFPRegs);
+ __ mov(closure, scratch3);
+}
+
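ReplaceClosureEntryWithOptimizedCode does two jobs: it patches the closure's code entry to point at the optimized code, and it splices the closure onto the head of the native context's intrusive optimized-functions list. A minimal C++ model of that pointer shuffle; struct names are hypothetical and the real stub's write barriers are elided:

// Hypothetical structs modelling the closure self-heal and list splice.
struct Closure {
  const void* code_entry = nullptr;
  Closure* next_function_link = nullptr;
};
struct NativeContext {
  Closure* optimized_functions_list = nullptr;  // OPTIMIZED_FUNCTIONS_LIST
};

void ReplaceClosureEntry(Closure* closure, const void* optimized_code_entry,
                         NativeContext* native_context) {
  // Store the optimized code entry in the closure (the real stub also emits
  // a write barrier via RecordWriteCodeEntryField).
  closure->code_entry = optimized_code_entry;
  // Link the closure at the head of the optimized-functions list (again with
  // write barriers for both stores in the real stub).
  closure->next_function_link = native_context->optimized_functions_list;
  native_context->optimized_functions_list = closure;
}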
static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
Register scratch2) {
Register args_count = scratch1;
@@ -588,6 +695,19 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
__ push(edi); // Callee's JS function.
__ push(edx); // Callee's new target.
+ // First check if there is optimized code in the feedback vector which we
+ // could call instead.
+ Label switch_to_optimized_code;
+ Register optimized_code_entry = ecx;
+ __ mov(ebx, FieldOperand(edi, JSFunction::kFeedbackVectorOffset));
+ __ mov(ebx, FieldOperand(ebx, Cell::kValueOffset));
+ __ mov(optimized_code_entry,
+ FieldOperand(ebx, FeedbackVector::kOptimizedCodeIndex * kPointerSize +
+ FeedbackVector::kHeaderSize));
+ __ mov(optimized_code_entry,
+ FieldOperand(optimized_code_entry, WeakCell::kValueOffset));
+ __ JumpIfNotSmi(optimized_code_entry, &switch_to_optimized_code);
+
// Get the bytecode array from the function object (or from the DebugInfo if
// it is present) and load it into kInterpreterBytecodeArrayRegister.
__ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
@@ -704,6 +824,31 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
__ mov(FieldOperand(edi, JSFunction::kCodeEntryOffset), ecx);
__ RecordWriteCodeEntryField(edi, ecx, ebx);
__ jmp(ecx);
+
+ // If there is optimized code on the type feedback vector, check if it is good
+ // to run, and if so, self-heal the closure and call the optimized code.
+ __ bind(&switch_to_optimized_code);
+ Label gotta_call_runtime;
+
+ // Check if the optimized code is marked for deopt.
+ __ test(FieldOperand(optimized_code_entry, Code::kKindSpecificFlags1Offset),
+ Immediate(1 << Code::kMarkedForDeoptimizationBit));
+ __ j(not_zero, &gotta_call_runtime);
+
+ // Optimized code is good, get it into the closure and link the closure into
+ // the optimized functions list, then tail call the optimized code.
+ __ push(edx);
+ ReplaceClosureEntryWithOptimizedCode(masm, optimized_code_entry, edi, edx,
+ eax, ebx);
+ __ pop(edx);
+ __ leave();
+ __ jmp(optimized_code_entry);
+
+ // Optimized code is marked for deopt; bail out to the EvictOptimizedCodeSlot
+ // runtime function, which will clear the feedback vector's optimized code
+ // slot.
+ __ bind(&gotta_call_runtime);
+ __ leave();
+ GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
}
static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
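The new fast path in the entry trampoline reduces to a three-way decision on the feedback vector's optimized-code slot: a smi sentinel means no optimized code, a code object marked for deoptimization is evicted via the runtime, and anything else is tail-called after self-healing the closure. A hedged C++ sketch of that decision; the enum and function are invented for illustration:

// Invented enum/function summarizing the branch structure above.
enum class EntryAction {
  kRunInterpreter,        // slot holds a smi sentinel: no optimized code
  kEvictOptimizedCode,    // Runtime::kEvictOptimizedCodeSlot clears the slot
  kTailCallOptimizedCode  // self-heal the closure and jump to the code
};

EntryAction ChooseEntry(bool slot_is_smi, bool marked_for_deoptimization) {
  if (slot_is_smi) return EntryAction::kRunInterpreter;
  if (marked_for_deoptimization) return EntryAction::kEvictOptimizedCode;
  return EntryAction::kTailCallOptimizedCode;
}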
@@ -1081,9 +1226,8 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
// -- edi : target function (preserved for callee)
// -----------------------------------
// First look up code; maybe we don't need to compile!
- Label gotta_call_runtime, gotta_call_runtime_no_stack;
+ Label gotta_call_runtime;
Label try_shared;
- Label loop_top, loop_bottom;
Register closure = edi;
Register new_target = edx;
@@ -1092,96 +1236,43 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
// Do we have a valid feedback vector?
__ mov(ebx, FieldOperand(closure, JSFunction::kFeedbackVectorOffset));
__ mov(ebx, FieldOperand(ebx, Cell::kValueOffset));
- __ JumpIfRoot(ebx, Heap::kUndefinedValueRootIndex,
- &gotta_call_runtime_no_stack);
-
- __ push(argument_count);
- __ push(new_target);
- __ push(closure);
-
- Register map = argument_count;
- Register index = ebx;
- __ mov(map, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
- __ mov(map, FieldOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
- __ mov(index, FieldOperand(map, FixedArray::kLengthOffset));
- __ cmp(index, Immediate(Smi::FromInt(2)));
- __ j(less, &try_shared);
-
- // edx : native context
- // ebx : length / index
- // eax : optimized code map
- // stack[0] : new target
- // stack[4] : closure
- Register native_context = edx;
- __ mov(native_context, NativeContextOperand());
-
- __ bind(&loop_top);
- Register temp = edi;
+ __ JumpIfRoot(ebx, Heap::kUndefinedValueRootIndex, &gotta_call_runtime);
- // Does the native context match?
- __ mov(temp, FieldOperand(map, index, times_half_pointer_size,
- SharedFunctionInfo::kOffsetToPreviousContext));
- __ mov(temp, FieldOperand(temp, WeakCell::kValueOffset));
- __ cmp(temp, native_context);
- __ j(not_equal, &loop_bottom);
-
- // Code available?
+ // Is optimized code available in the feedback vector?
Register entry = ecx;
- __ mov(entry, FieldOperand(map, index, times_half_pointer_size,
- SharedFunctionInfo::kOffsetToPreviousCachedCode));
+ __ mov(entry,
+ FieldOperand(ebx, FeedbackVector::kOptimizedCodeIndex * kPointerSize +
+ FeedbackVector::kHeaderSize));
__ mov(entry, FieldOperand(entry, WeakCell::kValueOffset));
__ JumpIfSmi(entry, &try_shared);
- // Found code. Get it into the closure and return.
- __ pop(closure);
- // Store code entry in the closure.
- __ lea(entry, FieldOperand(entry, Code::kHeaderSize));
- __ mov(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
- __ RecordWriteCodeEntryField(closure, entry, eax);
+ // Found code. Check whether it is marked for deopt; if so, call into the
+ // runtime to clear the optimized code slot.
+ __ test(FieldOperand(entry, Code::kKindSpecificFlags1Offset),
+ Immediate(1 << Code::kMarkedForDeoptimizationBit));
+ __ j(not_zero, &gotta_call_runtime);
- // Link the closure into the optimized function list.
- // ecx : code entry
- // edx : native context
- // edi : closure
- __ mov(ebx,
- ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
- __ mov(FieldOperand(closure, JSFunction::kNextFunctionLinkOffset), ebx);
- __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, ebx, eax,
- kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
- const int function_list_offset =
- Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
- __ mov(ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST),
- closure);
- // Save closure before the write barrier.
- __ mov(ebx, closure);
- __ RecordWriteContextSlot(native_context, function_list_offset, closure, eax,
- kDontSaveFPRegs);
- __ mov(closure, ebx);
+ // Code is good, get it into the closure and tail call.
+ __ push(argument_count);
+ __ push(new_target);
+ ReplaceClosureEntryWithOptimizedCode(masm, entry, closure, edx, eax, ebx);
__ pop(new_target);
__ pop(argument_count);
__ jmp(entry);
- __ bind(&loop_bottom);
- __ sub(index, Immediate(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
- __ cmp(index, Immediate(Smi::FromInt(1)));
- __ j(greater, &loop_top);
-
- // We found no code.
+ // We found no optimized code.
__ bind(&try_shared);
- __ pop(closure);
- __ pop(new_target);
- __ pop(argument_count);
__ mov(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
// Is the shared function marked for tier up?
__ test_b(FieldOperand(entry, SharedFunctionInfo::kMarkedForTierUpByteOffset),
Immediate(1 << SharedFunctionInfo::kMarkedForTierUpBitWithinByte));
- __ j(not_zero, &gotta_call_runtime_no_stack);
+ __ j(not_zero, &gotta_call_runtime);
// If SFI points to anything other than CompileLazy, install that.
__ mov(entry, FieldOperand(entry, SharedFunctionInfo::kCodeOffset));
__ Move(ebx, masm->CodeObject());
__ cmp(entry, ebx);
- __ j(equal, &gotta_call_runtime_no_stack);
+ __ j(equal, &gotta_call_runtime);
// Install the SFI's code entry.
__ lea(entry, FieldOperand(entry, Code::kHeaderSize));
@@ -1190,10 +1281,6 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ jmp(entry);
__ bind(&gotta_call_runtime);
- __ pop(closure);
- __ pop(new_target);
- __ pop(argument_count);
- __ bind(&gotta_call_runtime_no_stack);
GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}
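With the context-keyed optimized code map gone, CompileLazy's lookup becomes a straight-line sequence: try the feedback vector's optimized-code slot, then the SharedFunctionInfo's code, then fall back to the runtime. A hedged C++ sketch of the order of checks; the names are invented, and each predicate corresponds to one branch above:

// Invented enum/function; each predicate corresponds to a branch above.
enum class LazyAction {
  kTailCallOptimizedCode,  // valid optimized code in the feedback vector
  kInstallSharedCode,      // the SFI holds real code: install and jump
  kCallRuntimeCompileLazy  // everything else: Runtime::kCompileLazy
};

LazyAction CompileLazyLookup(bool has_feedback_vector, bool slot_has_code,
                             bool code_marked_for_deopt,
                             bool sfi_marked_for_tier_up,
                             bool sfi_code_is_compile_lazy) {
  if (!has_feedback_vector) return LazyAction::kCallRuntimeCompileLazy;
  if (slot_has_code) {
    return code_marked_for_deopt ? LazyAction::kCallRuntimeCompileLazy
                                 : LazyAction::kTailCallOptimizedCode;
  }
  // try_shared: no optimized code; consider the SharedFunctionInfo's code.
  if (sfi_marked_for_tier_up || sfi_code_is_compile_lazy) {
    return LazyAction::kCallRuntimeCompileLazy;
  }
  return LazyAction::kInstallSharedCode;
}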
@@ -2222,15 +2309,18 @@ void Builtins::Generate_Apply(MacroAssembler* masm) {
}
// static
-void Builtins::Generate_CallForwardVarargs(MacroAssembler* masm,
- Handle<Code> code) {
+void Builtins::Generate_ForwardVarargs(MacroAssembler* masm,
+ Handle<Code> code) {
// ----------- S t a t e -------------
- // -- edi : the target to call (can be any Object)
- // -- ecx : start index (to support rest parameters)
- // -- esp[0] : return address.
- // -- esp[4] : thisArgument
+ // -- eax : the number of arguments (not including the receiver)
+ // -- edi : the target to call (can be any Object)
+ // -- edx : the new target (for [[Construct]] calls)
+ // -- ecx : start index (to support rest parameters)
// -----------------------------------
+ // Preserve new.target (in case of [[Construct]]).
+ __ movd(xmm0, edx);
+
// Check if we have an arguments adaptor frame below the function frame.
Label arguments_adaptor, arguments_done;
__ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
@@ -2238,24 +2328,24 @@ void Builtins::Generate_CallForwardVarargs(MacroAssembler* masm,
Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
__ j(equal, &arguments_adaptor, Label::kNear);
{
- __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
- __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
- __ mov(eax,
- FieldOperand(eax, SharedFunctionInfo::kFormalParameterCountOffset));
+ __ mov(edx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
+ __ mov(edx, FieldOperand(edx, JSFunction::kSharedFunctionInfoOffset));
+ __ mov(edx,
+ FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
__ mov(ebx, ebp);
}
__ jmp(&arguments_done, Label::kNear);
__ bind(&arguments_adaptor);
{
// Just load the length from the ArgumentsAdaptorFrame.
- __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
+ __ mov(edx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
}
__ bind(&arguments_done);
- Label stack_empty, stack_done;
- __ SmiUntag(eax);
- __ sub(eax, ecx);
- __ j(less_equal, &stack_empty);
+ Label stack_done;
+ __ SmiUntag(edx);
+ __ sub(edx, ecx);
+ __ j(less_equal, &stack_done);
{
// Check for stack overflow.
{
@@ -2270,7 +2360,7 @@ void Builtins::Generate_CallForwardVarargs(MacroAssembler* masm,
__ add(ecx, esp);
__ sar(ecx, kPointerSizeLog2);
// Check if the arguments will overflow the stack.
- __ cmp(ecx, eax);
+ __ cmp(ecx, edx);
__ j(greater, &done, Label::kNear); // Signed comparison.
__ TailCallRuntime(Runtime::kThrowStackOverflow);
__ bind(&done);
@@ -2279,25 +2369,23 @@ void Builtins::Generate_CallForwardVarargs(MacroAssembler* masm,
// Forward the arguments from the caller frame.
{
Label loop;
- __ mov(ecx, eax);
- __ pop(edx);
+ __ add(eax, edx);
+ __ PopReturnAddressTo(ecx);
__ bind(&loop);
{
- __ Push(Operand(ebx, ecx, times_pointer_size, 1 * kPointerSize));
- __ dec(ecx);
+ __ Push(Operand(ebx, edx, times_pointer_size, 1 * kPointerSize));
+ __ dec(edx);
__ j(not_zero, &loop);
}
- __ push(edx);
+ __ PushReturnAddressFrom(ecx);
}
}
- __ jmp(&stack_done, Label::kNear);
- __ bind(&stack_empty);
- {
- // We just pass the receiver, which is already on the stack.
- __ Move(eax, Immediate(0));
- }
__ bind(&stack_done);
+ // Restore new.target (in case of [[Construct]]).
+ __ movd(edx, xmm0);
+
+ // Tail-call to the {code} handler.
__ Jump(code, RelocInfo::CODE_TARGET);
}
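In the rewritten Generate_ForwardVarargs, eax keeps the incoming argument count throughout while edx temporarily holds the caller's count, so the forwarded total is the incoming count plus max(caller_count - start_index, 0). A small C++ model of that arithmetic; the function name is invented:

// Invented helper modelling the count arithmetic; not V8 code.
#include <cassert>

int ForwardedArgumentCount(int incoming_count, int caller_count,
                           int start_index) {
  int to_copy = caller_count - start_index;
  // Mirrors j(less_equal, &stack_done): nothing to forward.
  return to_copy > 0 ? incoming_count + to_copy : incoming_count;
}

int main() {
  assert(ForwardedArgumentCount(1, 5, 2) == 4);  // forwards 3 caller args
  assert(ForwardedArgumentCount(1, 2, 3) == 1);  // start index past the end
  return 0;
}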