summaryrefslogtreecommitdiff
path: root/deps/v8/src/builtins/ia32/builtins-ia32.cc
diff options
context:
space:
mode:
Diffstat (limited to 'deps/v8/src/builtins/ia32/builtins-ia32.cc')
-rw-r--r--deps/v8/src/builtins/ia32/builtins-ia32.cc973
1 files changed, 554 insertions, 419 deletions
diff --git a/deps/v8/src/builtins/ia32/builtins-ia32.cc b/deps/v8/src/builtins/ia32/builtins-ia32.cc
index 4707667bbf..063daed5e3 100644
--- a/deps/v8/src/builtins/ia32/builtins-ia32.cc
+++ b/deps/v8/src/builtins/ia32/builtins-ia32.cc
@@ -22,6 +22,7 @@ namespace internal {
void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
ExitFrameType exit_frame_type) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
__ mov(kJavaScriptCallExtraArg1Register,
Immediate(ExternalReference::Create(address)));
if (exit_frame_type == BUILTIN_EXIT) {
@@ -70,6 +71,7 @@ static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
namespace {
void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
// ----------- S t a t e -------------
// -- eax: number of arguments
// -- edi: constructor function
@@ -88,10 +90,10 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
__ SmiUntag(eax);
// The receiver for the builtin/api call.
- __ PushRoot(Heap::kTheHoleValueRootIndex);
+ __ PushRoot(RootIndex::kTheHoleValue);
- // Set up pointer to last argument.
- __ lea(ebx, Operand(ebp, StandardFrameConstants::kCallerSPOffset));
+ // Set up pointer to last argument. We are using esi as scratch register.
+ __ lea(esi, Operand(ebp, StandardFrameConstants::kCallerSPOffset));
// Copy arguments and receiver to the expression stack.
Label loop, entry;
@@ -100,7 +102,7 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
// -- eax: number of arguments (untagged)
// -- edi: constructor function
// -- edx: new target
- // -- ebx: pointer to last argument
+ // -- esi: pointer to last argument
// -- ecx: counter
// -- sp[0*kPointerSize]: the hole (receiver)
// -- sp[1*kPointerSize]: number of arguments (tagged)
@@ -108,7 +110,7 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
// -----------------------------------
__ jmp(&entry);
__ bind(&loop);
- __ push(Operand(ebx, ecx, times_4, 0));
+ __ push(Operand(esi, ecx, times_4, 0));
__ bind(&entry);
__ dec(ecx);
__ j(greater_equal, &loop);
@@ -118,27 +120,54 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
// edi: constructor function
// edx: new target
ParameterCount actual(eax);
+ // Reload context from the frame.
+ __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
__ InvokeFunction(edi, edx, actual, CALL_FUNCTION);
// Restore context from the frame.
__ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
// Restore smi-tagged arguments count from the frame.
- __ mov(ebx, Operand(ebp, ConstructFrameConstants::kLengthOffset));
+ __ mov(edx, Operand(ebp, ConstructFrameConstants::kLengthOffset));
// Leave construct frame.
}
// Remove caller arguments from the stack and return.
STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
- __ pop(ecx);
- __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)); // 1 ~ receiver
- __ push(ecx);
+ __ PopReturnAddressTo(ecx);
+ __ lea(esp, Operand(esp, edx, times_2, 1 * kPointerSize)); // 1 ~ receiver
+ __ PushReturnAddressFrom(ecx);
__ ret(0);
}
+static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
+ Register scratch, Label* stack_overflow,
+ bool include_receiver = false) {
+ // Check the stack for overflow. We are not trying to catch
+ // interruptions (e.g. debug break and preemption) here, so the "real stack
+ // limit" is checked.
+ ExternalReference real_stack_limit =
+ ExternalReference::address_of_real_stack_limit(masm->isolate());
+ // Compute the space that is left as a negative number in scratch. If
+ // we already overflowed, this will be a positive number.
+ __ mov(scratch, __ StaticVariable(real_stack_limit));
+ __ sub(scratch, esp);
+ // Add the size of the arguments.
+ static_assert(kPointerSize == 4,
+ "The next instruction assumes kPointerSize == 4");
+ __ lea(scratch, Operand(scratch, num_args, times_4, 0));
+ if (include_receiver) {
+ __ add(scratch, Immediate(kPointerSize));
+ }
+ // See if we overflowed, i.e. scratch is positive.
+ __ cmp(scratch, Immediate(0));
+ __ j(greater, stack_overflow); // Signed comparison.
+}
+
} // namespace
// The construct stub for ES5 constructor functions and ES6 class constructors.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
// ----------- S t a t e -------------
// -- eax: number of arguments (untagged)
// -- edi: constructor function
@@ -158,7 +187,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
__ Push(esi);
__ Push(ecx);
__ Push(edi);
- __ PushRoot(Heap::kTheHoleValueRootIndex);
+ __ PushRoot(RootIndex::kTheHoleValue);
__ Push(edx);
// ----------- S t a t e -------------
@@ -169,8 +198,8 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// -- sp[4*kPointerSize]: context
// -----------------------------------
- __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
- __ test(FieldOperand(ebx, SharedFunctionInfo::kFlagsOffset),
+ __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
+ __ test(FieldOperand(eax, SharedFunctionInfo::kFlagsOffset),
Immediate(SharedFunctionInfo::IsDerivedConstructorBit::kMask));
__ j(not_zero, &not_create_implicit_receiver);
@@ -182,7 +211,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// Else: use TheHoleValue as receiver for constructor call
__ bind(&not_create_implicit_receiver);
- __ LoadRoot(eax, Heap::kTheHoleValueRootIndex);
+ __ LoadRoot(eax, RootIndex::kTheHoleValue);
// ----------- S t a t e -------------
// -- eax: implicit receiver
@@ -216,13 +245,27 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// -- sp[5*kPointerSize]: context
// -----------------------------------
- // Restore constructor function and argument count.
- __ mov(edi, Operand(ebp, ConstructFrameConstants::kConstructorOffset));
+ // Restore argument count.
__ mov(eax, Operand(ebp, ConstructFrameConstants::kLengthOffset));
__ SmiUntag(eax);
// Set up pointer to last argument.
- __ lea(ebx, Operand(ebp, StandardFrameConstants::kCallerSPOffset));
+ __ lea(edi, Operand(ebp, StandardFrameConstants::kCallerSPOffset));
+
+ // Check if we have enough stack space to push all arguments.
+ // Argument count in eax. Clobbers ecx.
+ Label enough_stack_space, stack_overflow;
+ Generate_StackOverflowCheck(masm, eax, ecx, &stack_overflow);
+ __ jmp(&enough_stack_space);
+
+ __ bind(&stack_overflow);
+ // Restore context from the frame.
+ __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
+ __ CallRuntime(Runtime::kThrowStackOverflow);
+ // This should be unreachable.
+ __ int3();
+
+ __ bind(&enough_stack_space);
// Copy arguments and receiver to the expression stack.
Label loop, entry;
@@ -230,23 +273,24 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax: number of arguments (untagged)
// -- edx: new target
- // -- ebx: pointer to last argument
+ // -- edi: pointer to last argument
// -- ecx: counter (tagged)
// -- sp[0*kPointerSize]: implicit receiver
// -- sp[1*kPointerSize]: implicit receiver
// -- sp[2*kPointerSize]: padding
- // -- edi and sp[3*kPointerSize]: constructor function
+ // -- sp[3*kPointerSize]: constructor function
// -- sp[4*kPointerSize]: number of arguments (tagged)
// -- sp[5*kPointerSize]: context
// -----------------------------------
__ jmp(&entry, Label::kNear);
__ bind(&loop);
- __ Push(Operand(ebx, ecx, times_pointer_size, 0));
+ __ Push(Operand(edi, ecx, times_pointer_size, 0));
__ bind(&entry);
__ dec(ecx);
__ j(greater_equal, &loop);
- // Call the function.
+ // Restore and and call the constructor function.
+ __ mov(edi, Operand(ebp, ConstructFrameConstants::kConstructorOffset));
ParameterCount actual(eax);
__ InvokeFunction(edi, edx, actual, CALL_FUNCTION);
@@ -272,8 +316,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
Label use_receiver, do_throw, leave_frame;
// If the result is undefined, we jump out to using the implicit receiver.
- __ JumpIfRoot(eax, Heap::kUndefinedValueRootIndex, &use_receiver,
- Label::kNear);
+ __ JumpIfRoot(eax, RootIndex::kUndefinedValue, &use_receiver, Label::kNear);
// Otherwise we do a smi check and fall through to check if the return value
// is a valid receiver.
@@ -295,57 +338,33 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// on-stack receiver as the result.
__ bind(&use_receiver);
__ mov(eax, Operand(esp, 0 * kPointerSize));
- __ JumpIfRoot(eax, Heap::kTheHoleValueRootIndex, &do_throw);
+ __ JumpIfRoot(eax, RootIndex::kTheHoleValue, &do_throw);
__ bind(&leave_frame);
// Restore smi-tagged arguments count from the frame.
- __ mov(ebx, Operand(ebp, ConstructFrameConstants::kLengthOffset));
+ __ mov(edx, Operand(ebp, ConstructFrameConstants::kLengthOffset));
// Leave construct frame.
}
// Remove caller arguments from the stack and return.
STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
__ pop(ecx);
- __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)); // 1 ~ receiver
+ __ lea(esp, Operand(esp, edx, times_2, 1 * kPointerSize)); // 1 ~ receiver
__ push(ecx);
__ ret(0);
}
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
Generate_JSBuiltinsConstructStubHelper(masm);
}
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
FrameScope scope(masm, StackFrame::INTERNAL);
__ push(edi);
__ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}
-static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
- Register scratch1, Register scratch2,
- Label* stack_overflow,
- bool include_receiver = false) {
- // Check the stack for overflow. We are not trying to catch
- // interruptions (e.g. debug break and preemption) here, so the "real stack
- // limit" is checked.
- ExternalReference real_stack_limit =
- ExternalReference::address_of_real_stack_limit(masm->isolate());
- __ mov(scratch1, __ StaticVariable(real_stack_limit));
- // Make scratch2 the space we have left. The stack might already be overflowed
- // here which will cause scratch2 to become negative.
- __ mov(scratch2, esp);
- __ sub(scratch2, scratch1);
- // Make scratch1 the space we need for the array when it is unrolled onto the
- // stack.
- __ mov(scratch1, num_args);
- if (include_receiver) {
- __ add(scratch1, Immediate(1));
- }
- __ shl(scratch1, kPointerSizeLog2);
- // Check if the arguments will overflow the stack.
- __ cmp(scratch2, scratch1);
- __ j(less_equal, stack_overflow); // Signed comparison.
-}
-
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
bool is_construct) {
ProfileEntryHookStub::MaybeCallEntryHook(masm);
@@ -353,26 +372,29 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
{
FrameScope scope(masm, StackFrame::INTERNAL);
+ const Register scratch1 = edx;
+ const Register scratch2 = edi;
+
// Setup the context (we need to use the caller context from the isolate).
ExternalReference context_address = ExternalReference::Create(
IsolateAddressId::kContextAddress, masm->isolate());
__ mov(esi, __ StaticVariable(context_address));
- // Load the previous frame pointer (ebx) to access C arguments
- __ mov(ebx, Operand(ebp, 0));
+ // Load the previous frame pointer (edx) to access C arguments
+ __ mov(scratch1, Operand(ebp, 0));
// Push the function and the receiver onto the stack.
- __ push(Operand(ebx, EntryFrameConstants::kFunctionArgOffset));
- __ push(Operand(ebx, EntryFrameConstants::kReceiverArgOffset));
+ __ push(Operand(scratch1, EntryFrameConstants::kFunctionArgOffset));
+ __ push(Operand(scratch1, EntryFrameConstants::kReceiverArgOffset));
// Load the number of arguments and setup pointer to the arguments.
- __ mov(eax, Operand(ebx, EntryFrameConstants::kArgcOffset));
- __ mov(ebx, Operand(ebx, EntryFrameConstants::kArgvOffset));
+ __ mov(eax, Operand(scratch1, EntryFrameConstants::kArgcOffset));
+ __ mov(scratch1, Operand(scratch1, EntryFrameConstants::kArgvOffset));
// Check if we have enough stack space to push all arguments.
- // Argument count in eax. Clobbers ecx and edx.
+ // Argument count in eax. Clobbers ecx.
Label enough_stack_space, stack_overflow;
- Generate_StackOverflowCheck(masm, eax, ecx, edx, &stack_overflow);
+ Generate_StackOverflowCheck(masm, eax, ecx, &stack_overflow);
__ jmp(&enough_stack_space);
__ bind(&stack_overflow);
@@ -387,19 +409,20 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
__ Move(ecx, Immediate(0));
__ jmp(&entry, Label::kNear);
__ bind(&loop);
- __ mov(edx, Operand(ebx, ecx, times_4, 0)); // push parameter from argv
- __ push(Operand(edx, 0)); // dereference handle
+ // Push the parameter from argv.
+ __ mov(scratch2, Operand(scratch1, ecx, times_4, 0));
+ __ push(Operand(scratch2, 0)); // dereference handle
__ inc(ecx);
__ bind(&entry);
__ cmp(ecx, eax);
__ j(not_equal, &loop);
// Load the previous frame pointer (ebx) to access C arguments
- __ mov(ebx, Operand(ebp, 0));
+ __ mov(scratch2, Operand(ebp, 0));
// Get the new.target and function from the frame.
- __ mov(edx, Operand(ebx, EntryFrameConstants::kNewTargetArgOffset));
- __ mov(edi, Operand(ebx, EntryFrameConstants::kFunctionArgOffset));
+ __ mov(edx, Operand(scratch2, EntryFrameConstants::kNewTargetArgOffset));
+ __ mov(edi, Operand(scratch2, EntryFrameConstants::kFunctionArgOffset));
// Invoke the code.
Handle<Code> builtin = is_construct
@@ -415,10 +438,12 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
}
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
Generate_JSEntryTrampolineHelper(masm, false);
}
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
Generate_JSEntryTrampolineHelper(masm, true);
}
@@ -437,6 +462,8 @@ static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
+
// ----------- S t a t e -------------
// -- eax : the value to pass to the generator
// -- edx : the JSGeneratorObject to resume
@@ -471,7 +498,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
// Check the stack for overflow. We are not trying to catch interruptions
// (i.e. debug break and preemption) here, so check the "real stack limit".
Label stack_overflow;
- __ CompareRoot(esp, ecx, Heap::kRealStackLimitRootIndex);
+ __ CompareRoot(esp, ecx, RootIndex::kRealStackLimit);
__ j(below, &stack_overflow);
// Pop return address.
@@ -488,26 +515,34 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
// -- esp[0] : generator receiver
// -----------------------------------
- // Copy the function arguments from the generator object's register file.
- __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
- __ movzx_w(
- ecx, FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset));
- __ mov(ebx,
- FieldOperand(edx, JSGeneratorObject::kParametersAndRegistersOffset));
{
- Label done_loop, loop;
- __ Set(edi, 0);
+ Assembler::AllowExplicitEbxAccessScope root_is_spilled(masm);
+ __ movd(xmm0, ebx);
- __ bind(&loop);
- __ cmp(edi, ecx);
- __ j(greater_equal, &done_loop);
- __ Push(
- FieldOperand(ebx, edi, times_pointer_size, FixedArray::kHeaderSize));
- __ add(edi, Immediate(1));
- __ jmp(&loop);
+ // Copy the function arguments from the generator object's register file.
+ __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
+ __ movzx_w(ecx, FieldOperand(
+ ecx, SharedFunctionInfo::kFormalParameterCountOffset));
+ __ mov(ebx,
+ FieldOperand(edx, JSGeneratorObject::kParametersAndRegistersOffset));
+ {
+ Label done_loop, loop;
+ __ Set(edi, 0);
- __ bind(&done_loop);
+ __ bind(&loop);
+ __ cmp(edi, ecx);
+ __ j(greater_equal, &done_loop);
+ __ Push(
+ FieldOperand(ebx, edi, times_pointer_size, FixedArray::kHeaderSize));
+ __ add(edi, Immediate(1));
+ __ jmp(&loop);
+
+ __ bind(&done_loop);
+ }
+
+ // Restore registers.
__ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
+ __ movd(ebx, xmm0);
}
// Underlying function needs to have bytecode available.
@@ -542,7 +577,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
__ Push(edx);
__ Push(edi);
// Push hole as receiver since we do not use it for stepping.
- __ PushRoot(Heap::kTheHoleValueRootIndex);
+ __ PushRoot(RootIndex::kTheHoleValue);
__ CallRuntime(Runtime::kDebugOnFunctionCall);
__ Pop(edx);
__ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
@@ -567,10 +602,11 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
}
}
-static void ReplaceClosureCodeWithOptimizedCode(
- MacroAssembler* masm, Register optimized_code, Register closure,
- Register scratch1, Register scratch2, Register scratch3) {
-
+static void ReplaceClosureCodeWithOptimizedCode(MacroAssembler* masm,
+ Register optimized_code,
+ Register closure,
+ Register scratch1,
+ Register scratch2) {
// Store the optimized code in the closure.
__ mov(FieldOperand(closure, JSFunction::kCodeOffset), optimized_code);
__ mov(scratch1, optimized_code); // Write barrier clobbers scratch1 below.
@@ -611,21 +647,25 @@ static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
}
static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
- Register feedback_vector,
Register scratch) {
// ----------- S t a t e -------------
// -- eax : argument count (preserved for callee if needed, and caller)
// -- edx : new target (preserved for callee if needed, and caller)
// -- edi : target function (preserved for callee if needed, and caller)
- // -- feedback vector (preserved for caller if needed)
// -----------------------------------
- DCHECK(!AreAliased(feedback_vector, eax, edx, edi, scratch));
+ DCHECK(!AreAliased(eax, edx, edi, scratch));
Label optimized_code_slot_is_weak_ref, fallthrough;
Register closure = edi;
- Register optimized_code_entry = scratch;
+ // Load the feedback vector from the closure.
+ Register feedback_vector = scratch;
+ __ mov(feedback_vector,
+ FieldOperand(closure, JSFunction::kFeedbackCellOffset));
+ __ mov(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));
+ // Load the optimized code from the feedback vector and re-use the register.
+ Register optimized_code_entry = scratch;
__ mov(optimized_code_entry,
FieldOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));
@@ -686,10 +726,8 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
// Optimized code is good, get it into the closure and link the closure into
// the optimized functions list, then tail call the optimized code.
- // The feedback vector is no longer used, so re-use it as a scratch
- // register.
ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
- edx, eax, feedback_vector);
+ edx, eax);
static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
__ Move(ecx, optimized_code_entry);
__ add(ecx, Immediate(Code::kHeaderSize - kHeapObjectTag));
@@ -716,15 +754,19 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
Register bytecode_array,
Register bytecode_offset,
- Register bytecode, Register scratch1,
+ Register scratch1, Register scratch2,
Label* if_return) {
Register bytecode_size_table = scratch1;
+ Register bytecode = scratch2;
DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
bytecode));
-
__ Move(bytecode_size_table,
Immediate(ExternalReference::bytecode_size_table_address()));
+ // Load the current bytecode.
+ __ movzx_b(bytecode, Operand(kInterpreterBytecodeArrayRegister,
+ kInterpreterBytecodeOffsetRegister, times_1, 0));
+
// Check if the bytecode is a Wide or ExtraWide prefix bytecode.
Label process_bytecode, extra_wide;
STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
@@ -732,7 +774,7 @@ static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
STATIC_ASSERT(3 ==
static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
- __ cmpb(bytecode, Immediate(0x3));
+ __ cmp(bytecode, Immediate(0x3));
__ j(above, &process_bytecode, Label::kNear);
__ test(bytecode, Immediate(0x1));
__ j(not_equal, &extra_wide, Label::kNear);
@@ -754,9 +796,9 @@ static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
__ bind(&process_bytecode);
// Bailout to the return label if this is a return bytecode.
-#define JUMP_IF_EQUAL(NAME) \
- __ cmpb(bytecode, \
- Immediate(static_cast<int>(interpreter::Bytecode::k##NAME))); \
+#define JUMP_IF_EQUAL(NAME) \
+ __ cmp(bytecode, \
+ Immediate(static_cast<int>(interpreter::Bytecode::k##NAME))); \
__ j(equal, if_return);
RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
#undef JUMP_IF_EQUAL
@@ -780,18 +822,23 @@ static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
ProfileEntryHookStub::MaybeCallEntryHook(masm);
+ __ VerifyRootRegister();
+
Register closure = edi;
- Register feedback_vector = ebx;
- // Load the feedback vector from the closure.
+ // Read off the optimized code slot in the closure's feedback vector, and if
+ // there is optimized code or an optimization marker, call that instead.
+ MaybeTailCallOptimizedCodeSlot(masm, ecx);
+
+ // Load the feedback vector and increment the invocation count.
+ Register feedback_vector = ecx;
__ mov(feedback_vector,
FieldOperand(closure, JSFunction::kFeedbackCellOffset));
__ mov(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));
- // Read off the optimized code slot in the feedback vector, and if there
- // is optimized code or an optimization marker, call that instead.
- MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, ecx);
+ __ inc(FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));
// Open a frame scope to indicate that there is a frame on the stack. The
// MANUAL indicates that the scope shouldn't actually generate code to set
@@ -811,8 +858,6 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister, eax);
__ Pop(eax);
- __ inc(FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));
-
// Check function data field is actually a BytecodeArray object.
if (FLAG_debug_code) {
__ AssertNotSmi(kInterpreterBytecodeArrayRegister);
@@ -836,16 +881,17 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// Allocate the local and temporary register file on the stack.
{
// Load frame size from the BytecodeArray object.
- __ mov(ebx, FieldOperand(kInterpreterBytecodeArrayRegister,
- BytecodeArray::kFrameSizeOffset));
+ Register frame_size = ecx;
+ __ mov(frame_size, FieldOperand(kInterpreterBytecodeArrayRegister,
+ BytecodeArray::kFrameSizeOffset));
// Do a stack check to ensure we don't go over the limit.
Label ok;
- __ mov(ecx, esp);
- __ sub(ecx, ebx);
+ __ mov(eax, esp);
+ __ sub(eax, frame_size);
ExternalReference stack_limit =
ExternalReference::address_of_real_stack_limit(masm->isolate());
- __ cmp(ecx, __ StaticVariable(stack_limit));
+ __ cmp(eax, __ StaticVariable(stack_limit));
__ j(above_equal, &ok);
__ CallRuntime(Runtime::kThrowStackOverflow);
__ bind(&ok);
@@ -860,7 +906,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
__ push(eax);
// Continue loop if not done.
__ bind(&loop_check);
- __ sub(ebx, Immediate(kPointerSize));
+ __ sub(frame_size, Immediate(kPointerSize));
__ j(greater_equal, &loop_header);
}
@@ -876,7 +922,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
__ bind(&no_incoming_new_target_or_generator_register);
// Load accumulator and bytecode offset into registers.
- __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
+ __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
__ mov(kInterpreterBytecodeOffsetRegister,
Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
@@ -887,11 +933,12 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
__ mov(kInterpreterDispatchTableRegister,
Immediate(ExternalReference::interpreter_dispatch_table_address(
masm->isolate())));
- __ movzx_b(ebx, Operand(kInterpreterBytecodeArrayRegister,
+ __ movzx_b(ecx, Operand(kInterpreterBytecodeArrayRegister,
kInterpreterBytecodeOffsetRegister, times_1, 0));
__ mov(
kJavaScriptCallCodeStartRegister,
- Operand(kInterpreterDispatchTableRegister, ebx, times_pointer_size, 0));
+ Operand(kInterpreterDispatchTableRegister, ecx, times_pointer_size, 0));
+ __ VerifyRootRegister();
__ call(kJavaScriptCallCodeStartRegister);
masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
@@ -907,16 +954,15 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// Either return, or advance to the next bytecode and dispatch.
Label do_return;
- __ movzx_b(ebx, Operand(kInterpreterBytecodeArrayRegister,
- kInterpreterBytecodeOffsetRegister, times_1, 0));
AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
- kInterpreterBytecodeOffsetRegister, ebx, ecx,
- &do_return);
+ kInterpreterBytecodeOffsetRegister, ecx,
+ kInterpreterDispatchTableRegister, &do_return);
__ jmp(&do_dispatch);
__ bind(&do_return);
// The return value is in eax.
- LeaveInterpreterFrame(masm, ebx, ecx);
+ LeaveInterpreterFrame(masm, edx, ecx);
+ __ VerifyRootRegister();
__ ret(0);
}
@@ -924,6 +970,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
static void Generate_InterpreterPushArgs(MacroAssembler* masm,
Register array_limit,
Register start_address) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
// ----------- S t a t e -------------
// -- start_address : Pointer to the last argument in the args array.
// -- array_limit : Pointer to one before the first argument in the
@@ -943,62 +990,62 @@ static void Generate_InterpreterPushArgs(MacroAssembler* masm,
void Builtins::Generate_InterpreterPushArgsThenCallImpl(
MacroAssembler* masm, ConvertReceiverMode receiver_mode,
InterpreterPushArgsMode mode) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
// ----------- S t a t e -------------
// -- eax : the number of arguments (not including the receiver)
- // -- ebx : the address of the first argument to be pushed. Subsequent
+ // -- ecx : the address of the first argument to be pushed. Subsequent
// arguments should be consecutive above this, in the same order as
// they are to be pushed onto the stack.
// -- edi : the target to call (can be any Object).
// -----------------------------------
+
+ const Register scratch = edx;
+ const Register argv = ecx;
+
Label stack_overflow;
- // Compute the expected number of arguments.
- __ mov(ecx, eax);
- __ add(ecx, Immediate(1)); // Add one for receiver.
+ // Add a stack check before pushing the arguments.
+ Generate_StackOverflowCheck(masm, eax, scratch, &stack_overflow, true);
+
+ __ movd(xmm0, eax); // Spill number of arguments.
- // Add a stack check before pushing the arguments. We need an extra register
- // to perform a stack check. So push it onto the stack temporarily. This
- // might cause stack overflow, but it will be detected by the check.
- __ Push(edi);
- Generate_StackOverflowCheck(masm, ecx, edx, edi, &stack_overflow);
- __ Pop(edi);
+ // Compute the expected number of arguments.
+ __ mov(scratch, eax);
+ __ add(scratch, Immediate(1)); // Add one for receiver.
// Pop return address to allow tail-call after pushing arguments.
- __ Pop(edx);
+ __ PopReturnAddressTo(eax);
// Push "undefined" as the receiver arg if we need to.
if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
- __ PushRoot(Heap::kUndefinedValueRootIndex);
- __ sub(ecx, Immediate(1)); // Subtract one for receiver.
+ __ PushRoot(RootIndex::kUndefinedValue);
+ __ sub(scratch, Immediate(1)); // Subtract one for receiver.
}
// Find the address of the last argument.
- __ shl(ecx, kPointerSizeLog2);
- __ neg(ecx);
- __ add(ecx, ebx);
- Generate_InterpreterPushArgs(masm, ecx, ebx);
-
- if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
- __ Pop(ebx); // Pass the spread in a register
- __ sub(eax, Immediate(1)); // Subtract one for spread
- }
+ __ shl(scratch, kPointerSizeLog2);
+ __ neg(scratch);
+ __ add(scratch, argv);
+ Generate_InterpreterPushArgs(masm, scratch, argv);
// Call the target.
- __ Push(edx); // Re-push return address.
if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
+ __ Pop(ecx); // Pass the spread in a register
+ __ PushReturnAddressFrom(eax);
+ __ movd(eax, xmm0); // Restore number of arguments.
+ __ sub(eax, Immediate(1)); // Subtract one for spread
__ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
RelocInfo::CODE_TARGET);
} else {
+ __ PushReturnAddressFrom(eax);
+ __ movd(eax, xmm0); // Restore number of arguments.
__ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny),
RelocInfo::CODE_TARGET);
}
__ bind(&stack_overflow);
{
- // Pop the temporary registers, so that return address is on top of stack.
- __ Pop(edi);
-
__ TailCallRuntime(Runtime::kThrowStackOverflow);
// This should be unreachable.
@@ -1008,44 +1055,40 @@ void Builtins::Generate_InterpreterPushArgsThenCallImpl(
namespace {
-// This function modified start_addr, and only reads the contents of num_args
-// register. scratch1 and scratch2 are used as temporary registers. Their
-// original values are restored after the use.
+// This function modifies start_addr, and only reads the contents of num_args
+// register. scratch1 and scratch2 are used as temporary registers.
void Generate_InterpreterPushZeroAndArgsAndReturnAddress(
MacroAssembler* masm, Register num_args, Register start_addr,
- Register scratch1, Register scratch2, int num_slots_above_ret_addr,
+ Register scratch1, Register scratch2, int num_slots_to_move,
Label* stack_overflow) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
// We have to move return address and the temporary registers above it
// before we can copy arguments onto the stack. To achieve this:
// Step 1: Increment the stack pointer by num_args + 1 (for receiver).
- // Step 2: Move the return address and values above it to the top of stack.
+ // Step 2: Move the return address and values around it to the top of stack.
// Step 3: Copy the arguments into the correct locations.
// current stack =====> required stack layout
- // | | | scratch1 | (2) <-- esp(1)
- // | | | .... | (2)
- // | | | scratch-n | (2)
- // | | | return addr | (2)
+ // | | | return addr | (2) <-- esp (1)
+ // | | | addtl. slot |
// | | | arg N | (3)
- // | scratch1 | <-- esp | .... |
- // | .... | | arg 1 |
- // | scratch-n | | arg 0 |
- // | return addr | | receiver slot |
+ // | | | .... |
+ // | | | arg 1 |
+ // | return addr | <-- esp | arg 0 |
+ // | addtl. slot | | receiver slot |
// Check for stack overflow before we increment the stack pointer.
- Generate_StackOverflowCheck(masm, num_args, scratch1, scratch2,
- stack_overflow, true);
+ Generate_StackOverflowCheck(masm, num_args, scratch1, stack_overflow, true);
- // Step 1 - Update the stack pointer. scratch1 already contains the required
- // increment to the stack. i.e. num_args + 1 stack slots. This is computed in
- // Generate_StackOverflowCheck.
+ // Step 1 - Update the stack pointer.
+ __ lea(scratch1, Operand(num_args, times_4, kPointerSize));
__ AllocateStackFrame(scratch1);
- // Step 2 move return_address and slots above it to the correct locations.
+ // Step 2 move return_address and slots around it to the correct locations.
// Move from top to bottom, otherwise we may overwrite when num_args = 0 or 1,
// basically when the source and destination overlap. We at least need one
// extra slot for receiver, so no extra checks are required to avoid copy.
- for (int i = 0; i < num_slots_above_ret_addr + 1; i++) {
+ for (int i = 0; i < num_slots_to_move + 1; i++) {
__ mov(scratch1,
Operand(esp, num_args, times_pointer_size, (i + 1) * kPointerSize));
__ mov(Operand(esp, i * kPointerSize), scratch1);
@@ -1055,7 +1098,7 @@ void Generate_InterpreterPushZeroAndArgsAndReturnAddress(
// Slot meant for receiver contains return address. Reset it so that
// we will not incorrectly interpret return address as an object.
__ mov(Operand(esp, num_args, times_pointer_size,
- (num_slots_above_ret_addr + 1) * kPointerSize),
+ (num_slots_to_move + 1) * kPointerSize),
Immediate(0));
__ mov(scratch1, num_args);
@@ -1064,7 +1107,7 @@ void Generate_InterpreterPushZeroAndArgsAndReturnAddress(
__ bind(&loop_header);
__ mov(scratch2, Operand(start_addr, 0));
__ mov(Operand(esp, scratch1, times_pointer_size,
- num_slots_above_ret_addr * kPointerSize),
+ num_slots_to_move * kPointerSize),
scratch2);
__ sub(start_addr, Immediate(kPointerSize));
__ sub(scratch1, Immediate(1));
@@ -1078,69 +1121,74 @@ void Generate_InterpreterPushZeroAndArgsAndReturnAddress(
// static
void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
MacroAssembler* masm, InterpreterPushArgsMode mode) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
// ----------- S t a t e -------------
- // -- eax : the number of arguments (not including the receiver)
- // -- edx : the new target
- // -- edi : the constructor
- // -- ebx : allocation site feedback (if available or undefined)
- // -- ecx : the address of the first argument to be pushed. Subsequent
- // arguments should be consecutive above this, in the same order as
- // they are to be pushed onto the stack.
+ // -- eax : the number of arguments (not including the receiver)
+ // -- ecx : the address of the first argument to be pushed. Subsequent
+ // arguments should be consecutive above this, in the same order
+ // as they are to be pushed onto the stack.
+ // -- esp[0] : return address
+ // -- esp[4] : allocation site feedback (if available or undefined)
+ // -- esp[8] : the new target
+ // -- esp[12] : the constructor
// -----------------------------------
+
Label stack_overflow;
- // We need two scratch registers. Push edi and edx onto stack.
- __ Push(edi);
- __ Push(edx);
- // Push arguments and move return address to the top of stack.
- // The eax register is readonly. The ecx register will be modified. The edx
- // and edi registers will be modified but restored to their original values.
- Generate_InterpreterPushZeroAndArgsAndReturnAddress(masm, eax, ecx, edx, edi,
- 2, &stack_overflow);
+ // Push arguments and move return address and stack spill slots to the top of
+ // stack. The eax register is readonly. The ecx register will be modified. edx
+ // and edi are used as scratch registers.
+ Generate_InterpreterPushZeroAndArgsAndReturnAddress(
+ masm, eax, ecx, edx, edi,
+ InterpreterPushArgsThenConstructDescriptor::kStackArgumentsCount,
+ &stack_overflow);
- // Restore edi and edx
- __ Pop(edx);
- __ Pop(edi);
-
- if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
- __ PopReturnAddressTo(ecx);
- __ Pop(ebx); // Pass the spread in a register
- __ PushReturnAddressFrom(ecx);
- __ sub(eax, Immediate(1)); // Subtract one for spread
- } else {
- __ AssertUndefinedOrAllocationSite(ebx);
- }
+ // Call the appropriate constructor. eax and ecx already contain intended
+ // values, remaining registers still need to be initialized from the stack.
if (mode == InterpreterPushArgsMode::kArrayFunction) {
- // Tail call to the array construct stub (still in the caller
- // context at this point).
- __ AssertFunction(edi);
- // TODO(v8:6666): When rewriting ia32 ASM builtins to not clobber the
- // kRootRegister ebx, this useless move can be removed.
- __ Move(kJavaScriptCallExtraArg1Register, ebx);
- Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
- __ Jump(code, RelocInfo::CODE_TARGET);
+ // Tail call to the array construct stub (still in the caller context at
+ // this point).
+
+ __ movd(xmm0, eax); // Spill number of arguments.
+ __ PopReturnAddressTo(eax);
+ __ Pop(kJavaScriptCallExtraArg1Register);
+ __ Pop(kJavaScriptCallNewTargetRegister);
+ __ Pop(kJavaScriptCallTargetRegister);
+ __ PushReturnAddressFrom(eax);
+ __ movd(eax, xmm0); // Reload number of arguments.
+
+ __ AssertFunction(kJavaScriptCallTargetRegister);
+ __ AssertUndefinedOrAllocationSite(kJavaScriptCallExtraArg1Register);
+ __ Jump(BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl),
+ RelocInfo::CODE_TARGET);
} else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
- // Call the constructor with unmodified eax, edi, edx values.
+ __ movd(xmm0, eax); // Spill number of arguments.
+ __ PopReturnAddressTo(eax);
+ __ Drop(1); // The allocation site is unused.
+ __ Pop(kJavaScriptCallNewTargetRegister);
+ __ Pop(kJavaScriptCallTargetRegister);
+ __ Pop(ecx); // Pop the spread (i.e. the first argument), overwriting ecx.
+ __ PushReturnAddressFrom(eax);
+ __ movd(eax, xmm0); // Reload number of arguments.
+ __ sub(eax, Immediate(1)); // The actual argc thus decrements by one.
+
__ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
RelocInfo::CODE_TARGET);
} else {
DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
- // Call the constructor with unmodified eax, edi, edx values.
+ __ PopReturnAddressTo(ecx);
+ __ Drop(1); // The allocation site is unused.
+ __ Pop(kJavaScriptCallNewTargetRegister);
+ __ Pop(kJavaScriptCallTargetRegister);
+ __ PushReturnAddressFrom(ecx);
+
__ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
}
__ bind(&stack_overflow);
- {
- // Pop the temporary registers, so that return address is on top of stack.
- __ Pop(edx);
- __ Pop(edi);
-
- __ TailCallRuntime(Runtime::kThrowStackOverflow);
-
- // This should be unreachable.
- __ int3();
- }
+ __ TailCallRuntime(Runtime::kThrowStackOverflow);
+ __ int3();
}
static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
@@ -1151,26 +1199,30 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
masm->isolate()->heap()->interpreter_entry_return_pc_offset());
DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
+ static constexpr Register scratch = ecx;
+
// If the SFI function_data is an InterpreterData, get the trampoline stored
// in it, otherwise get the trampoline from the builtins list.
- __ mov(ebx, Operand(ebp, StandardFrameConstants::kFunctionOffset));
- __ mov(ebx, FieldOperand(ebx, JSFunction::kSharedFunctionInfoOffset));
- __ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kFunctionDataOffset));
+ __ mov(scratch, Operand(ebp, StandardFrameConstants::kFunctionOffset));
+ __ mov(scratch, FieldOperand(scratch, JSFunction::kSharedFunctionInfoOffset));
+ __ mov(scratch,
+ FieldOperand(scratch, SharedFunctionInfo::kFunctionDataOffset));
__ Push(eax);
- __ CmpObjectType(ebx, INTERPRETER_DATA_TYPE, eax);
+ __ CmpObjectType(scratch, INTERPRETER_DATA_TYPE, eax);
__ j(not_equal, &builtin_trampoline, Label::kNear);
- __ mov(ebx, FieldOperand(ebx, InterpreterData::kInterpreterTrampolineOffset));
+ __ mov(scratch,
+ FieldOperand(scratch, InterpreterData::kInterpreterTrampolineOffset));
__ jmp(&trampoline_loaded, Label::kNear);
__ bind(&builtin_trampoline);
- __ Move(ebx, BUILTIN_CODE(masm->isolate(), InterpreterEntryTrampoline));
+ __ Move(scratch, BUILTIN_CODE(masm->isolate(), InterpreterEntryTrampoline));
__ bind(&trampoline_loaded);
__ Pop(eax);
- __ add(ebx, Immediate(interpreter_entry_return_pc_offset->value() +
- Code::kHeaderSize - kHeapObjectTag));
- __ push(ebx);
+ __ add(scratch, Immediate(interpreter_entry_return_pc_offset->value() +
+ Code::kHeaderSize - kHeapObjectTag));
+ __ push(scratch);
// Initialize the dispatch table register.
__ mov(kInterpreterDispatchTableRegister,
@@ -1185,7 +1237,7 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
// Check function data field is actually a BytecodeArray object.
__ AssertNotSmi(kInterpreterBytecodeArrayRegister);
__ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
- ebx);
+ scratch);
__ Assert(
equal,
AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
@@ -1197,15 +1249,17 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
__ SmiUntag(kInterpreterBytecodeOffsetRegister);
// Dispatch to the target bytecode.
- __ movzx_b(ebx, Operand(kInterpreterBytecodeArrayRegister,
- kInterpreterBytecodeOffsetRegister, times_1, 0));
- __ mov(
- kJavaScriptCallCodeStartRegister,
- Operand(kInterpreterDispatchTableRegister, ebx, times_pointer_size, 0));
+ __ movzx_b(scratch, Operand(kInterpreterBytecodeArrayRegister,
+ kInterpreterBytecodeOffsetRegister, times_1, 0));
+ __ mov(kJavaScriptCallCodeStartRegister,
+ Operand(kInterpreterDispatchTableRegister, scratch, times_pointer_size,
+ 0));
__ jmp(kJavaScriptCallCodeStartRegister);
}
void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
+
// Get bytecode array and bytecode offset from the stack frame.
__ mov(kInterpreterBytecodeArrayRegister,
Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
@@ -1213,20 +1267,16 @@ void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
__ SmiUntag(kInterpreterBytecodeOffsetRegister);
- // Load the current bytecode
- __ movzx_b(ebx, Operand(kInterpreterBytecodeArrayRegister,
- kInterpreterBytecodeOffsetRegister, times_1, 0));
-
// Advance to the next bytecode.
Label if_return;
AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
- kInterpreterBytecodeOffsetRegister, ebx, ecx,
+ kInterpreterBytecodeOffsetRegister, ecx, esi,
&if_return);
// Convert new bytecode offset to a Smi and save in the stackframe.
- __ mov(ebx, kInterpreterBytecodeOffsetRegister);
- __ SmiTag(ebx);
- __ mov(Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp), ebx);
+ __ mov(ecx, kInterpreterBytecodeOffsetRegister);
+ __ SmiTag(ecx);
+ __ mov(Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp), ecx);
Generate_InterpreterEnterBytecode(masm);
@@ -1236,10 +1286,13 @@ void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
}
void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
Generate_InterpreterEnterBytecode(masm);
}
void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
+
// ----------- S t a t e -------------
// -- eax : argument count (preserved for callee)
// -- edx : new target (preserved for callee)
@@ -1272,7 +1325,7 @@ void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
ebp, StandardFrameConstants::kCallerSPOffset + i * kPointerSize));
}
for (int i = 0; i < 3 - j; ++i) {
- __ PushRoot(Heap::kUndefinedValueRootIndex);
+ __ PushRoot(RootIndex::kUndefinedValue);
}
if (j < 3) {
__ jmp(&args_done, Label::kNear);
@@ -1291,10 +1344,10 @@ void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
__ SmiUntag(ecx);
scope.GenerateLeaveFrame();
- __ PopReturnAddressTo(ebx);
+ __ PopReturnAddressTo(edx);
__ inc(ecx);
__ lea(esp, Operand(esp, ecx, times_pointer_size, 0));
- __ PushReturnAddressFrom(ebx);
+ __ PushReturnAddressFrom(edx);
__ ret(0);
__ bind(&failed);
@@ -1316,7 +1369,13 @@ namespace {
void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
bool java_script_builtin,
bool with_result) {
+#ifdef V8_EMBEDDED_BUILTINS
+ // TODO(v8:6666): Fold into Default config once root is fully supported.
+ const RegisterConfiguration* config(
+ RegisterConfiguration::PreserveRootIA32());
+#else
const RegisterConfiguration* config(RegisterConfiguration::Default());
+#endif
int allocatable_register_count = config->num_allocatable_general_registers();
if (with_result) {
// Overwrite the hole inserted by the deoptimizer with the return value from
@@ -1346,24 +1405,42 @@ void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
} // namespace
void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
+#ifdef V8_EMBEDDED_BUILTINS
+ // TODO(v8:6666): Remove the ifdef once root is preserved by default.
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
+#endif
Generate_ContinueToBuiltinHelper(masm, false, false);
}
void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
MacroAssembler* masm) {
+#ifdef V8_EMBEDDED_BUILTINS
+ // TODO(v8:6666): Remove the ifdef once root is preserved by default.
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
+#endif
Generate_ContinueToBuiltinHelper(masm, false, true);
}
void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
+#ifdef V8_EMBEDDED_BUILTINS
+ // TODO(v8:6666): Remove the ifdef once root is preserved by default.
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
+#endif
Generate_ContinueToBuiltinHelper(masm, true, false);
}
void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
MacroAssembler* masm) {
+#ifdef V8_EMBEDDED_BUILTINS
+ // TODO(v8:6666): Remove the ifdef once root is preserved by default.
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
+#endif
Generate_ContinueToBuiltinHelper(masm, true, true);
}
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
+
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ CallRuntime(Runtime::kNotifyDeoptimized);
@@ -1377,6 +1454,8 @@ void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
+
// ----------- S t a t e -------------
// -- eax : argc
// -- esp[0] : return address
@@ -1385,32 +1464,37 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
// -- esp[12] : receiver
// -----------------------------------
- // 1. Load receiver into edi, argArray into ebx (if present), remove all
+ // 1. Load receiver into xmm0, argArray into edx (if present), remove all
// arguments from the stack (including the receiver), and push thisArg (if
// present) instead.
{
Label no_arg_array, no_this_arg;
- __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
- __ mov(ebx, edx);
- __ mov(edi, Operand(esp, eax, times_pointer_size, kPointerSize));
+ // Spill receiver to allow the usage of edi as a scratch register.
+ __ movd(xmm0, Operand(esp, eax, times_pointer_size, kPointerSize));
+
+ __ LoadRoot(edx, RootIndex::kUndefinedValue);
+ __ mov(edi, edx);
__ test(eax, eax);
__ j(zero, &no_this_arg, Label::kNear);
{
- __ mov(edx, Operand(esp, eax, times_pointer_size, 0));
+ __ mov(edi, Operand(esp, eax, times_pointer_size, 0));
__ cmp(eax, Immediate(1));
__ j(equal, &no_arg_array, Label::kNear);
- __ mov(ebx, Operand(esp, eax, times_pointer_size, -kPointerSize));
+ __ mov(edx, Operand(esp, eax, times_pointer_size, -kPointerSize));
__ bind(&no_arg_array);
}
__ bind(&no_this_arg);
__ PopReturnAddressTo(ecx);
__ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
- __ Push(edx);
+ __ Push(edi);
__ PushReturnAddressFrom(ecx);
+
+ // Restore receiver to edi.
+ __ movd(edi, xmm0);
}
// ----------- S t a t e -------------
- // -- ebx : argArray
+ // -- edx : argArray
// -- edi : receiver
// -- esp[0] : return address
// -- esp[4] : thisArg
@@ -1422,9 +1506,8 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
// 3. Tail call with no arguments if argArray is null or undefined.
Label no_arguments;
- __ JumpIfRoot(ebx, Heap::kNullValueRootIndex, &no_arguments, Label::kNear);
- __ JumpIfRoot(ebx, Heap::kUndefinedValueRootIndex, &no_arguments,
- Label::kNear);
+ __ JumpIfRoot(edx, RootIndex::kNullValue, &no_arguments, Label::kNear);
+ __ JumpIfRoot(edx, RootIndex::kUndefinedValue, &no_arguments, Label::kNear);
// 4a. Apply the receiver to the given argArray.
__ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
@@ -1441,6 +1524,8 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
+
// Stack Layout:
// esp[0] : Return address
// esp[8] : Argument n
@@ -1456,9 +1541,9 @@ void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
Label done;
__ test(eax, eax);
__ j(not_zero, &done, Label::kNear);
- __ PopReturnAddressTo(ebx);
- __ PushRoot(Heap::kUndefinedValueRootIndex);
- __ PushReturnAddressFrom(ebx);
+ __ PopReturnAddressTo(edx);
+ __ PushRoot(RootIndex::kUndefinedValue);
+ __ PushReturnAddressFrom(edx);
__ inc(eax);
__ bind(&done);
}
@@ -1473,11 +1558,11 @@ void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
Label loop;
__ mov(ecx, eax);
__ bind(&loop);
- __ mov(ebx, Operand(esp, ecx, times_pointer_size, 0));
- __ mov(Operand(esp, ecx, times_pointer_size, kPointerSize), ebx);
+ __ mov(edx, Operand(esp, ecx, times_pointer_size, 0));
+ __ mov(Operand(esp, ecx, times_pointer_size, kPointerSize), edx);
__ dec(ecx);
__ j(not_sign, &loop); // While non-negative (to copy return address).
- __ pop(ebx); // Discard copy of return address.
+ __ pop(edx); // Discard copy of return address.
__ dec(eax); // One fewer argument (first argument is new receiver).
}
@@ -1486,6 +1571,8 @@ void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
}
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
+
// ----------- S t a t e -------------
// -- eax : argc
// -- esp[0] : return address
@@ -1495,31 +1582,38 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
// -- esp[16] : receiver
// -----------------------------------
- // 1. Load target into edi (if present), argumentsList into ebx (if present),
+ // 1. Load target into edi (if present), argumentsList into edx (if present),
// remove all arguments from the stack (including the receiver), and push
// thisArgument (if present) instead.
{
Label done;
- __ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
+ __ LoadRoot(edi, RootIndex::kUndefinedValue);
__ mov(edx, edi);
- __ mov(ebx, edi);
+ __ mov(ecx, edi);
__ cmp(eax, Immediate(1));
__ j(below, &done, Label::kNear);
__ mov(edi, Operand(esp, eax, times_pointer_size, -0 * kPointerSize));
__ j(equal, &done, Label::kNear);
- __ mov(edx, Operand(esp, eax, times_pointer_size, -1 * kPointerSize));
+ __ mov(ecx, Operand(esp, eax, times_pointer_size, -1 * kPointerSize));
__ cmp(eax, Immediate(3));
__ j(below, &done, Label::kNear);
- __ mov(ebx, Operand(esp, eax, times_pointer_size, -2 * kPointerSize));
+ __ mov(edx, Operand(esp, eax, times_pointer_size, -2 * kPointerSize));
__ bind(&done);
- __ PopReturnAddressTo(ecx);
+
+ // Spill argumentsList to use edx as a scratch register.
+ __ movd(xmm0, edx);
+
+ __ PopReturnAddressTo(edx);
__ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
- __ Push(edx);
- __ PushReturnAddressFrom(ecx);
+ __ Push(ecx);
+ __ PushReturnAddressFrom(edx);
+
+ // Restore argumentsList.
+ __ movd(edx, xmm0);
}
// ----------- S t a t e -------------
- // -- ebx : argumentsList
+ // -- edx : argumentsList
// -- edi : target
// -- esp[0] : return address
// -- esp[4] : thisArgument
@@ -1535,6 +1629,8 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
}
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
+
// ----------- S t a t e -------------
// -- eax : argc
// -- esp[0] : return address
@@ -1544,33 +1640,40 @@ void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
// -- esp[16] : receiver
// -----------------------------------
- // 1. Load target into edi (if present), argumentsList into ebx (if present),
+ // 1. Load target into edi (if present), argumentsList into ecx (if present),
// new.target into edx (if present, otherwise use target), remove all
// arguments from the stack (including the receiver), and push thisArgument
// (if present) instead.
{
Label done;
- __ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
+ __ LoadRoot(edi, RootIndex::kUndefinedValue);
__ mov(edx, edi);
- __ mov(ebx, edi);
+ __ mov(ecx, edi);
__ cmp(eax, Immediate(1));
__ j(below, &done, Label::kNear);
__ mov(edi, Operand(esp, eax, times_pointer_size, -0 * kPointerSize));
__ mov(edx, edi);
__ j(equal, &done, Label::kNear);
- __ mov(ebx, Operand(esp, eax, times_pointer_size, -1 * kPointerSize));
+ __ mov(ecx, Operand(esp, eax, times_pointer_size, -1 * kPointerSize));
__ cmp(eax, Immediate(3));
__ j(below, &done, Label::kNear);
__ mov(edx, Operand(esp, eax, times_pointer_size, -2 * kPointerSize));
__ bind(&done);
+
+ // Spill argumentsList to use ecx as a scratch register.
+ __ movd(xmm0, ecx);
+
__ PopReturnAddressTo(ecx);
__ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
- __ PushRoot(Heap::kUndefinedValueRootIndex);
+ __ PushRoot(RootIndex::kUndefinedValue);
__ PushReturnAddressFrom(ecx);
+
+ // Restore argumentsList.
+ __ movd(ecx, xmm0);
}
// ----------- S t a t e -------------
- // -- ebx : argumentsList
+ // -- ecx : argumentsList
// -- edx : new.target
// -- edi : target
// -- esp[0] : return address
@@ -1591,6 +1694,8 @@ void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
}
void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
+
// ----------- S t a t e -------------
// -- eax : argc
// -- esp[0] : return address
@@ -1600,19 +1705,18 @@ void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
if (FLAG_debug_code) {
// Initial map for the builtin InternalArray function should be a map.
- __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
+ __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a nullptr and a Smi.
- __ test(ebx, Immediate(kSmiTagMask));
+ __ test(ecx, Immediate(kSmiTagMask));
__ Assert(not_zero,
AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
- __ CmpObjectType(ebx, MAP_TYPE, ecx);
+ __ CmpObjectType(ecx, MAP_TYPE, ecx);
__ Assert(equal,
AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
}
// Run the native code for the InternalArray function called as a normal
// function.
- __ mov(ebx, masm->isolate()->factory()->undefined_value());
__ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
RelocInfo::CODE_TARGET);
}
@@ -1639,45 +1743,57 @@ static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
// Retrieve the number of arguments from the stack.
- __ mov(ebx, Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset));
+ __ mov(edi, Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset));
// Leave the frame.
__ leave();
// Remove caller arguments from the stack.
STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
- __ pop(ecx);
- __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)); // 1 ~ receiver
- __ push(ecx);
+ __ PopReturnAddressTo(ecx);
+ __ lea(esp, Operand(esp, edi, times_2, 1 * kPointerSize)); // 1 ~ receiver
+ __ PushReturnAddressFrom(ecx);
}
// static
void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
Handle<Code> code) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
// ----------- S t a t e -------------
// -- edi : target
+ // -- esi : context for the Call / Construct builtin
// -- eax : number of parameters on the stack (not including the receiver)
- // -- ebx : arguments list (a FixedArray)
// -- ecx : len (number of elements to from args)
- // -- edx : new.target (checked to be constructor or undefined)
+ // -- ecx : new.target (checked to be constructor or undefined)
+ // -- esp[4] : arguments list (a FixedArray)
// -- esp[0] : return address.
// -----------------------------------
- // We need to preserve eax, edi and ebx.
+ // We need to preserve eax, edi, esi and ebx.
__ movd(xmm0, edx);
__ movd(xmm1, edi);
__ movd(xmm2, eax);
+ __ movd(xmm3, esi); // Spill the context.
+
+ // TODO(v8:6666): Remove this usage of ebx to enable kRootRegister support.
+ const Register kArgumentsList = esi;
+ const Register kArgumentsLength = ecx;
+
+ __ PopReturnAddressTo(edx);
+ __ pop(kArgumentsList);
+ __ PushReturnAddressFrom(edx);
if (masm->emit_debug_code()) {
- // Allow ebx to be a FixedArray, or a FixedDoubleArray if ecx == 0.
+ // Allow kArgumentsList to be a FixedArray, or a FixedDoubleArray if
+ // kArgumentsLength == 0.
Label ok, fail;
- __ AssertNotSmi(ebx);
- __ mov(edx, FieldOperand(ebx, HeapObject::kMapOffset));
+ __ AssertNotSmi(kArgumentsList);
+ __ mov(edx, FieldOperand(kArgumentsList, HeapObject::kMapOffset));
__ CmpInstanceType(edx, FIXED_ARRAY_TYPE);
__ j(equal, &ok);
__ CmpInstanceType(edx, FIXED_DOUBLE_ARRAY_TYPE);
__ j(not_equal, &fail);
- __ cmp(ecx, 0);
+ __ cmp(kArgumentsLength, 0);
__ j(equal, &ok);
// Fall through.
__ bind(&fail);
@@ -1686,25 +1802,10 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
__ bind(&ok);
}
- // Check for stack overflow.
- {
- // Check the stack for overflow. We are not trying to catch interruptions
- // (i.e. debug break and preemption) here, so check the "real stack limit".
- Label done;
- ExternalReference real_stack_limit =
- ExternalReference::address_of_real_stack_limit(masm->isolate());
- __ mov(edx, __ StaticVariable(real_stack_limit));
- // Make edx the space we have left. The stack might already be overflowed
- // here which will cause edx to become negative.
- __ neg(edx);
- __ add(edx, esp);
- __ sar(edx, kPointerSizeLog2);
- // Check if the arguments will overflow the stack.
- __ cmp(edx, ecx);
- __ j(greater, &done, Label::kNear); // Signed comparison.
- __ TailCallRuntime(Runtime::kThrowStackOverflow);
- __ bind(&done);
- }
+ // Check the stack for overflow. We are not trying to catch interruptions
+ // (i.e. debug break and preemption) here, so check the "real stack limit".
+ Label stack_overflow;
+ Generate_StackOverflowCheck(masm, kArgumentsLength, edx, &stack_overflow);
// Push additional arguments onto the stack.
{
@@ -1712,14 +1813,14 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
__ Move(eax, Immediate(0));
Label done, push, loop;
__ bind(&loop);
- __ cmp(eax, ecx);
+ __ cmp(eax, kArgumentsLength);
__ j(equal, &done, Label::kNear);
// Turn the hole into undefined as we go.
- __ mov(edi,
- FieldOperand(ebx, eax, times_pointer_size, FixedArray::kHeaderSize));
- __ CompareRoot(edi, Heap::kTheHoleValueRootIndex);
+ __ mov(edi, FieldOperand(kArgumentsList, eax, times_pointer_size,
+ FixedArray::kHeaderSize));
+ __ CompareRoot(edi, RootIndex::kTheHoleValue);
__ j(not_equal, &push, Label::kNear);
- __ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
+ __ LoadRoot(edi, RootIndex::kUndefinedValue);
__ bind(&push);
__ Push(edi);
__ inc(eax);
@@ -1729,34 +1830,45 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
}
// Restore eax, edi and edx.
+ __ movd(esi, xmm3); // Restore the context.
__ movd(eax, xmm2);
__ movd(edi, xmm1);
__ movd(edx, xmm0);
// Compute the actual parameter count.
- __ add(eax, ecx);
+ __ add(eax, kArgumentsLength);
// Tail-call to the actual Call or Construct builtin.
__ Jump(code, RelocInfo::CODE_TARGET);
+
+ __ bind(&stack_overflow);
+ __ movd(esi, xmm3); // Restore the context.
+ __ TailCallRuntime(Runtime::kThrowStackOverflow);
}
// static
void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
CallOrConstructMode mode,
Handle<Code> code) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
// ----------- S t a t e -------------
// -- eax : the number of arguments (not including the receiver)
// -- edi : the target to call (can be any Object)
+ // -- esi : context for the Call / Construct builtin
// -- edx : the new target (for [[Construct]] calls)
// -- ecx : start index (to support rest parameters)
// -----------------------------------
+ __ movd(xmm0, esi); // Spill the context.
+
+ Register scratch = esi;
+
// Check if new.target has a [[Construct]] internal method.
if (mode == CallOrConstructMode::kConstruct) {
Label new_target_constructor, new_target_not_constructor;
__ JumpIfSmi(edx, &new_target_not_constructor, Label::kNear);
- __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
- __ test_b(FieldOperand(ebx, Map::kBitFieldOffset),
+ __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
+ __ test_b(FieldOperand(scratch, Map::kBitFieldOffset),
Immediate(Map::IsConstructorBit::kMask));
__ j(not_zero, &new_target_constructor, Label::kNear);
__ bind(&new_target_not_constructor);
@@ -1764,18 +1876,18 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
FrameScope scope(masm, StackFrame::MANUAL);
__ EnterFrame(StackFrame::INTERNAL);
__ Push(edx);
+ __ movd(esi, xmm0); // Restore the context.
__ CallRuntime(Runtime::kThrowNotConstructor);
}
__ bind(&new_target_constructor);
}
- // Preserve new.target (in case of [[Construct]]).
- __ movd(xmm0, edx);
+ __ movd(xmm1, edx); // Preserve new.target (in case of [[Construct]]).
// Check if we have an arguments adaptor frame below the function frame.
Label arguments_adaptor, arguments_done;
- __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
- __ cmp(Operand(ebx, CommonFrameConstants::kContextOrFrameTypeOffset),
+ __ mov(scratch, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
+ __ cmp(Operand(scratch, CommonFrameConstants::kContextOrFrameTypeOffset),
Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
__ j(equal, &arguments_adaptor, Label::kNear);
{
@@ -1783,39 +1895,23 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
__ mov(edx, FieldOperand(edx, JSFunction::kSharedFunctionInfoOffset));
__ movzx_w(edx, FieldOperand(
edx, SharedFunctionInfo::kFormalParameterCountOffset));
- __ mov(ebx, ebp);
+ __ mov(scratch, ebp);
}
__ jmp(&arguments_done, Label::kNear);
__ bind(&arguments_adaptor);
{
// Just load the length from the ArgumentsAdaptorFrame.
- __ mov(edx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
+ __ mov(edx,
+ Operand(scratch, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ SmiUntag(edx);
}
__ bind(&arguments_done);
- Label stack_done;
+ Label stack_done, stack_overflow;
__ sub(edx, ecx);
__ j(less_equal, &stack_done);
{
- // Check for stack overflow.
- {
- // Check the stack for overflow. We are not trying to catch interruptions
- // (i.e. debug break and preemption) here, so check the "real stack
- // limit".
- Label done;
- __ LoadRoot(ecx, Heap::kRealStackLimitRootIndex);
- // Make ecx the space we have left. The stack might already be
- // overflowed here which will cause ecx to become negative.
- __ neg(ecx);
- __ add(ecx, esp);
- __ sar(ecx, kPointerSizeLog2);
- // Check if the arguments will overflow the stack.
- __ cmp(ecx, edx);
- __ j(greater, &done, Label::kNear); // Signed comparison.
- __ TailCallRuntime(Runtime::kThrowStackOverflow);
- __ bind(&done);
- }
+ Generate_StackOverflowCheck(masm, edx, ecx, &stack_overflow);
// Forward the arguments from the caller frame.
{
@@ -1824,7 +1920,7 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
__ PopReturnAddressTo(ecx);
__ bind(&loop);
{
- __ Push(Operand(ebx, edx, times_pointer_size, 1 * kPointerSize));
+ __ Push(Operand(scratch, edx, times_pointer_size, 1 * kPointerSize));
__ dec(edx);
__ j(not_zero, &loop);
}
@@ -1833,16 +1929,22 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
}
__ bind(&stack_done);
- // Restore new.target (in case of [[Construct]]).
- __ movd(edx, xmm0);
+ __ movd(edx, xmm1); // Restore new.target (in case of [[Construct]]).
+ __ movd(esi, xmm0); // Restore the context.
// Tail-call to the {code} handler.
__ Jump(code, RelocInfo::CODE_TARGET);
+
+ __ bind(&stack_overflow);
+ __ movd(esi, xmm0); // Restore the context.
+ __ TailCallRuntime(Runtime::kThrowStackOverflow);
}
// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
ConvertReceiverMode mode) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
+
// ----------- S t a t e -------------
// -- eax : the number of arguments (not including the receiver)
// -- edi : the function to call (checked to be a JSFunction)
@@ -1883,13 +1985,15 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
__ mov(ecx, Operand(esp, eax, times_pointer_size, kPointerSize));
__ JumpIfSmi(ecx, &convert_to_object, Label::kNear);
STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
- __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ebx);
+ __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ecx); // Clobbers ecx.
__ j(above_equal, &done_convert);
+ // Reload the receiver (it was clobbered by CmpObjectType).
+ __ mov(ecx, Operand(esp, eax, times_pointer_size, kPointerSize));
if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
Label convert_global_proxy;
- __ JumpIfRoot(ecx, Heap::kUndefinedValueRootIndex,
- &convert_global_proxy, Label::kNear);
- __ JumpIfNotRoot(ecx, Heap::kNullValueRootIndex, &convert_to_object,
+ __ JumpIfRoot(ecx, RootIndex::kUndefinedValue, &convert_global_proxy,
+ Label::kNear);
+ __ JumpIfNotRoot(ecx, RootIndex::kNullValue, &convert_to_object,
Label::kNear);
__ bind(&convert_global_proxy);
{
@@ -1932,9 +2036,9 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
// -----------------------------------
__ movzx_w(
- ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
+ ecx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
ParameterCount actual(eax);
- ParameterCount expected(ebx);
+ ParameterCount expected(ecx);
__ InvokeFunctionCode(edi, no_reg, expected, actual, JUMP_FUNCTION);
// The function is a "classConstructor", need to raise an exception.
__ bind(&class_constructor);
@@ -1948,40 +2052,43 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
namespace {
void Generate_PushBoundArguments(MacroAssembler* masm) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
// ----------- S t a t e -------------
// -- eax : the number of arguments (not including the receiver)
// -- edx : new.target (only in case of [[Construct]])
// -- edi : target (checked to be a JSBoundFunction)
// -----------------------------------
- // Load [[BoundArguments]] into ecx and length of that into ebx.
+ __ movd(xmm0, edx); // Spill edx.
+
+ // Load [[BoundArguments]] into ecx and length of that into edx.
Label no_bound_arguments;
__ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
- __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
- __ SmiUntag(ebx);
- __ test(ebx, ebx);
+ __ mov(edx, FieldOperand(ecx, FixedArray::kLengthOffset));
+ __ SmiUntag(edx);
+ __ test(edx, edx);
__ j(zero, &no_bound_arguments);
{
// ----------- S t a t e -------------
- // -- eax : the number of arguments (not including the receiver)
- // -- edx : new.target (only in case of [[Construct]])
- // -- edi : target (checked to be a JSBoundFunction)
- // -- ecx : the [[BoundArguments]] (implemented as FixedArray)
- // -- ebx : the number of [[BoundArguments]]
+ // -- eax : the number of arguments (not including the receiver)
+ // -- xmm0 : new.target (only in case of [[Construct]])
+ // -- edi : target (checked to be a JSBoundFunction)
+ // -- ecx : the [[BoundArguments]] (implemented as FixedArray)
+ // -- edx : the number of [[BoundArguments]]
// -----------------------------------
// Reserve stack space for the [[BoundArguments]].
{
Label done;
- __ lea(ecx, Operand(ebx, times_pointer_size, 0));
+ __ lea(ecx, Operand(edx, times_pointer_size, 0));
__ sub(esp, ecx);
// Check the stack for overflow. We are not trying to catch interruptions
// (i.e. debug break and preemption) here, so check the "real stack
// limit".
- __ CompareRoot(esp, ecx, Heap::kRealStackLimitRootIndex);
- __ j(greater, &done, Label::kNear); // Signed comparison.
+ __ CompareRoot(esp, ecx, RootIndex::kRealStackLimit);
+ __ j(above_equal, &done, Label::kNear);
// Restore the stack pointer.
- __ lea(esp, Operand(esp, ebx, times_pointer_size, 0));
+ __ lea(esp, Operand(esp, edx, times_pointer_size, 0));
{
FrameScope scope(masm, StackFrame::MANUAL);
__ EnterFrame(StackFrame::INTERNAL);
@@ -1997,10 +2104,10 @@ void Generate_PushBoundArguments(MacroAssembler* masm) {
{
Label loop;
__ Set(ecx, 0);
- __ lea(ebx, Operand(esp, ebx, times_pointer_size, 0));
+ __ lea(edx, Operand(esp, edx, times_pointer_size, 0));
__ bind(&loop);
- __ movd(xmm0, Operand(ebx, ecx, times_pointer_size, 0));
- __ movd(Operand(esp, ecx, times_pointer_size, 0), xmm0);
+ __ movd(xmm1, Operand(edx, ecx, times_pointer_size, 0));
+ __ movd(Operand(esp, ecx, times_pointer_size, 0), xmm1);
__ inc(ecx);
__ cmp(ecx, eax);
__ j(less, &loop);
@@ -2010,13 +2117,13 @@ void Generate_PushBoundArguments(MacroAssembler* masm) {
{
Label loop;
__ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
- __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
- __ SmiUntag(ebx);
+ __ mov(edx, FieldOperand(ecx, FixedArray::kLengthOffset));
+ __ SmiUntag(edx);
__ bind(&loop);
- __ dec(ebx);
- __ movd(xmm0, FieldOperand(ecx, ebx, times_pointer_size,
+ __ dec(edx);
+ __ movd(xmm1, FieldOperand(ecx, edx, times_pointer_size,
FixedArray::kHeaderSize));
- __ movd(Operand(esp, eax, times_pointer_size, 0), xmm0);
+ __ movd(Operand(esp, eax, times_pointer_size, 0), xmm1);
__ lea(eax, Operand(eax, 1));
__ j(greater, &loop);
}
@@ -2026,13 +2133,16 @@ void Generate_PushBoundArguments(MacroAssembler* masm) {
// [[BoundArguments]]), so we need to subtract one for the return address.
__ dec(eax);
}
+
__ bind(&no_bound_arguments);
+ __ movd(edx, xmm0); // Reload edx.
}
} // namespace
// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
// ----------- S t a t e -------------
// -- eax : the number of arguments (not including the receiver)
// -- edi : the function to call (checked to be a JSBoundFunction)
@@ -2040,8 +2150,8 @@ void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
__ AssertBoundFunction(edi);
// Patch the receiver to [[BoundThis]].
- __ mov(ebx, FieldOperand(edi, JSBoundFunction::kBoundThisOffset));
- __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), ebx);
+ __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundThisOffset));
+ __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), ecx);
// Push the [[BoundArguments]] onto the stack.
Generate_PushBoundArguments(masm);
@@ -2054,6 +2164,7 @@ void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
// ----------- S t a t e -------------
// -- eax : the number of arguments (not including the receiver)
// -- edi : the target to call (can be any Object).
@@ -2101,6 +2212,7 @@ void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
// ----------- S t a t e -------------
// -- eax : the number of arguments (not including the receiver)
// -- edx : the new target (checked to be a constructor)
@@ -2109,10 +2221,6 @@ void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
__ AssertConstructor(edi);
__ AssertFunction(edi);
- // Calling convention for function specific ConstructStubs require
- // ebx to contain either an AllocationSite or undefined.
- __ LoadRoot(ebx, Heap::kUndefinedValueRootIndex);
-
Label call_generic_stub;
// Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
@@ -2121,16 +2229,23 @@ void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
Immediate(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
__ j(zero, &call_generic_stub, Label::kNear);
+ // Calling convention for function specific ConstructStubs require
+ // ecx to contain either an AllocationSite or undefined.
+ __ LoadRoot(ecx, RootIndex::kUndefinedValue);
__ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
RelocInfo::CODE_TARGET);
__ bind(&call_generic_stub);
+ // Calling convention for function specific ConstructStubs require
+ // ecx to contain either an AllocationSite or undefined.
+ __ LoadRoot(ecx, RootIndex::kUndefinedValue);
__ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
RelocInfo::CODE_TARGET);
}
// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
// ----------- S t a t e -------------
// -- eax : the number of arguments (not including the receiver)
// -- edx : the new target (checked to be a constructor)
@@ -2213,18 +2328,23 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax : actual number of arguments
- // -- ebx : expected number of arguments
+ // -- ecx : expected number of arguments
// -- edx : new target (passed through to callee)
// -- edi : function (passed through to callee)
// -----------------------------------
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
+
+ const Register kExpectedNumberOfArgumentsRegister = ecx;
+
Label invoke, dont_adapt_arguments, stack_overflow;
__ IncrementCounter(masm->isolate()->counters()->arguments_adaptors(), 1);
Label enough, too_few;
- __ cmp(ebx, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
+ __ cmp(kExpectedNumberOfArgumentsRegister,
+ SharedFunctionInfo::kDontAdaptArgumentsSentinel);
__ j(equal, &dont_adapt_arguments);
- __ cmp(eax, ebx);
+ __ cmp(eax, kExpectedNumberOfArgumentsRegister);
__ j(less, &too_few);
{ // Enough parameters: Actual >= expected.
@@ -2232,7 +2352,8 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
EnterArgumentsAdaptorFrame(masm);
// edi is used as a scratch register. It should be restored from the frame
// when needed.
- Generate_StackOverflowCheck(masm, ebx, ecx, edi, &stack_overflow);
+ Generate_StackOverflowCheck(masm, kExpectedNumberOfArgumentsRegister, edi,
+ &stack_overflow);
// Copy receiver and all expected arguments.
const int offset = StandardFrameConstants::kCallerSPOffset;
@@ -2244,7 +2365,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ inc(eax);
__ push(Operand(edi, 0));
__ sub(edi, Immediate(kPointerSize));
- __ cmp(eax, ebx);
+ __ cmp(eax, kExpectedNumberOfArgumentsRegister);
__ j(less, &copy);
// eax now contains the expected number of arguments.
__ jmp(&invoke);
@@ -2255,16 +2376,17 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
EnterArgumentsAdaptorFrame(masm);
// edi is used as a scratch register. It should be restored from the frame
// when needed.
- Generate_StackOverflowCheck(masm, ebx, ecx, edi, &stack_overflow);
+ Generate_StackOverflowCheck(masm, kExpectedNumberOfArgumentsRegister, edi,
+ &stack_overflow);
- // Remember expected arguments in ecx.
- __ mov(ecx, ebx);
+ // Remember expected arguments in xmm0.
+ __ movd(xmm0, kExpectedNumberOfArgumentsRegister);
// Copy receiver and all actual arguments.
const int offset = StandardFrameConstants::kCallerSPOffset;
__ lea(edi, Operand(ebp, eax, times_4, offset));
- // ebx = expected - actual.
- __ sub(ebx, eax);
+ // ecx = expected - actual.
+ __ sub(kExpectedNumberOfArgumentsRegister, eax);
// eax = -actual - 1
__ neg(eax);
__ sub(eax, Immediate(1));
@@ -2282,11 +2404,11 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ bind(&fill);
__ inc(eax);
__ push(Immediate(masm->isolate()->factory()->undefined_value()));
- __ cmp(eax, ebx);
+ __ cmp(eax, kExpectedNumberOfArgumentsRegister);
__ j(less, &fill);
// Restore expected arguments.
- __ mov(eax, ecx);
+ __ movd(eax, xmm0);
}
// Call the entry point.
@@ -2325,15 +2447,12 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
}
}
-static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
- bool has_handler_frame) {
+void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
+
// Lookup the function in the JavaScript frame.
- if (has_handler_frame) {
- __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
- __ mov(eax, Operand(eax, JavaScriptFrameConstants::kFunctionOffset));
- } else {
- __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
- }
+ __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
+ __ mov(eax, Operand(eax, JavaScriptFrameConstants::kFunctionOffset));
{
FrameScope scope(masm, StackFrame::INTERNAL);
@@ -2350,23 +2469,21 @@ static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
__ bind(&skip);
- // Drop any potential handler frame that is be sitting on top of the actual
 + // Drop the handler frame that is sitting on top of the actual
// JavaScript frame. This is the case then OSR is triggered from bytecode.
- if (has_handler_frame) {
- __ leave();
- }
+ __ leave();
// Load deoptimization data from the code object.
- __ mov(ebx, Operand(eax, Code::kDeoptimizationDataOffset - kHeapObjectTag));
+ __ mov(ecx, Operand(eax, Code::kDeoptimizationDataOffset - kHeapObjectTag));
// Load the OSR entrypoint offset from the deoptimization data.
- __ mov(ebx, Operand(ebx, FixedArray::OffsetOfElementAt(
+ __ mov(ecx, Operand(ecx, FixedArray::OffsetOfElementAt(
DeoptimizationData::kOsrPcOffsetIndex) -
kHeapObjectTag));
- __ SmiUntag(ebx);
+ __ SmiUntag(ecx);
// Compute the target address = code_obj + header_size + osr_offset
- __ lea(eax, Operand(eax, ebx, times_1, Code::kHeaderSize - kHeapObjectTag));
+ __ lea(eax, Operand(eax, ecx, times_1, Code::kHeaderSize - kHeapObjectTag));
// Overwrite the return address on the stack.
__ mov(Operand(esp, 0), eax);
@@ -2375,15 +2492,9 @@ static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
__ ret(0);
}
-void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
- Generate_OnStackReplacementHelper(masm, false);
-}
-
-void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
- Generate_OnStackReplacementHelper(masm, true);
-}
-
void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
+
// The function index was put in edi by the jump table trampoline.
// Convert to Smi for the runtime call.
__ SmiTag(edi);
@@ -2394,6 +2505,7 @@ void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
// Save all parameter registers (see wasm-linkage.cc). They might be
// overwritten in the runtime call below. We don't have any callee-saved
// registers in wasm, so no need to store anything else.
+ Assembler::AllowExplicitEbxAccessScope root_is_spilled(masm);
static_assert(WasmCompileLazyFrameConstants::kNumberOfSavedGpParamRegs ==
arraysize(wasm::kGpParamRegisters),
"frame size mismatch");
@@ -2420,7 +2532,12 @@ void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
// Initialize the JavaScript context with 0. CEntry will use it to
// set the current context on the isolate.
__ Move(kContextRegister, Smi::kZero);
- __ CallRuntimeWithCEntry(Runtime::kWasmCompileLazy, ecx);
+ {
+ // At this point, ebx has been spilled to the stack but is not yet
+ // overwritten with another value. We can still use it as kRootRegister.
+ Assembler::SupportsRootRegisterScope root_is_unclobbered(masm);
+ __ CallRuntimeWithCEntry(Runtime::kWasmCompileLazy, ecx);
+ }
// The entrypoint address is the return value.
__ mov(edi, kReturnRegister0);
@@ -2452,6 +2569,11 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
// If argv_mode == kArgvInRegister:
// ecx: pointer to the first argument
+#ifdef V8_EMBEDDED_BUILTINS
+ // TODO(v8:6666): Remove the ifdef once branch load poisoning is removed.
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
+#endif
+
STATIC_ASSERT(eax == kRuntimeCallArgCountRegister);
STATIC_ASSERT(ecx == kRuntimeCallArgvRegister);
STATIC_ASSERT(edx == kRuntimeCallFunctionRegister);
@@ -2571,11 +2693,16 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
__ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
__ bind(&skip);
+#ifdef V8_EMBEDDED_BUILTINS
+ STATIC_ASSERT(kRootRegister == kSpeculationPoisonRegister);
+ CHECK(!FLAG_untrusted_code_mitigations);
+#else
// Reset the masking register. This is done independent of the underlying
- // feature flag {FLAG_branch_load_poisoning} to make the snapshot work with
- // both configurations. It is safe to always do this, because the underlying
- // register is caller-saved and can be arbitrarily clobbered.
+ // feature flag {FLAG_untrusted_code_mitigations} to make the snapshot work
+ // with both configurations. It is safe to always do this, because the
+ // underlying register is caller-saved and can be arbitrarily clobbered.
__ ResetSpeculationPoisonRegister();
+#endif
// Compute the handler entry address and jump to it.
__ mov(edi, __ StaticVariable(pending_handler_entrypoint_address));
@@ -2583,6 +2710,8 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
}
void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
+
Label check_negative, process_64_bits, done;
// Account for return address and saved regs.
@@ -2596,6 +2725,7 @@ void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
MemOperand return_operand = mantissa_operand;
Register scratch1 = ebx;
+ Assembler::AllowExplicitEbxAccessScope root_is_spilled(masm);
// Since we must use ecx for shifts below, use some other register (eax)
// to calculate the result.
@@ -2676,6 +2806,8 @@ void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
}
void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
+
const Register exponent = eax;
const Register scratch = ecx;
const XMMRegister double_result = xmm3;
@@ -2843,9 +2975,10 @@ void GenerateInternalArrayConstructorCase(MacroAssembler* masm,
RelocInfo::CODE_TARGET);
__ bind(&not_one_case);
- // TODO(v8:6666): When rewriting ia32 ASM builtins to not clobber the
- // kRootRegister ebx, this useless move can be removed.
- __ Move(kJavaScriptCallExtraArg1Register, ebx);
+ // Load undefined into the allocation site parameter as required by
+ // ArrayNArgumentsConstructor.
+ __ mov(kJavaScriptCallExtraArg1Register,
+ masm->isolate()->factory()->undefined_value());
Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor);
__ Jump(code, RelocInfo::CODE_TARGET);
}
@@ -2853,6 +2986,8 @@ void GenerateInternalArrayConstructorCase(MacroAssembler* masm,
} // namespace
void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) {
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
+
// ----------- S t a t e -------------
// -- eax : argc
// -- edi : constructor