Diffstat (limited to 'deps/v8/src/builtins/x64/builtins-x64.cc')
-rw-r--r--  deps/v8/src/builtins/x64/builtins-x64.cc | 74
1 file changed, 42 insertions(+), 32 deletions(-)
diff --git a/deps/v8/src/builtins/x64/builtins-x64.cc b/deps/v8/src/builtins/x64/builtins-x64.cc
index 11bb9ca44a..bcdf5928e1 100644
--- a/deps/v8/src/builtins/x64/builtins-x64.cc
+++ b/deps/v8/src/builtins/x64/builtins-x64.cc
@@ -47,15 +47,11 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
Runtime::FunctionId function_id) {
// ----------- S t a t e -------------
- // -- rax : argument count (preserved for callee)
// -- rdx : new target (preserved for callee)
// -- rdi : target function (preserved for callee)
// -----------------------------------
{
FrameScope scope(masm, StackFrame::INTERNAL);
- // Push the number of arguments to the callee.
- __ SmiTag(rax, rax);
- __ Push(rax);
// Push a copy of the target function and the new target.
__ Push(rdi);
__ Push(rdx);
@@ -68,8 +64,6 @@ static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
// Restore target function and new target.
__ Pop(rdx);
__ Pop(rdi);
- __ Pop(rax);
- __ SmiUntag(rax, rax);
}
static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
__ JumpCodeObject(rcx);
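The Push/SmiTag and Pop/SmiUntag pair around the runtime call disappears because rax no longer carries the argument count through this helper. The count was smi-tagged before being pushed so the GC could safely treat the stack slot as a tagged value. As a rough illustration of what SmiTag/SmiUntag compute, here is a sketch assuming the classic x64 smi layout without pointer compression (payload in the upper 32 bits); these helpers are illustrative, not V8's actual code:

    #include <cstdint>

    // Illustrative only: tag a 32-bit value as a smi by placing it in the
    // upper half of the word; the low bits (including the tag bit) stay
    // zero, marking the word as a smi.
    inline uint64_t SmiTagSketch(int32_t value) {
      return static_cast<uint64_t>(static_cast<uint32_t>(value)) << 32;
    }

    // Illustrative only: recover the signed payload from the upper half.
    inline int32_t SmiUntagSketch(uint64_t tagged) {
      return static_cast<int32_t>(tagged >> 32);
    }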
@@ -77,6 +71,25 @@ static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
namespace {
+void Generate_StackOverflowCheck(
+ MacroAssembler* masm, Register num_args, Register scratch,
+ Label* stack_overflow,
+ Label::Distance stack_overflow_distance = Label::kFar) {
+ // Check the stack for overflow. We are not trying to catch
+ // interruptions (e.g. debug break and preemption) here, so the "real stack
+ // limit" is checked.
+ __ LoadRoot(kScratchRegister, RootIndex::kRealStackLimit);
+ __ movq(scratch, rsp);
+ // Make scratch the space we have left. The stack might already have
+ // overflowed here, which will cause scratch to become negative.
+ __ subq(scratch, kScratchRegister);
+ __ sarq(scratch, Immediate(kSystemPointerSizeLog2));
+ // Check if the arguments will overflow the stack.
+ __ cmpq(scratch, num_args);
+ // Signed comparison.
+ __ j(less_equal, stack_overflow, stack_overflow_distance);
+}
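Generate_StackOverflowCheck moves up here so it is defined before its new first call site below. The check computes the remaining stack space in pointer-sized slots and compares it, signed, against the argument count; the signedness matters because the stack may already have overflowed. The same arithmetic in plain C++, assuming a downward-growing stack as on x64 (names are illustrative):

    #include <cstdint>

    // Illustrative only: mirrors the emitted check, with
    // kSystemPointerSizeLog2 == 3 on x64.
    inline bool WouldOverflowSketch(uint64_t rsp, uint64_t real_stack_limit,
                                    int64_t num_args) {
      // Remaining space in 8-byte slots; negative (as a signed value) if
      // the stack has already overflowed.
      int64_t slots_left =
          (static_cast<int64_t>(rsp) - static_cast<int64_t>(real_stack_limit)) >> 3;
      return slots_left <= num_args;  // the j(less_equal, ...) above
    }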
+
void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- rax: number of arguments
@@ -85,6 +98,9 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
// -- rsi: context
// -----------------------------------
+ Label stack_overflow;
+ Generate_StackOverflowCheck(masm, rax, rcx, &stack_overflow, Label::kFar);
+
// Enter a construct frame.
{
FrameScope scope(masm, StackFrame::CONSTRUCT);
@@ -142,25 +158,13 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
__ PushReturnAddressFrom(rcx);
__ ret(0);
-}
-void Generate_StackOverflowCheck(
- MacroAssembler* masm, Register num_args, Register scratch,
- Label* stack_overflow,
- Label::Distance stack_overflow_distance = Label::kFar) {
- // Check the stack for overflow. We are not trying to catch
- // interruptions (e.g. debug break and preemption) here, so the "real stack
- // limit" is checked.
- __ LoadRoot(kScratchRegister, RootIndex::kRealStackLimit);
- __ movq(scratch, rsp);
- // Make scratch the space we have left. The stack might already have
- // overflowed here, which will cause scratch to become negative.
- __ subq(scratch, kScratchRegister);
- __ sarq(scratch, Immediate(kSystemPointerSizeLog2));
- // Check if the arguments will overflow the stack.
- __ cmpq(scratch, num_args);
- // Signed comparison.
- __ j(less_equal, stack_overflow, stack_overflow_distance);
+ __ bind(&stack_overflow);
+ {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+ __ CallRuntime(Runtime::kThrowStackOverflow);
+ __ int3(); // This should be unreachable.
+ }
}
} // namespace
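Net effect of the hunks above: the construct stub now checks for stack overflow before entering the construct frame, and the throw path sits after the fast path's ret. A control-flow sketch, not literal emitted code:

    // fast path:
    //   if (remaining slots <= argc) goto stack_overflow;  // checked up front
    //   enter construct frame, run the stub body
    //   ret
    // stack_overflow:
    //   CallRuntime(kThrowStackOverflow)  // throws; never returns
    //   int3                              // trap if it somehow falls through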
@@ -891,13 +895,11 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
Register scratch1, Register scratch2,
Register scratch3) {
// ----------- S t a t e -------------
- // -- rax : argument count (preserved for callee if needed, and caller)
// -- rdx : new target (preserved for callee if needed, and caller)
// -- rdi : target function (preserved for callee if needed, and caller)
// -- feedback vector (preserved for caller if needed)
// -----------------------------------
- DCHECK(!AreAliased(feedback_vector, rax, rdx, rdi, scratch1, scratch2,
- scratch3));
+ DCHECK(!AreAliased(feedback_vector, rdx, rdi, scratch1, scratch2, scratch3));
Label optimized_code_slot_is_weak_ref, fallthrough;
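With rax no longer live here, it drops out of the aliasing DCHECK. For readers unfamiliar with the helper, an aliasing check of this kind can be sketched as follows; this is a hypothetical standalone version, not V8's actual AreAliased:

    #include <initializer_list>

    // Hypothetical: true if any two valid register codes in the list collide.
    inline bool AreAliasedSketch(std::initializer_list<int> regs) {
      for (const int* i = regs.begin(); i != regs.end(); ++i) {
        for (const int* j = i + 1; j != regs.end(); ++j) {
          if (*i >= 0 && *i == *j) return true;  // treat -1 as no_reg
        }
      }
      return false;
    }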
@@ -1086,7 +1088,10 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
Label push_stack_frame;
// Check if feedback vector is valid. If valid, check for optimized code
// and update invocation count. Otherwise, setup the stack frame.
- __ JumpIfRoot(feedback_vector, RootIndex::kUndefinedValue, &push_stack_frame);
+ __ LoadTaggedPointerField(
+ rcx, FieldOperand(feedback_vector, HeapObject::kMapOffset));
+ __ CmpInstanceType(rcx, FEEDBACK_VECTOR_TYPE);
+ __ j(not_equal, &push_stack_frame);
// Read off the optimized code slot in the feedback vector, and if there
// is optimized code or an optimization marker, call that instead.
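The validity test for the feedback vector becomes a map instance-type check rather than a comparison against the undefined sentinel, so any non-FeedbackVector value routes to push_stack_frame. In C-like pseudocode (member names illustrative, not V8's exact layout):

    // map = feedback_vector->map;                  // LoadTaggedPointerField
    // if (map->instance_type != FEEDBACK_VECTOR_TYPE)
    //   goto push_stack_frame;                     // no valid vector yet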
@@ -1106,10 +1111,15 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
__ Push(rsi); // Callee's context.
__ Push(rdi); // Callee's JS function.
- // Reset code age.
- __ movb(FieldOperand(kInterpreterBytecodeArrayRegister,
- BytecodeArray::kBytecodeAgeOffset),
- Immediate(BytecodeArray::kNoAgeBytecodeAge));
+ // Reset code age and OSR arming. The OSR field and the bytecode age are
+ // adjacent 8-bit fields, so both can be reset with a single 16-bit write.
+ // These static asserts guard that this assumption is valid.
+ STATIC_ASSERT(BytecodeArray::kBytecodeAgeOffset ==
+ BytecodeArray::kOSRNestingLevelOffset + kCharSize);
+ STATIC_ASSERT(BytecodeArray::kNoAgeBytecodeAge == 0);
+ __ movw(FieldOperand(kInterpreterBytecodeArrayRegister,
+ BytecodeArray::kOSRNestingLevelOffset),
+ Immediate(0));
// Load initial bytecode offset.
__ movq(kInterpreterBytecodeOffsetRegister,
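The two STATIC_ASSERTs pin down exactly what the single movw relies on: the OSR nesting level and the bytecode age are adjacent 8-bit fields, and the no-age value is zero, so one zeroing 16-bit store resets both. A sketch of the trick, with a hypothetical struct standing in for the real BytecodeArray layout:

    #include <cstdint>
    #include <cstring>

    // Hypothetical layout mirroring the static asserts above.
    struct BytecodeArrayTailSketch {
      uint8_t osr_nesting_level;  // kOSRNestingLevelOffset
      uint8_t bytecode_age;       // kOSRNestingLevelOffset + kCharSize
    };

    inline void ResetAgeAndOsrSketch(BytecodeArrayTailSketch* t) {
      // One 16-bit store clears both adjacent 8-bit fields, like the movw.
      const uint16_t zero = 0;
      std::memcpy(&t->osr_nesting_level, &zero, sizeof zero);
    }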