diff options
Diffstat (limited to 'deps/v8/src/builtins/arm64/builtins-arm64.cc')
-rw-r--r-- | deps/v8/src/builtins/arm64/builtins-arm64.cc | 157 |
1 file changed, 73 insertions, 84 deletions
diff --git a/deps/v8/src/builtins/arm64/builtins-arm64.cc b/deps/v8/src/builtins/arm64/builtins-arm64.cc index 61fee9013b..0d51d9decf 100644 --- a/deps/v8/src/builtins/arm64/builtins-arm64.cc +++ b/deps/v8/src/builtins/arm64/builtins-arm64.cc @@ -55,7 +55,6 @@ void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) { // Run the native code for the InternalArray function called as a normal // function. - __ LoadRoot(x2, Heap::kUndefinedValueRootIndex); __ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl), RelocInfo::CODE_TARGET); } @@ -129,7 +128,7 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) { __ Claim(slot_count); // Preserve the incoming parameters on the stack. - __ LoadRoot(x10, Heap::kTheHoleValueRootIndex); + __ LoadRoot(x10, RootIndex::kTheHoleValue); // Compute a pointer to the slot immediately above the location on the // stack to which arguments will be later copied. @@ -195,6 +194,24 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) { __ Ret(); } +static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args, + Label* stack_overflow) { + UseScratchRegisterScope temps(masm); + Register scratch = temps.AcquireX(); + + // Check the stack for overflow. + // We are not trying to catch interruptions (e.g. debug break and + // preemption) here, so the "real stack limit" is checked. + Label enough_stack_space; + __ LoadRoot(scratch, RootIndex::kRealStackLimit); + // Make scratch the space we have left. The stack might already be overflowed + // here which will cause scratch to become negative. + __ Sub(scratch, sp, scratch); + // Check if the arguments will overflow the stack. + __ Cmp(scratch, Operand(num_args, LSL, kPointerSizeLog2)); + __ B(le, stack_overflow); +} + } // namespace // The construct stub for ES5 constructor functions and ES6 class constructors. 
@@ -249,7 +266,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { // Else: use TheHoleValue as receiver for constructor call __ Bind(¬_create_implicit_receiver); - __ LoadRoot(x0, Heap::kTheHoleValueRootIndex); + __ LoadRoot(x0, RootIndex::kTheHoleValue); // ----------- S t a t e ------------- // -- x0: receiver @@ -303,6 +320,19 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { // slots for the arguments. If the number of arguments was odd, the last // argument will overwrite one of the receivers pushed above. __ Bic(x10, x12, 1); + + // Check if we have enough stack space to push all arguments. + Label enough_stack_space, stack_overflow; + Generate_StackOverflowCheck(masm, x10, &stack_overflow); + __ B(&enough_stack_space); + + __ Bind(&stack_overflow); + // Restore the context from the frame. + __ Ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset)); + __ CallRuntime(Runtime::kThrowStackOverflow); + __ Unreachable(); + + __ Bind(&enough_stack_space); __ Claim(x10); // Copy the arguments. @@ -342,7 +372,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { Label use_receiver, do_throw, leave_frame; // If the result is undefined, we jump out to using the implicit receiver. - __ CompareRoot(x0, Heap::kUndefinedValueRootIndex); + __ CompareRoot(x0, RootIndex::kUndefinedValue); __ B(eq, &use_receiver); // Otherwise we do a smi check and fall through to check if the return value @@ -364,7 +394,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { // on-stack receiver as the result. __ Bind(&use_receiver); __ Peek(x0, 0 * kPointerSize); - __ CompareRoot(x0, Heap::kTheHoleValueRootIndex); + __ CompareRoot(x0, RootIndex::kTheHoleValue); __ B(eq, &do_throw); __ Bind(&leave_frame); @@ -425,7 +455,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) { // Check the stack for overflow. We are not trying to catch interruptions // (i.e. 
debug break and preemption) here, so check the "real stack limit". Label stack_overflow; - __ CompareRoot(sp, Heap::kRealStackLimitRootIndex); + __ CompareRoot(sp, RootIndex::kRealStackLimit); __ B(lo, &stack_overflow); // Get number of arguments for generator function. @@ -508,7 +538,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) { { FrameScope scope(masm, StackFrame::INTERNAL); // Push hole as receiver since we do not use it for stepping. - __ LoadRoot(x5, Heap::kTheHoleValueRootIndex); + __ LoadRoot(x5, RootIndex::kTheHoleValue); __ Push(x1, padreg, x4, x5); __ CallRuntime(Runtime::kDebugOnFunctionCall); __ Pop(padreg, x1); @@ -534,24 +564,6 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) { } } -static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args, - Label* stack_overflow) { - UseScratchRegisterScope temps(masm); - Register scratch = temps.AcquireX(); - - // Check the stack for overflow. - // We are not trying to catch interruptions (e.g. debug break and - // preemption) here, so the "real stack limit" is checked. - Label enough_stack_space; - __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex); - // Make scratch the space we have left. The stack might already be overflowed - // here which will cause scratch to become negative. - __ Sub(scratch, sp, scratch); - // Check if the arguments will overflow the stack. - __ Cmp(scratch, Operand(num_args, LSL, kPointerSizeLog2)); - __ B(le, stack_overflow); -} - // Input: // x0: new.target. // x1: function. @@ -639,7 +651,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, // Initialize all JavaScript callee-saved registers, since they will be seen // by the garbage collector as part of handlers. // The original values have been saved in JSEntryStub::GenerateBody(). 
- __ LoadRoot(x19, Heap::kUndefinedValueRootIndex); + __ LoadRoot(x19, RootIndex::kUndefinedValue); __ Mov(x20, x19); __ Mov(x21, x19); __ Mov(x22, x19); @@ -957,7 +969,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) { // Do a stack check to ensure we don't go over the limit. Label ok; __ Sub(x10, sp, Operand(x11)); - __ CompareRoot(x10, Heap::kRealStackLimitRootIndex); + __ CompareRoot(x10, RootIndex::kRealStackLimit); __ B(hs, &ok); __ CallRuntime(Runtime::kThrowStackOverflow); __ Bind(&ok); @@ -966,7 +978,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) { // Note: there should always be at least one stack slot for the return // register in the register file. Label loop_header; - __ LoadRoot(x10, Heap::kUndefinedValueRootIndex); + __ LoadRoot(x10, RootIndex::kUndefinedValue); __ Lsr(x11, x11, kPointerSizeLog2); // Round up the number of registers to a multiple of 2, to align the stack // to 16 bytes. @@ -988,7 +1000,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) { __ Bind(&no_incoming_new_target_or_generator_register); // Load accumulator with undefined. - __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex); + __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue); // Load the dispatch table into a register and dispatch to the bytecode // handler at the current bytecode offset. @@ -1081,7 +1093,7 @@ static void Generate_InterpreterPushArgs(MacroAssembler* masm, if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) { // Store "undefined" as the receiver arg if we need to. 
Register receiver = x14; - __ LoadRoot(receiver, Heap::kUndefinedValueRootIndex); + __ LoadRoot(receiver, RootIndex::kUndefinedValue); __ SlotAddress(stack_addr, num_args); __ Str(receiver, MemOperand(stack_addr)); __ Mov(slots_to_copy, num_args); @@ -1300,7 +1312,7 @@ void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) { Register scratch1 = x12; Register scratch2 = x13; Register scratch3 = x14; - __ LoadRoot(undef, Heap::kUndefinedValueRootIndex); + __ LoadRoot(undef, RootIndex::kUndefinedValue); Label at_least_one_arg; Label three_args; @@ -1452,15 +1464,10 @@ void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { __ Ret(); } -static void Generate_OnStackReplacementHelper(MacroAssembler* masm, - bool has_handler_frame) { +void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) { // Lookup the function in the JavaScript frame. - if (has_handler_frame) { - __ Ldr(x0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); - __ Ldr(x0, MemOperand(x0, JavaScriptFrameConstants::kFunctionOffset)); - } else { - __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); - } + __ Ldr(x0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); + __ Ldr(x0, MemOperand(x0, JavaScriptFrameConstants::kFunctionOffset)); { FrameScope scope(masm, StackFrame::INTERNAL); @@ -1476,11 +1483,9 @@ static void Generate_OnStackReplacementHelper(MacroAssembler* masm, __ Bind(&skip); - // Drop any potential handler frame that is be sitting on top of the actual + // Drop the handler frame that is be sitting on top of the actual // JavaScript frame. This is the case then OSR is triggered from bytecode. - if (has_handler_frame) { - __ LeaveFrame(StackFrame::STUB); - } + __ LeaveFrame(StackFrame::STUB); // Load deoptimization data from the code object. 
// <deopt_data> = <code>[#deoptimization_data_offset] @@ -1501,14 +1506,6 @@ static void Generate_OnStackReplacementHelper(MacroAssembler* masm, __ Ret(); } -void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { - Generate_OnStackReplacementHelper(masm, false); -} - -void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) { - Generate_OnStackReplacementHelper(masm, true); -} - // static void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) { // ----------- S t a t e ------------- @@ -1526,8 +1523,8 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) { Register undefined_value = x3; Register null_value = x4; - __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex); - __ LoadRoot(null_value, Heap::kNullValueRootIndex); + __ LoadRoot(undefined_value, RootIndex::kUndefinedValue); + __ LoadRoot(null_value, RootIndex::kNullValue); // 1. Load receiver into x1, argArray into x2 (if present), remove all // arguments from the stack (including the receiver), and push thisArg (if @@ -1609,7 +1606,7 @@ void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) { Label non_zero; Register scratch = x10; __ Cbnz(argc, &non_zero); - __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); + __ LoadRoot(scratch, RootIndex::kUndefinedValue); // Overwrite receiver with undefined, which will be the new receiver. // We do not need to overwrite the padding slot above it with anything. __ Poke(scratch, 0); @@ -1666,7 +1663,7 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) { Register this_argument = x4; Register undefined_value = x3; - __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex); + __ LoadRoot(undefined_value, RootIndex::kUndefinedValue); // 1. 
Load target into x1 (if present), argumentsList into x2 (if present), // remove all arguments from the stack (including the receiver), and push @@ -1743,7 +1740,7 @@ void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) { Register new_target = x3; Register undefined_value = x4; - __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex); + __ LoadRoot(undefined_value, RootIndex::kUndefinedValue); // 1. Load target into x1 (if present), argumentsList into x2 (if present), // new.target into x3 (if present, otherwise use target), remove all @@ -1933,21 +1930,8 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm, Register argc = x0; Register len = x4; - // Check for stack overflow. - { - // Check the stack for overflow. We are not trying to catch interruptions - // (i.e. debug break and preemption) here, so check the "real stack limit". - Label done; - __ LoadRoot(x10, Heap::kRealStackLimitRootIndex); - // Make x10 the space we have left. The stack might already be overflowed - // here which will cause x10 to become negative. - __ Sub(x10, sp, x10); - // Check if the arguments will overflow the stack. - __ Cmp(x10, Operand(len, LSL, kPointerSizeLog2)); - __ B(gt, &done); // Signed comparison. - __ TailCallRuntime(Runtime::kThrowStackOverflow); - __ Bind(&done); - } + Label stack_overflow; + Generate_StackOverflowCheck(masm, len, &stack_overflow); // Skip argument setup if we don't need to push any varargs. 
Label done; @@ -1963,8 +1947,8 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm, Register undefined_value = x12; Register scratch = x13; __ Add(src, arguments_list, FixedArray::kHeaderSize - kHeapObjectTag); - __ LoadRoot(the_hole_value, Heap::kTheHoleValueRootIndex); - __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex); + __ LoadRoot(the_hole_value, RootIndex::kTheHoleValue); + __ LoadRoot(undefined_value, RootIndex::kUndefinedValue); // We do not use the CompareRoot macro as it would do a LoadRoot behind the // scenes and we want to avoid that in a loop. // TODO(all): Consider using Ldp and Stp. @@ -1980,6 +1964,9 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm, // Tail-call to the actual Call or Construct builtin. __ Jump(code, RelocInfo::CODE_TARGET); + + __ bind(&stack_overflow); + __ TailCallRuntime(Runtime::kThrowStackOverflow); } // static @@ -2121,9 +2108,8 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm, __ B(hs, &done_convert); if (mode != ConvertReceiverMode::kNotNullOrUndefined) { Label convert_global_proxy; - __ JumpIfRoot(x3, Heap::kUndefinedValueRootIndex, - &convert_global_proxy); - __ JumpIfNotRoot(x3, Heap::kNullValueRootIndex, &convert_to_object); + __ JumpIfRoot(x3, RootIndex::kUndefinedValue, &convert_global_proxy); + __ JumpIfNotRoot(x3, RootIndex::kNullValue, &convert_to_object); __ Bind(&convert_global_proxy); { // Patch receiver to global proxy. @@ -2211,13 +2197,13 @@ void Generate_PushBoundArguments(MacroAssembler* masm) { // (i.e. debug break and preemption) here, so check the "real stack // limit". Label done; - __ LoadRoot(x10, Heap::kRealStackLimitRootIndex); + __ LoadRoot(x10, RootIndex::kRealStackLimit); // Make x10 the space we have left. The stack might already be overflowed // here which will cause x10 to become negative. __ Sub(x10, sp, x10); // Check if the arguments will overflow the stack. 
__ Cmp(x10, Operand(bound_argc, LSL, kPointerSizeLog2)); - __ B(gt, &done); // Signed comparison. + __ B(hs, &done); __ TailCallRuntime(Runtime::kThrowStackOverflow); __ Bind(&done); } @@ -2379,7 +2365,7 @@ void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { // Calling convention for function specific ConstructStubs require // x2 to contain either an AllocationSite or undefined. - __ LoadRoot(x2, Heap::kUndefinedValueRootIndex); + __ LoadRoot(x2, RootIndex::kUndefinedValue); Label call_generic_stub; @@ -2586,7 +2572,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { // Fill the remaining expected arguments with undefined. __ RecordComment("-- Fill slots with undefined --"); __ Sub(copy_end, copy_to, Operand(scratch1, LSL, kPointerSizeLog2)); - __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex); + __ LoadRoot(scratch1, RootIndex::kUndefinedValue); Label fill; __ Bind(&fill); @@ -2856,7 +2842,7 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size, // Check result for exception sentinel. Label exception_returned; - __ CompareRoot(result, Heap::kExceptionRootIndex); + __ CompareRoot(result, RootIndex::kException); __ B(eq, &exception_returned); // The call succeeded, so unwind the stack and return. @@ -2922,9 +2908,9 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size, __ Bind(¬_js_frame); // Reset the masking register. This is done independent of the underlying - // feature flag {FLAG_branch_load_poisoning} to make the snapshot work with - // both configurations. It is safe to always do this, because the underlying - // register is caller-saved and can be arbitrarily clobbered. + // feature flag {FLAG_untrusted_code_mitigations} to make the snapshot work + // with both configurations. It is safe to always do this, because the + // underlying register is caller-saved and can be arbitrarily clobbered. 
__ ResetSpeculationPoisonRegister(); // Compute the handler entry address and jump to it. @@ -3131,6 +3117,9 @@ void GenerateInternalArrayConstructorCase(MacroAssembler* masm, __ Bind(&n_case); // N arguments. + // Load undefined into the allocation site parameter as required by + // ArrayNArgumentsConstructor. + __ LoadRoot(kJavaScriptCallExtraArg1Register, RootIndex::kUndefinedValue); Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor); __ Jump(code, RelocInfo::CODE_TARGET); } |