summaryrefslogtreecommitdiff
path: root/deps/v8/src/builtins/mips/builtins-mips.cc
diff options
context:
space:
mode:
Diffstat (limited to 'deps/v8/src/builtins/mips/builtins-mips.cc')
-rw-r--r--deps/v8/src/builtins/mips/builtins-mips.cc260
1 files changed, 126 insertions, 134 deletions
diff --git a/deps/v8/src/builtins/mips/builtins-mips.cc b/deps/v8/src/builtins/mips/builtins-mips.cc
index 003eeb22e0..a2b6bea626 100644
--- a/deps/v8/src/builtins/mips/builtins-mips.cc
+++ b/deps/v8/src/builtins/mips/builtins-mips.cc
@@ -395,10 +395,9 @@ void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
__ bind(&to_string);
{
FrameScope scope(masm, StackFrame::MANUAL);
- ToStringStub stub(masm->isolate());
__ SmiTag(t0);
__ EnterBuiltinFrame(cp, a1, t0);
- __ CallStub(&stub);
+ __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
__ LeaveBuiltinFrame(cp, a1, t0);
__ SmiUntag(t0);
}
@@ -459,11 +458,10 @@ void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
__ bind(&convert);
{
FrameScope scope(masm, StackFrame::MANUAL);
- ToStringStub stub(masm->isolate());
__ SmiTag(t0);
__ EnterBuiltinFrame(cp, a1, t0);
__ Push(a3);
- __ CallStub(&stub);
+ __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
__ Move(a0, v0);
__ Pop(a3);
__ LeaveBuiltinFrame(cp, a1, t0);
@@ -1051,6 +1049,17 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
__ Branch(&switch_to_different_code_kind, ne, a0,
Operand(masm->CodeObject())); // Self-reference to this code.
+ // Increment invocation count for the function.
+ __ lw(a0, FieldMemOperand(a1, JSFunction::kLiteralsOffset));
+ __ lw(a0, FieldMemOperand(a0, LiteralsArray::kFeedbackVectorOffset));
+ __ lw(t0, FieldMemOperand(
+ a0, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
+ TypeFeedbackVector::kHeaderSize));
+ __ Addu(t0, t0, Operand(Smi::FromInt(1)));
+ __ sw(t0, FieldMemOperand(
+ a0, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
+ TypeFeedbackVector::kHeaderSize));
+
// Check function data field is actually a BytecodeArray object.
if (FLAG_debug_code) {
__ SmiTst(kInterpreterBytecodeArrayRegister, t0);
@@ -1160,6 +1169,45 @@ void Builtins::Generate_InterpreterMarkBaselineOnReturn(MacroAssembler* masm) {
__ Jump(ra);
}
+static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
+ Register scratch1, Register scratch2,
+ Label* stack_overflow) {
+ // Check the stack for overflow. We are not trying to catch
+ // interruptions (e.g. debug break and preemption) here, so the "real stack
+ // limit" is checked.
+ __ LoadRoot(scratch1, Heap::kRealStackLimitRootIndex);
+ // Make scratch1 the space we have left. The stack might already be overflowed
+ // here which will cause scratch1 to become negative.
+ __ subu(scratch1, sp, scratch1);
+ // Check if the arguments will overflow the stack.
+ __ sll(scratch2, num_args, kPointerSizeLog2);
+ // Signed comparison.
+ __ Branch(stack_overflow, le, scratch1, Operand(scratch2));
+}
+
+static void Generate_InterpreterPushArgs(MacroAssembler* masm,
+ Register num_args, Register index,
+ Register scratch, Register scratch2,
+ Label* stack_overflow) {
+ Generate_StackOverflowCheck(masm, num_args, scratch, scratch2,
+ stack_overflow);
+
+ // Find the address of the last argument.
+ __ mov(scratch2, num_args);
+ __ sll(scratch2, scratch2, kPointerSizeLog2);
+ __ Subu(scratch2, index, Operand(scratch2));
+
+ // Push the arguments.
+ Label loop_header, loop_check;
+ __ Branch(&loop_check);
+ __ bind(&loop_header);
+ __ lw(scratch, MemOperand(index));
+ __ Addu(index, index, Operand(-kPointerSize));
+ __ push(scratch);
+ __ bind(&loop_check);
+ __ Branch(&loop_header, gt, index, Operand(scratch2));
+}
+
// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
MacroAssembler* masm, TailCallMode tail_call_mode,
@@ -1171,21 +1219,12 @@ void Builtins::Generate_InterpreterPushArgsAndCallImpl(
// they are to be pushed onto the stack.
// -- a1 : the target to call (can be any Object).
// -----------------------------------
+ Label stack_overflow;
- // Find the address of the last argument.
- __ Addu(a3, a0, Operand(1)); // Add one for receiver.
- __ sll(a3, a3, kPointerSizeLog2);
- __ Subu(a3, a2, Operand(a3));
+ __ Addu(t0, a0, Operand(1)); // Add one for receiver.
- // Push the arguments.
- Label loop_header, loop_check;
- __ Branch(&loop_check);
- __ bind(&loop_header);
- __ lw(t0, MemOperand(a2));
- __ Addu(a2, a2, Operand(-kPointerSize));
- __ push(t0);
- __ bind(&loop_check);
- __ Branch(&loop_header, gt, a2, Operand(a3));
+ // This function modifies a2, t4 and t1.
+ Generate_InterpreterPushArgs(masm, t0, a2, t4, t1, &stack_overflow);
// Call the target.
if (function_type == CallableType::kJSFunction) {
@@ -1198,36 +1237,87 @@ void Builtins::Generate_InterpreterPushArgsAndCallImpl(
tail_call_mode),
RelocInfo::CODE_TARGET);
}
+
+ __ bind(&stack_overflow);
+ {
+ __ TailCallRuntime(Runtime::kThrowStackOverflow);
+ // Unreachable code.
+ __ break_(0xCC);
+ }
}
// static
-void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
+void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
+ MacroAssembler* masm, CallableType construct_type) {
// ----------- S t a t e -------------
// -- a0 : argument count (not including receiver)
// -- a3 : new target
// -- a1 : constructor to call
- // -- a2 : address of the first argument
+ // -- a2 : allocation site feedback if available, undefined otherwise.
+ // -- t4 : address of the first argument
// -----------------------------------
-
- // Find the address of the last argument.
- __ sll(t0, a0, kPointerSizeLog2);
- __ Subu(t0, a2, Operand(t0));
+ Label stack_overflow;
// Push a slot for the receiver.
__ push(zero_reg);
- // Push the arguments.
- Label loop_header, loop_check;
- __ Branch(&loop_check);
- __ bind(&loop_header);
- __ lw(t1, MemOperand(a2));
- __ Addu(a2, a2, Operand(-kPointerSize));
- __ push(t1);
- __ bind(&loop_check);
- __ Branch(&loop_header, gt, a2, Operand(t0));
+  // This function modifies t4, t1 and t0.
+ Generate_InterpreterPushArgs(masm, a0, t4, t1, t0, &stack_overflow);
+
+ __ AssertUndefinedOrAllocationSite(a2, t0);
+ if (construct_type == CallableType::kJSFunction) {
+ __ AssertFunction(a1);
+
+ // Tail call to the function-specific construct stub (still in the caller
+ // context at this point).
+ __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
+ __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset));
+ __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
+ __ Jump(at);
+ } else {
+ DCHECK_EQ(construct_type, CallableType::kAny);
+ // Call the constructor with a0, a1, and a3 unmodified.
+ __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
+ }
+
+ __ bind(&stack_overflow);
+ {
+ __ TailCallRuntime(Runtime::kThrowStackOverflow);
+ // Unreachable code.
+ __ break_(0xCC);
+ }
+}
+
+// static
+void Builtins::Generate_InterpreterPushArgsAndConstructArray(
+ MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- a0 : the number of arguments (not including the receiver)
+ // -- a1 : the target to call checked to be Array function.
+ // -- a2 : allocation site feedback.
+ // -- a3 : the address of the first argument to be pushed. Subsequent
+ // arguments should be consecutive above this, in the same order as
+ // they are to be pushed onto the stack.
+ // -----------------------------------
+ Label stack_overflow;
+
+ __ Addu(t0, a0, Operand(1)); // Add one for receiver.
+
+ // This function modifies a3, t4, and t1.
+ Generate_InterpreterPushArgs(masm, t0, a3, t1, t4, &stack_overflow);
- // Call the constructor with a0, a1, and a3 unmodified.
- __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
+ // ArrayConstructor stub expects constructor in a3. Set it here.
+ __ mov(a3, a1);
+
+ ArrayConstructorStub stub(masm->isolate());
+ __ TailCallStub(&stub);
+
+ __ bind(&stack_overflow);
+ {
+ __ TailCallRuntime(Runtime::kThrowStackOverflow);
+ // Unreachable code.
+ __ break_(0xCC);
+ }
}
void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
@@ -1805,61 +1895,6 @@ void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
}
// static
-void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
- int field_index) {
- // ----------- S t a t e -------------
- // -- a0 : number of arguments
- // -- a1 : function
- // -- cp : context
- // -- sp[0] : receiver
- // -----------------------------------
-
- // 1. Pop receiver into a0 and check that it's actually a JSDate object.
- Label receiver_not_date;
- {
- __ Pop(a0);
- __ JumpIfSmi(a0, &receiver_not_date);
- __ GetObjectType(a0, t0, t0);
- __ Branch(&receiver_not_date, ne, t0, Operand(JS_DATE_TYPE));
- }
-
- // 2. Load the specified date field, falling back to the runtime as necessary.
- if (field_index == JSDate::kDateValue) {
- __ Ret(USE_DELAY_SLOT);
- __ lw(v0, FieldMemOperand(a0, JSDate::kValueOffset)); // In delay slot.
- } else {
- if (field_index < JSDate::kFirstUncachedField) {
- Label stamp_mismatch;
- __ li(a1, Operand(ExternalReference::date_cache_stamp(masm->isolate())));
- __ lw(a1, MemOperand(a1));
- __ lw(t0, FieldMemOperand(a0, JSDate::kCacheStampOffset));
- __ Branch(&stamp_mismatch, ne, t0, Operand(a1));
- __ Ret(USE_DELAY_SLOT);
- __ lw(v0, FieldMemOperand(
- a0, JSDate::kValueOffset +
- field_index * kPointerSize)); // In delay slot.
- __ bind(&stamp_mismatch);
- }
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ PrepareCallCFunction(2, t0);
- __ li(a1, Operand(Smi::FromInt(field_index)));
- __ CallCFunction(
- ExternalReference::get_date_field_function(masm->isolate()), 2);
- }
- __ Ret();
-
- // 3. Raise a TypeError if the receiver is not a date.
- __ bind(&receiver_not_date);
- {
- FrameScope scope(masm, StackFrame::MANUAL);
- __ Push(a0);
- __ Move(a0, Smi::FromInt(0));
- __ EnterBuiltinFrame(cp, a1, a0);
- __ CallRuntime(Runtime::kThrowNotDateError);
- }
-}
-
-// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- a0 : argc
@@ -2115,27 +2150,6 @@ void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
}
}
-static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
- Label* stack_overflow) {
- // ----------- S t a t e -------------
- // -- a0 : actual number of arguments
- // -- a1 : function (passed through to callee)
- // -- a2 : expected number of arguments
- // -- a3 : new target (passed through to callee)
- // -----------------------------------
- // Check the stack for overflow. We are not trying to catch
- // interruptions (e.g. debug break and preemption) here, so the "real stack
- // limit" is checked.
- __ LoadRoot(t1, Heap::kRealStackLimitRootIndex);
- // Make t1 the space we have left. The stack might already be overflowed
- // here which will cause t1 to become negative.
- __ subu(t1, sp, t1);
- // Check if the arguments will overflow the stack.
- __ sll(at, a2, kPointerSizeLog2);
- // Signed comparison.
- __ Branch(stack_overflow, le, t1, Operand(at));
-}
-
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
__ sll(a0, a0, kSmiTagSize);
__ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
@@ -2854,28 +2868,6 @@ void Builtins::Generate_Abort(MacroAssembler* masm) {
__ TailCallRuntime(Runtime::kAbort);
}
-// static
-void Builtins::Generate_ToNumber(MacroAssembler* masm) {
- // The ToNumber stub takes one argument in a0.
- Label not_smi;
- __ JumpIfNotSmi(a0, &not_smi);
- __ Ret(USE_DELAY_SLOT);
- __ mov(v0, a0);
- __ bind(&not_smi);
-
- Label not_heap_number;
- __ GetObjectType(a0, a1, a1);
- // a0: receiver
- // a1: receiver instance type
- __ Branch(&not_heap_number, ne, a1, Operand(HEAP_NUMBER_TYPE));
- __ Ret(USE_DELAY_SLOT);
- __ mov(v0, a0);
- __ bind(&not_heap_number);
-
- __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
- RelocInfo::CODE_TARGET);
-}
-
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// State setup as expected by MacroAssembler::InvokePrologue.
// ----------- S t a t e -------------
@@ -2900,7 +2892,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// a3: new target (passed through to callee)
__ bind(&enough);
EnterArgumentsAdaptorFrame(masm);
- ArgumentAdaptorStackCheck(masm, &stack_overflow);
+ Generate_StackOverflowCheck(masm, a2, t1, at, &stack_overflow);
// Calculate copy start address into a0 and copy end address into t1.
__ Lsa(a0, fp, a0, kPointerSizeLog2 - kSmiTagSize);
@@ -2930,7 +2922,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
{ // Too few parameters: Actual < expected.
__ bind(&too_few);
EnterArgumentsAdaptorFrame(masm);
- ArgumentAdaptorStackCheck(masm, &stack_overflow);
+ Generate_StackOverflowCheck(masm, a2, t1, at, &stack_overflow);
// Calculate copy start address into a0 and copy end address into t3.
// a0: actual number of arguments as a smi