diff options
Diffstat (limited to 'deps/v8/src/mips64/code-stubs-mips64.cc')
-rw-r--r-- | deps/v8/src/mips64/code-stubs-mips64.cc | 306 |
1 files changed, 15 insertions, 291 deletions
diff --git a/deps/v8/src/mips64/code-stubs-mips64.cc b/deps/v8/src/mips64/code-stubs-mips64.cc index 321fa44f55..5ed97cc004 100644 --- a/deps/v8/src/mips64/code-stubs-mips64.cc +++ b/deps/v8/src/mips64/code-stubs-mips64.cc @@ -25,21 +25,6 @@ namespace internal { #define __ ACCESS_MASM(masm) -void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) { - __ dsll(t9, a0, kPointerSizeLog2); - __ Daddu(t9, sp, t9); - __ Sd(a1, MemOperand(t9, 0)); - __ Push(a1); - __ Push(a2); - __ Daddu(a0, a0, 3); - __ TailCallRuntime(Runtime::kNewArray); -} - -void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { - CommonArrayConstructorStub::GenerateStubsAheadOfTime(isolate); - StoreFastElementStub::GenerateAheadOfTime(isolate); -} - void JSEntryStub::Generate(MacroAssembler* masm) { Label invoke, handler_entry, exit; Isolate* isolate = masm->isolate(); @@ -222,6 +207,19 @@ void DirectCEntryStub::Generate(MacroAssembler* masm) { void DirectCEntryStub::GenerateCall(MacroAssembler* masm, Register target) { + if (FLAG_embedded_builtins) { + if (masm->root_array_available() && + isolate()->ShouldLoadConstantsFromRootList()) { + // This is basically an inlined version of Call(Handle<Code>) that loads + // the code object into kScratchReg instead of t9. 
+ __ Move(t9, target); + __ IndirectLoadConstant(kScratchReg, GetCode()); + __ Daddu(kScratchReg, kScratchReg, + Operand(Code::kHeaderSize - kHeapObjectTag)); + __ Call(kScratchReg); + return; + } + } intptr_t loc = reinterpret_cast<intptr_t>(GetCode().location()); __ Move(t9, target); @@ -311,280 +309,6 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) { __ Ret(); } - -template<class T> -static void CreateArrayDispatch(MacroAssembler* masm, - AllocationSiteOverrideMode mode) { - if (mode == DISABLE_ALLOCATION_SITES) { - T stub(masm->isolate(), GetInitialFastElementsKind(), mode); - __ TailCallStub(&stub); - } else if (mode == DONT_OVERRIDE) { - int last_index = - GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND); - for (int i = 0; i <= last_index; ++i) { - ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); - T stub(masm->isolate(), kind); - __ TailCallStub(&stub, eq, a3, Operand(kind)); - } - - // If we reached this point there is a problem. - __ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor); - } else { - UNREACHABLE(); - } -} - - -static void CreateArrayDispatchOneArgument(MacroAssembler* masm, - AllocationSiteOverrideMode mode) { - // a2 - allocation site (if mode != DISABLE_ALLOCATION_SITES) - // a3 - kind (if mode != DISABLE_ALLOCATION_SITES) - // a0 - number of arguments - // a1 - constructor? 
- // sp[0] - last argument - STATIC_ASSERT(PACKED_SMI_ELEMENTS == 0); - STATIC_ASSERT(HOLEY_SMI_ELEMENTS == 1); - STATIC_ASSERT(PACKED_ELEMENTS == 2); - STATIC_ASSERT(HOLEY_ELEMENTS == 3); - STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS == 4); - STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == 5); - - if (mode == DISABLE_ALLOCATION_SITES) { - ElementsKind initial = GetInitialFastElementsKind(); - ElementsKind holey_initial = GetHoleyElementsKind(initial); - - ArraySingleArgumentConstructorStub stub_holey(masm->isolate(), - holey_initial, - DISABLE_ALLOCATION_SITES); - __ TailCallStub(&stub_holey); - } else if (mode == DONT_OVERRIDE) { - // is the low bit set? If so, we are holey and that is good. - Label normal_sequence; - __ And(kScratchReg, a3, Operand(1)); - __ Branch(&normal_sequence, ne, kScratchReg, Operand(zero_reg)); - - // We are going to create a holey array, but our kind is non-holey. - // Fix kind and retry (only if we have an allocation site in the slot). - __ Daddu(a3, a3, Operand(1)); - - if (FLAG_debug_code) { - __ Ld(a5, FieldMemOperand(a2, 0)); - __ LoadRoot(kScratchReg, Heap::kAllocationSiteMapRootIndex); - __ Assert(eq, AbortReason::kExpectedAllocationSite, a5, - Operand(kScratchReg)); - } - - // Save the resulting elements kind in type info. We can't just store a3 - // in the AllocationSite::transition_info field because elements kind is - // restricted to a portion of the field...upper bits need to be left alone. 
- STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); - __ Ld(a4, FieldMemOperand( - a2, AllocationSite::kTransitionInfoOrBoilerplateOffset)); - __ Daddu(a4, a4, Operand(Smi::FromInt(kFastElementsKindPackedToHoley))); - __ Sd(a4, FieldMemOperand( - a2, AllocationSite::kTransitionInfoOrBoilerplateOffset)); - - __ bind(&normal_sequence); - int last_index = - GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND); - for (int i = 0; i <= last_index; ++i) { - ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); - ArraySingleArgumentConstructorStub stub(masm->isolate(), kind); - __ TailCallStub(&stub, eq, a3, Operand(kind)); - } - - // If we reached this point there is a problem. - __ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor); - } else { - UNREACHABLE(); - } -} - - -template<class T> -static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { - int to_index = - GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND); - for (int i = 0; i <= to_index; ++i) { - ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); - T stub(isolate, kind); - stub.GetCode(); - if (AllocationSite::ShouldTrack(kind)) { - T stub1(isolate, kind, DISABLE_ALLOCATION_SITES); - stub1.GetCode(); - } - } -} - -void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) { - ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>( - isolate); - ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>( - isolate); - ArrayNArgumentsConstructorStub stub(isolate); - stub.GetCode(); - ElementsKind kinds[2] = {PACKED_ELEMENTS, HOLEY_ELEMENTS}; - for (int i = 0; i < 2; i++) { - // For internal arrays we only need a few things. 
- InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]); - stubh1.GetCode(); - InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]); - stubh2.GetCode(); - } -} - - -void ArrayConstructorStub::GenerateDispatchToArrayStub( - MacroAssembler* masm, - AllocationSiteOverrideMode mode) { - Label not_zero_case, not_one_case; - __ And(kScratchReg, a0, a0); - __ Branch(&not_zero_case, ne, kScratchReg, Operand(zero_reg)); - CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode); - - __ bind(&not_zero_case); - __ Branch(&not_one_case, gt, a0, Operand(1)); - CreateArrayDispatchOneArgument(masm, mode); - - __ bind(&not_one_case); - ArrayNArgumentsConstructorStub stub(masm->isolate()); - __ TailCallStub(&stub); -} - - -void ArrayConstructorStub::Generate(MacroAssembler* masm) { - // ----------- S t a t e ------------- - // -- a0 : argc (only if argument_count() == ANY) - // -- a1 : constructor - // -- a2 : AllocationSite or undefined - // -- a3 : new target - // -- sp[0] : last argument - // ----------------------------------- - - if (FLAG_debug_code) { - // The array construct code is only set for the global and natives - // builtin Array functions which always have maps. - - // Initial map for the builtin Array function should be a map. - __ Ld(a4, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); - // Will both indicate a nullptr and a Smi. - __ SmiTst(a4, kScratchReg); - __ Assert(ne, AbortReason::kUnexpectedInitialMapForArrayFunction, - kScratchReg, Operand(zero_reg)); - __ GetObjectType(a4, a4, a5); - __ Assert(eq, AbortReason::kUnexpectedInitialMapForArrayFunction, a5, - Operand(MAP_TYPE)); - - // We should either have undefined in a2 or a valid AllocationSite - __ AssertUndefinedOrAllocationSite(a2, a4); - } - - // Enter the context of the Array function. 
- __ Ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); - - Label subclassing; - __ Branch(&subclassing, ne, a1, Operand(a3)); - - Label no_info; - // Get the elements kind and case on that. - __ LoadRoot(kScratchReg, Heap::kUndefinedValueRootIndex); - __ Branch(&no_info, eq, a2, Operand(kScratchReg)); - - __ Ld(a3, FieldMemOperand( - a2, AllocationSite::kTransitionInfoOrBoilerplateOffset)); - __ SmiUntag(a3); - STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); - __ And(a3, a3, Operand(AllocationSite::ElementsKindBits::kMask)); - GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); - - __ bind(&no_info); - GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES); - - // Subclassing. - __ bind(&subclassing); - __ Dlsa(kScratchReg, sp, a0, kPointerSizeLog2); - __ Sd(a1, MemOperand(kScratchReg)); - __ li(kScratchReg, Operand(3)); - __ Daddu(a0, a0, kScratchReg); - __ Push(a3, a2); - __ JumpToExternalReference(ExternalReference::Create(Runtime::kNewArray)); -} - - -void InternalArrayConstructorStub::GenerateCase( - MacroAssembler* masm, ElementsKind kind) { - - InternalArrayNoArgumentConstructorStub stub0(isolate(), kind); - __ TailCallStub(&stub0, lo, a0, Operand(1)); - - ArrayNArgumentsConstructorStub stubN(isolate()); - __ TailCallStub(&stubN, hi, a0, Operand(1)); - - if (IsFastPackedElementsKind(kind)) { - // We might need to create a holey array - // look at the first argument. 
- __ Ld(kScratchReg, MemOperand(sp, 0)); - - InternalArraySingleArgumentConstructorStub - stub1_holey(isolate(), GetHoleyElementsKind(kind)); - __ TailCallStub(&stub1_holey, ne, kScratchReg, Operand(zero_reg)); - } - - InternalArraySingleArgumentConstructorStub stub1(isolate(), kind); - __ TailCallStub(&stub1); -} - - -void InternalArrayConstructorStub::Generate(MacroAssembler* masm) { - // ----------- S t a t e ------------- - // -- a0 : argc - // -- a1 : constructor - // -- sp[0] : return address - // -- sp[4] : last argument - // ----------------------------------- - - if (FLAG_debug_code) { - // The array construct code is only set for the global and natives - // builtin Array functions which always have maps. - - // Initial map for the builtin Array function should be a map. - __ Ld(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); - // Will both indicate a nullptr and a Smi. - __ SmiTst(a3, kScratchReg); - __ Assert(ne, AbortReason::kUnexpectedInitialMapForArrayFunction, - kScratchReg, Operand(zero_reg)); - __ GetObjectType(a3, a3, a4); - __ Assert(eq, AbortReason::kUnexpectedInitialMapForArrayFunction, a4, - Operand(MAP_TYPE)); - } - - // Figure out the right elements kind. - __ Ld(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); - - // Load the map's "bit field 2" into a3. We only need the first byte, - // but the following bit field extraction takes care of that anyway. - __ Lbu(a3, FieldMemOperand(a3, Map::kBitField2Offset)); - // Retrieve elements_kind from bit field 2. 
- __ DecodeField<Map::ElementsKindBits>(a3); - - if (FLAG_debug_code) { - Label done; - __ Branch(&done, eq, a3, Operand(PACKED_ELEMENTS)); - __ Assert( - eq, - AbortReason::kInvalidElementsKindForInternalArrayOrInternalPackedArray, - a3, Operand(HOLEY_ELEMENTS)); - __ bind(&done); - } - - Label fast_elements_case; - __ Branch(&fast_elements_case, eq, a3, Operand(PACKED_ELEMENTS)); - GenerateCase(masm, HOLEY_ELEMENTS); - - __ bind(&fast_elements_case); - GenerateCase(masm, PACKED_ELEMENTS); -} - static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { int64_t offset = (ref0.address() - ref1.address()); DCHECK(static_cast<int>(offset) == offset); @@ -620,7 +344,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm, __ Branch(&profiler_disabled, eq, t9, Operand(zero_reg)); // Additional parameter is the address of the actual callback. - __ li(t9, Operand(thunk_ref)); + __ li(t9, thunk_ref); __ jmp(&end_profiler_check); __ bind(&profiler_disabled); @@ -628,7 +352,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm, __ bind(&end_profiler_check); // Allocate HandleScope in callee-save registers. - __ li(s5, Operand(next_address)); + __ li(s5, next_address); __ Ld(s0, MemOperand(s5, kNextOffset)); __ Ld(s1, MemOperand(s5, kLimitOffset)); __ Lw(s2, MemOperand(s5, kLevelOffset)); |