Diffstat (limited to 'deps/v8/src/mips64/code-stubs-mips64.cc')
 -rw-r--r--  deps/v8/src/mips64/code-stubs-mips64.cc | 401
 1 file changed, 179 insertions(+), 222 deletions(-)
diff --git a/deps/v8/src/mips64/code-stubs-mips64.cc b/deps/v8/src/mips64/code-stubs-mips64.cc
index a6c4f33806..2531d6b3f1 100644
--- a/deps/v8/src/mips64/code-stubs-mips64.cc
+++ b/deps/v8/src/mips64/code-stubs-mips64.cc
@@ -289,7 +289,7 @@ static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
__ GetObjectType(a0, t0, t0);
if (cc == less || cc == greater) {
// Call runtime on identical JSObjects.
- __ Branch(slow, greater, t0, Operand(FIRST_SPEC_OBJECT_TYPE));
+ __ Branch(slow, greater, t0, Operand(FIRST_JS_RECEIVER_TYPE));
// Call runtime on identical symbols since we need to throw a TypeError.
__ Branch(slow, eq, t0, Operand(SYMBOL_TYPE));
// Call runtime on identical SIMD values since we must throw a TypeError.
@@ -305,7 +305,7 @@ static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
__ Branch(&heap_number, eq, t0, Operand(HEAP_NUMBER_TYPE));
// Comparing JS objects with <=, >= is complicated.
if (cc != eq) {
- __ Branch(slow, greater, t0, Operand(FIRST_SPEC_OBJECT_TYPE));
+ __ Branch(slow, greater, t0, Operand(FIRST_JS_RECEIVER_TYPE));
// Call runtime on identical symbols since we need to throw a TypeError.
__ Branch(slow, eq, t0, Operand(SYMBOL_TYPE));
// Call runtime on identical SIMD values since we must throw a TypeError.
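The FIRST_SPEC_OBJECT_TYPE to FIRST_JS_RECEIVER_TYPE substitutions that recur through this file are a pure rename tracking the instance-type enum. The checks still rely on the invariant that every JSReceiver type sorts after all non-receiver types, so a single bounds compare classifies a heap object. A minimal model of that invariant, with illustrative values rather than the real enum from src/objects.h:

// Illustrative only: receiver types occupy the top of the enum, so one
// compare against FIRST_JS_RECEIVER_TYPE decides receiver-ness.
enum InstanceType {
  SYMBOL_TYPE, HEAP_NUMBER_TYPE, ODDBALL_TYPE,  // non-receivers first ...
  JS_PROXY_TYPE,
  FIRST_JS_RECEIVER_TYPE = JS_PROXY_TYPE,
  JS_OBJECT_TYPE, JS_FUNCTION_TYPE,
  LAST_JS_RECEIVER_TYPE = JS_FUNCTION_TYPE,
  LAST_TYPE = LAST_JS_RECEIVER_TYPE  // the STATIC_ASSERT below encodes this
};

static bool IsJSReceiver(InstanceType type) {
  return type >= FIRST_JS_RECEIVER_TYPE;
}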
@@ -455,12 +455,12 @@ static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
// If either operand is a JS object or an oddball value, then they are
// not equal since their pointers are different.
// There is no test for undetectability in strict equality.
- STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
+ STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
Label first_non_object;
// Get the type of the first operand into a2 and compare it with
- // FIRST_SPEC_OBJECT_TYPE.
+ // FIRST_JS_RECEIVER_TYPE.
__ GetObjectType(lhs, a2, a2);
- __ Branch(&first_non_object, less, a2, Operand(FIRST_SPEC_OBJECT_TYPE));
+ __ Branch(&first_non_object, less, a2, Operand(FIRST_JS_RECEIVER_TYPE));
// Return non-zero.
Label return_not_equal;
@@ -473,7 +473,7 @@ static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
__ Branch(&return_not_equal, eq, a2, Operand(ODDBALL_TYPE));
__ GetObjectType(rhs, a3, a3);
- __ Branch(&return_not_equal, greater, a3, Operand(FIRST_SPEC_OBJECT_TYPE));
+ __ Branch(&return_not_equal, greater, a3, Operand(FIRST_JS_RECEIVER_TYPE));
// Check for oddballs: true, false, null, undefined.
__ Branch(&return_not_equal, eq, a3, Operand(ODDBALL_TYPE));
@@ -535,9 +535,9 @@ static void EmitCheckForInternalizedStringsOrObjects(MacroAssembler* masm,
__ li(v0, Operand(1)); // Non-zero indicates not equal.
__ bind(&object_test);
- __ Branch(not_both_strings, lt, a2, Operand(FIRST_SPEC_OBJECT_TYPE));
+ __ Branch(not_both_strings, lt, a2, Operand(FIRST_JS_RECEIVER_TYPE));
__ GetObjectType(rhs, a2, a3);
- __ Branch(not_both_strings, lt, a3, Operand(FIRST_SPEC_OBJECT_TYPE));
+ __ Branch(not_both_strings, lt, a3, Operand(FIRST_JS_RECEIVER_TYPE));
// If both objects are undetectable, they are equal. Otherwise, they
// are not equal, since they are different objects and an object is not
@@ -725,8 +725,7 @@ void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
__ Push(lhs, rhs);
// Figure out which native to call and setup the arguments.
if (cc == eq) {
- __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals, 2,
- 1);
+ __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals);
} else {
int ncr; // NaN compare result.
if (cc == lt || cc == le) {
@@ -740,9 +739,8 @@ void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
// Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
// tagged as a small integer.
- __ TailCallRuntime(
- is_strong(strength()) ? Runtime::kCompare_Strong : Runtime::kCompare, 3,
- 1);
+ __ TailCallRuntime(is_strong(strength()) ? Runtime::kCompare_Strong
+ : Runtime::kCompare);
}
__ bind(&miss);
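Every TailCallRuntime and CallRuntime site in this patch drops the explicit argument and result counts. This works because each Runtime::FunctionId already records its arity in the runtime function table, so the macro assembler can resolve the counts itself. A sketch of what the single-argument overload plausibly does (simplified, not the verbatim V8 source; the real overload lives in macro-assembler-mips64.cc):

void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
  const Runtime::Function* function = Runtime::FunctionForId(fid);
  // All current callers expect a single tagged result.
  DCHECK_EQ(1, function->result_size);
  if (function->nargs >= 0) {
    // Fixed-arity functions get a0 set up for the CEntry stub here;
    // variadic ones (nargs < 0) rely on the caller having done it.
    PrepareCEntryArgs(function->nargs);
  }
  JumpToExternalReference(ExternalReference(fid, isolate()));
}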
@@ -977,7 +975,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
if (exponent_type() == ON_STACK) {
// The arguments are still on the stack.
__ bind(&call_runtime);
- __ TailCallRuntime(Runtime::kMathPowRT, 2, 1);
+ __ TailCallRuntime(Runtime::kMathPowRT);
// The stub is called from non-optimized code, which expects the result
// as heap number in exponent.
@@ -1474,15 +1472,6 @@ void InstanceOfStub::Generate(MacroAssembler* masm) {
__ And(at, scratch, Operand(1 << Map::kHasNonInstancePrototype));
__ Branch(&slow_case, ne, at, Operand(zero_reg));
- // Ensure that {function} is not bound.
- Register const shared_info = scratch;
- __ ld(shared_info,
- FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
- __ lbu(scratch,
- FieldMemOperand(shared_info, SharedFunctionInfo::kBoundByteOffset));
- __ And(at, scratch, Operand(1 << SharedFunctionInfo::kBoundBitWithinByte));
- __ Branch(&slow_case, ne, at, Operand(zero_reg));
-
// Get the "prototype" (or initial map) of the {function}.
__ ld(function_prototype,
FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
@@ -1507,25 +1496,49 @@ void InstanceOfStub::Generate(MacroAssembler* masm) {
// Loop through the prototype chain looking for the {function} prototype.
// Assume true, and change to false if not found.
- Register const object_prototype = object_map;
+ Register const object_instance_type = function_map;
+ Register const map_bit_field = function_map;
Register const null = scratch;
- Label done, loop;
- __ LoadRoot(v0, Heap::kTrueValueRootIndex);
+ Register const result = v0;
+
+ Label done, loop, fast_runtime_fallback;
+ __ LoadRoot(result, Heap::kTrueValueRootIndex);
__ LoadRoot(null, Heap::kNullValueRootIndex);
__ bind(&loop);
- __ ld(object_prototype, FieldMemOperand(object_map, Map::kPrototypeOffset));
- __ Branch(&done, eq, object_prototype, Operand(function_prototype));
- __ Branch(USE_DELAY_SLOT, &loop, ne, object_prototype, Operand(null));
- __ ld(object_map, FieldMemOperand(object_prototype, HeapObject::kMapOffset));
- __ LoadRoot(v0, Heap::kFalseValueRootIndex);
+
+ // Check if the object needs to be access checked.
+ __ lbu(map_bit_field, FieldMemOperand(object_map, Map::kBitFieldOffset));
+ __ And(map_bit_field, map_bit_field, Operand(1 << Map::kIsAccessCheckNeeded));
+ __ Branch(&fast_runtime_fallback, ne, map_bit_field, Operand(zero_reg));
+ // Check if the current object is a Proxy.
+ __ lbu(object_instance_type,
+ FieldMemOperand(object_map, Map::kInstanceTypeOffset));
+ __ Branch(&fast_runtime_fallback, eq, object_instance_type,
+ Operand(JS_PROXY_TYPE));
+
+ __ ld(object, FieldMemOperand(object_map, Map::kPrototypeOffset));
+ __ Branch(&done, eq, object, Operand(function_prototype));
+ __ Branch(USE_DELAY_SLOT, &loop, ne, object, Operand(null));
+ __ ld(object_map,
+ FieldMemOperand(object, HeapObject::kMapOffset)); // In delay slot.
+ __ LoadRoot(result, Heap::kFalseValueRootIndex);
__ bind(&done);
__ Ret(USE_DELAY_SLOT);
- __ StoreRoot(v0, Heap::kInstanceofCacheAnswerRootIndex); // In delay slot.
-
- // Slow-case: Call the runtime function.
+ __ StoreRoot(result,
+ Heap::kInstanceofCacheAnswerRootIndex); // In delay slot.
+
+ // Found Proxy or access check needed: Call the runtime.
+ __ bind(&fast_runtime_fallback);
+ __ Push(object, function_prototype);
+ // Invalidate the instanceof cache.
+ DCHECK(Smi::FromInt(0) == 0);
+ __ StoreRoot(zero_reg, Heap::kInstanceofCacheFunctionRootIndex);
+ __ TailCallRuntime(Runtime::kHasInPrototypeChain);
+
+ // Slow-case: Call the %InstanceOf runtime function.
__ bind(&slow_case);
__ Push(object, function);
- __ TailCallRuntime(Runtime::kInstanceOf, 2, 1);
+ __ TailCallRuntime(Runtime::kInstanceOf);
}
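The rewritten InstanceOfStub drops the bound-function bailout and walks the prototype chain inline, calling into Runtime::kHasInPrototypeChain only for the two cases the fast path cannot decide: access-checked objects and proxies. A self-contained model of the loop above, using simplified stand-in types rather than V8's object model:

struct Map;
struct HeapObject { Map* map; };
struct Map {
  bool access_check_needed;  // the Map::kIsAccessCheckNeeded bit
  bool is_proxy;             // instance type == JS_PROXY_TYPE
  HeapObject* prototype;     // nullptr stands in for the null value
};

enum Result { kTrue, kFalse, kNeedRuntime };

// Mirrors the loop / fast_runtime_fallback structure emitted above.
Result HasInPrototypeChain(HeapObject* object, HeapObject* function_prototype) {
  for (Map* map = object->map;; map = object->map) {
    if (map->access_check_needed || map->is_proxy) return kNeedRuntime;
    object = map->prototype;
    if (object == function_prototype) return kTrue;
    if (object == nullptr) return kFalse;
  }
}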
@@ -1596,7 +1609,7 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
// by calling the runtime system.
__ bind(&slow);
__ push(a1);
- __ TailCallRuntime(Runtime::kArguments, 1, 1);
+ __ TailCallRuntime(Runtime::kArguments);
}
@@ -1624,7 +1637,7 @@ void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
__ bind(&runtime);
__ Push(a1, a3, a2);
- __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
+ __ TailCallRuntime(Runtime::kNewSloppyArguments);
}
@@ -1690,7 +1703,7 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
__ Daddu(t1, t1, Operand(Heap::kSloppyArgumentsObjectSize));
// Do the allocation of all three objects in one go.
- __ Allocate(t1, v0, a4, t1, &runtime, TAG_OBJECT);
+ __ Allocate(t1, v0, t1, a4, &runtime, TAG_OBJECT);
// v0 = address of new object(s) (tagged)
// a2 = argument count (smi-tagged)
@@ -1700,8 +1713,7 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
const int kAliasedOffset =
Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
- __ ld(a4, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
- __ ld(a4, FieldMemOperand(a4, JSGlobalObject::kNativeContextOffset));
+ __ ld(a4, NativeContextMemOperand());
Label skip2_ne, skip2_eq;
__ Branch(&skip2_ne, ne, a6, Operand(zero_reg));
__ ld(a4, MemOperand(a4, kNormalOffset));
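This hunk, and several below, replace the old two-load sequence (fetch the global object, then its native context) with NativeContextMemOperand() and LoadNativeContextSlot(). A sketch of the plausible shape of those helpers (the real definitions are in the MIPS64 macro assembler; treat this as an assumption about their structure):

MemOperand NativeContextMemOperand() {
  // The native context hangs off the current context register (cp).
  return ContextMemOperand(cp, Context::NATIVE_CONTEXT_INDEX);
}

void MacroAssembler::LoadNativeContextSlot(int index, Register dst) {
  ld(dst, NativeContextMemOperand());
  ld(dst, ContextMemOperand(dst, index));
}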
@@ -1839,7 +1851,7 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
// a5 = argument count (tagged)
__ bind(&runtime);
__ Push(a1, a3, a5);
- __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
+ __ TailCallRuntime(Runtime::kNewSloppyArguments);
}
@@ -1858,7 +1870,7 @@ void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
__ Push(receiver, key); // Receiver, key.
// Perform tail call to the entry.
- __ TailCallRuntime(Runtime::kLoadElementWithInterceptor, 2, 1);
+ __ TailCallRuntime(Runtime::kLoadElementWithInterceptor);
__ bind(&slow);
PropertyAccessCompiler::TailCallBuiltin(
@@ -1904,10 +1916,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));
// Get the arguments boilerplate from the current native context.
- __ ld(a4, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
- __ ld(a4, FieldMemOperand(a4, JSGlobalObject::kNativeContextOffset));
- __ ld(a4, MemOperand(a4, Context::SlotOffset(
- Context::STRICT_ARGUMENTS_MAP_INDEX)));
+ __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, a4);
__ sd(a4, FieldMemOperand(v0, JSObject::kMapOffset));
__ LoadRoot(a5, Heap::kEmptyFixedArrayRootIndex);
@@ -1955,7 +1964,33 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// Do the runtime call to allocate the arguments object.
__ bind(&runtime);
__ Push(a1, a3, a2);
- __ TailCallRuntime(Runtime::kNewStrictArguments, 3, 1);
+ __ TailCallRuntime(Runtime::kNewStrictArguments);
+}
+
+
+void RestParamAccessStub::GenerateNew(MacroAssembler* masm) {
+ // a2 : number of parameters (tagged)
+ // a3 : parameters pointer
+ // a4 : rest parameter index (tagged)
+ // Check if the calling frame is an arguments adaptor frame.
+
+ Label runtime;
+ __ ld(a0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+ __ ld(a5, MemOperand(a0, StandardFrameConstants::kContextOffset));
+ __ Branch(&runtime, ne, a5,
+ Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+
+ // Patch the arguments.length and the parameters pointer.
+ __ ld(a2, MemOperand(a0, ArgumentsAdaptorFrameConstants::kLengthOffset));
+ __ SmiScale(at, a2, kPointerSizeLog2);
+
+ __ Daddu(a3, a0, Operand(at));
+ __ Daddu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset));
+
+ // Do the runtime call to allocate the arguments object.
+ __ bind(&runtime);
+ __ Push(a2, a3, a4);
+ __ TailCallRuntime(Runtime::kNewRestParam);
}
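RestParamAccessStub::GenerateNew patches both the length and the parameters pointer when the caller sits behind an arguments adaptor frame; otherwise the incoming a2/a3 are used as-is. The SmiScale/Daddu pair computes caller_fp + argc * kPointerSize + kCallerSPOffset. A scalar model of that address computation (hypothetical helper, written out for clarity):

#include <cstdint>

// On MIPS64 kPointerSizeLog2 == 3, so SmiScale turns the tagged count
// into a byte offset of argc * 8.
uintptr_t RestParametersPointer(uintptr_t caller_fp, intptr_t arg_count,
                                intptr_t caller_sp_offset) {
  return caller_fp            // a0: the adaptor frame pointer
         + (arg_count << 3)   // SmiScale(at, a2, kPointerSizeLog2)
         + caller_sp_offset;  // StandardFrameConstants::kCallerSPOffset
}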
@@ -1964,7 +1999,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// time or if regexp entry in generated code is turned off runtime switch or
// at compilation.
#ifdef V8_INTERPRETED_REGEXP
- __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
+ __ TailCallRuntime(Runtime::kRegExpExec);
#else // V8_INTERPRETED_REGEXP
// Stack frame on entry.
@@ -2281,7 +2316,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ Branch(&runtime, eq, v0, Operand(a1));
// For exception, throw the exception again.
- __ TailCallRuntime(Runtime::kRegExpExecReThrow, 4, 1);
+ __ TailCallRuntime(Runtime::kRegExpExecReThrow);
__ bind(&failure);
// For failure and exception return null.
@@ -2377,7 +2412,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Do the runtime call to execute the regexp.
__ bind(&runtime);
- __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
+ __ TailCallRuntime(Runtime::kRegExpExec);
// Deferred code for string handling.
// (6) Not a long external string? If yes, go to (8).
@@ -2423,19 +2458,16 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
}
-static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub,
- bool is_super) {
+static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
// a0 : number of arguments to the construct function
// a2 : feedback vector
// a3 : slot in feedback vector (Smi)
// a1 : the function to call
- // a4 : original constructor (for IsSuperConstructorCall)
FrameScope scope(masm, StackFrame::INTERNAL);
- const RegList kSavedRegs = 1 << 4 | // a0
- 1 << 5 | // a1
- 1 << 6 | // a2
- 1 << 7 | // a3
- BoolToInt(is_super) << 8; // a4
+ const RegList kSavedRegs = 1 << 4 | // a0
+ 1 << 5 | // a1
+ 1 << 6 | // a2
+ 1 << 7; // a3
// Number-of-arguments register must be smi-tagged to call out.
@@ -2449,7 +2481,7 @@ static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub,
}
-static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
+static void GenerateRecordCallTarget(MacroAssembler* masm) {
// Cache the called function in a feedback vector slot. Cache states
// are uninitialized, monomorphic (indicated by a JSFunction), and
// megamorphic.
@@ -2457,7 +2489,6 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
// a1 : the function to call
// a2 : feedback vector
// a3 : slot in feedback vector (Smi)
- // a4 : original constructor (for IsSuperConstructorCall)
Label initialize, done, miss, megamorphic, not_array_function;
DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()),
@@ -2498,7 +2529,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
__ Branch(&miss, ne, feedback_map, Operand(at));
// Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, a5);
+ __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, a5);
__ Branch(&megamorphic, ne, a1, Operand(a5));
__ jmp(&done);
@@ -2520,20 +2551,20 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
// An uninitialized cache is patched with the function.
__ bind(&initialize);
// Make sure the function is the Array() function.
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, a5);
+ __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, a5);
__ Branch(&not_array_function, ne, a1, Operand(a5));
// The target function is the Array constructor,
// Create an AllocationSite if we don't already have it, store it in the
// slot.
CreateAllocationSiteStub create_stub(masm->isolate());
- CallStubInRecordCallTarget(masm, &create_stub, is_super);
+ CallStubInRecordCallTarget(masm, &create_stub);
__ Branch(&done);
__ bind(&not_array_function);
CreateWeakCellStub weak_cell_stub(masm->isolate());
- CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super);
+ CallStubInRecordCallTarget(masm, &weak_cell_stub);
__ bind(&done);
}
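GenerateRecordCallTarget loses its is_super parameter but keeps the same feedback-slot protocol: a slot starts uninitialized, is patched to a monomorphic entry (a WeakCell for the callee, or an AllocationSite when the callee is the Array constructor), and degrades to the megamorphic sentinel on any mismatch. A compact model of the transitions (simplified; real slots hold heap objects, not an enum):

enum class FeedbackState { kUninitialized, kMonomorphic, kMegamorphic };

// Models the initialize / megamorphic paths of GenerateRecordCallTarget.
FeedbackState RecordCallTarget(FeedbackState state, bool same_target) {
  switch (state) {
    case FeedbackState::kUninitialized:
      return FeedbackState::kMonomorphic;  // patch WeakCell/AllocationSite in
    case FeedbackState::kMonomorphic:
      return same_target ? FeedbackState::kMonomorphic
                         : FeedbackState::kMegamorphic;
    case FeedbackState::kMegamorphic:
      return FeedbackState::kMegamorphic;  // the sentinel is sticky
  }
  return state;  // unreachable
}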
@@ -2543,7 +2574,6 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
// a1 : the function to call
// a2 : feedback vector
// a3 : slot in feedback vector (Smi, for RecordCallTarget)
- // a4 : original constructor (for IsSuperConstructorCall)
Label non_function;
// Check that the function is not a smi.
@@ -2552,29 +2582,23 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
__ GetObjectType(a1, a5, a5);
__ Branch(&non_function, ne, a5, Operand(JS_FUNCTION_TYPE));
- if (RecordCallTarget()) {
- GenerateRecordCallTarget(masm, IsSuperConstructorCall());
+ GenerateRecordCallTarget(masm);
- __ dsrl(at, a3, 32 - kPointerSizeLog2);
- __ Daddu(a5, a2, at);
- Label feedback_register_initialized;
- // Put the AllocationSite from the feedback vector into a2, or undefined.
- __ ld(a2, FieldMemOperand(a5, FixedArray::kHeaderSize));
- __ ld(a5, FieldMemOperand(a2, AllocationSite::kMapOffset));
- __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
- __ Branch(&feedback_register_initialized, eq, a5, Operand(at));
- __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
- __ bind(&feedback_register_initialized);
+ __ dsrl(at, a3, 32 - kPointerSizeLog2);
+ __ Daddu(a5, a2, at);
+ Label feedback_register_initialized;
+ // Put the AllocationSite from the feedback vector into a2, or undefined.
+ __ ld(a2, FieldMemOperand(a5, FixedArray::kHeaderSize));
+ __ ld(a5, FieldMemOperand(a2, AllocationSite::kMapOffset));
+ __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
+ __ Branch(&feedback_register_initialized, eq, a5, Operand(at));
+ __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
+ __ bind(&feedback_register_initialized);
- __ AssertUndefinedOrAllocationSite(a2, a5);
- }
+ __ AssertUndefinedOrAllocationSite(a2, a5);
- // Pass function as original constructor.
- if (IsSuperConstructorCall()) {
- __ mov(a3, a4);
- } else {
- __ mov(a3, a1);
- }
+ // Pass function as new target.
+ __ mov(a3, a1);
// Tail call to the function-specific construct stub (still in the caller
// context at this point).
@@ -2634,7 +2658,7 @@ void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
// a3 - slot id
// a2 - vector
// a4 - allocation site (loaded from vector[slot])
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, at);
+ __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, at);
__ Branch(miss, ne, a1, Operand(at));
__ li(a0, Operand(arg_count()));
@@ -2657,11 +2681,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
// a1 - function
// a3 - slot id (Smi)
// a2 - vector
- const int with_types_offset =
- FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex);
- const int generic_offset =
- FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex);
- Label extra_checks_or_miss, call;
+ Label extra_checks_or_miss, call, call_function;
int argc = arg_count();
ParameterCount actual(argc);
@@ -2698,9 +2718,11 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ Daddu(t0, t0, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
__ sd(t0, FieldMemOperand(a3, FixedArray::kHeaderSize + kPointerSize));
- __ bind(&call);
- __ li(a0, Operand(argc));
- __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
+ __ bind(&call_function);
+ __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode()),
+ RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg),
+ USE_DELAY_SLOT);
+ __ li(a0, Operand(argc)); // In delay slot.
__ bind(&extra_checks_or_miss);
Label uninitialized, miss, not_allocation_site;
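The rewritten tail jumps here and in the &call block below lean on the MIPS branch delay slot: with USE_DELAY_SLOT, the next emitted instruction sits in the delay slot and executes before control reaches the jump target, so loading argc into a0 costs no extra instruction after the branch. The idiom, isolated from the surrounding stub:

// The instruction emitted right after a USE_DELAY_SLOT jump occupies the
// delay slot and runs unconditionally before the target is entered.
__ Jump(masm->isolate()->builtins()->CallFunction(convert_mode()),
        RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg),
        USE_DELAY_SLOT);
__ li(a0, Operand(argc));  // In the delay slot: a0 holds argc at the target.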
@@ -2735,14 +2757,12 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ Daddu(a4, a2, Operand(a4));
__ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
__ sd(at, FieldMemOperand(a4, FixedArray::kHeaderSize));
- // We have to update statistics for runtime profiling.
- __ ld(a4, FieldMemOperand(a2, with_types_offset));
- __ Dsubu(a4, a4, Operand(Smi::FromInt(1)));
- __ sd(a4, FieldMemOperand(a2, with_types_offset));
- __ ld(a4, FieldMemOperand(a2, generic_offset));
- __ Daddu(a4, a4, Operand(Smi::FromInt(1)));
- __ Branch(USE_DELAY_SLOT, &call);
- __ sd(a4, FieldMemOperand(a2, generic_offset)); // In delay slot.
+
+ __ bind(&call);
+ __ Jump(masm->isolate()->builtins()->Call(convert_mode()),
+ RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg),
+ USE_DELAY_SLOT);
+ __ li(a0, Operand(argc)); // In delay slot.
__ bind(&uninitialized);
@@ -2755,13 +2775,14 @@ void CallICStub::Generate(MacroAssembler* masm) {
// Make sure the function is not the Array() function, which requires special
// behavior on MISS.
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, a4);
+ __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, a4);
__ Branch(&miss, eq, a1, Operand(a4));
- // Update stats.
- __ ld(a4, FieldMemOperand(a2, with_types_offset));
- __ Daddu(a4, a4, Operand(Smi::FromInt(1)));
- __ sd(a4, FieldMemOperand(a2, with_types_offset));
+ // Make sure the function belongs to the same native context.
+ __ ld(t0, FieldMemOperand(a1, JSFunction::kContextOffset));
+ __ ld(t0, ContextMemOperand(t0, Context::NATIVE_CONTEXT_INDEX));
+ __ ld(t1, NativeContextMemOperand());
+ __ Branch(&miss, ne, t0, Operand(t1));
// Initialize the call counter.
__ dsrl(at, a3, 32 - kPointerSizeLog2);
@@ -2781,7 +2802,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ Pop(a1);
}
- __ Branch(&call);
+ __ Branch(&call_function);
// We are here because tracing is on or we encountered a MISS case we can't
// handle here.
@@ -2799,7 +2820,7 @@ void CallICStub::GenerateMiss(MacroAssembler* masm) {
__ Push(a1, a2, a3);
// Call the entry.
- __ CallRuntime(Runtime::kCallIC_Miss, 3);
+ __ CallRuntime(Runtime::kCallIC_Miss);
// Move result to a1 and exit the internal frame.
__ mov(a1, v0);
@@ -2828,11 +2849,11 @@ void StringCharCodeAtGenerator::GenerateSlow(
__ Push(object_, index_);
}
if (index_flags_ == STRING_INDEX_IS_NUMBER) {
- __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
+ __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero);
} else {
DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
// NumberToSmi discards numbers that are not exact integers.
- __ CallRuntime(Runtime::kNumberToSmi, 1);
+ __ CallRuntime(Runtime::kNumberToSmi);
}
// Save the conversion result before the pop instructions below
@@ -2861,7 +2882,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
call_helper.BeforeCall(masm);
__ SmiTag(index_);
__ Push(object_, index_);
- __ CallRuntime(Runtime::kStringCharCodeAtRT, 2);
+ __ CallRuntime(Runtime::kStringCharCodeAtRT);
__ Move(result_, v0);
@@ -2900,7 +2921,7 @@ void StringCharFromCodeGenerator::GenerateSlow(
__ bind(&slow_case_);
call_helper.BeforeCall(masm);
__ push(code_);
- __ CallRuntime(Runtime::kStringCharFromCode, 1);
+ __ CallRuntime(Runtime::kStringCharFromCode);
__ Move(result_, v0);
call_helper.AfterCall(masm);
@@ -3162,7 +3183,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
// Just jump to runtime to create the sub string.
__ bind(&runtime);
- __ TailCallRuntime(Runtime::kSubString, 3, 1);
+ __ TailCallRuntime(Runtime::kSubString);
__ bind(&single_char);
// v0: original string
@@ -3207,7 +3228,7 @@ void ToNumberStub::Generate(MacroAssembler* masm) {
__ mov(v0, a0);
__ bind(&slow_string);
__ push(a0); // Push argument.
- __ TailCallRuntime(Runtime::kStringToNumber, 1, 1);
+ __ TailCallRuntime(Runtime::kStringToNumber);
__ bind(&not_string);
Label not_oddball;
@@ -3217,7 +3238,7 @@ void ToNumberStub::Generate(MacroAssembler* masm) {
__ bind(&not_oddball);
__ push(a0); // Push argument.
- __ TailCallRuntime(Runtime::kToNumber, 1, 1);
+ __ TailCallRuntime(Runtime::kToNumber);
}
@@ -3234,7 +3255,7 @@ void ToLengthStub::Generate(MacroAssembler* masm) {
__ bind(&not_smi);
__ push(a0); // Push argument.
- __ TailCallRuntime(Runtime::kToLength, 1, 1);
+ __ TailCallRuntime(Runtime::kToLength);
}
@@ -3266,7 +3287,7 @@ void ToStringStub::Generate(MacroAssembler* masm) {
__ bind(&not_oddball);
__ push(a0); // Push argument.
- __ TailCallRuntime(Runtime::kToString, 1, 1);
+ __ TailCallRuntime(Runtime::kToString);
}
@@ -3404,7 +3425,7 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
__ bind(&runtime);
__ Push(a1, a0);
- __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
+ __ TailCallRuntime(Runtime::kStringCompare);
}
@@ -3443,7 +3464,7 @@ void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
__ CheckMap(a1, a2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
__ CheckMap(a0, a3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
if (op() != Token::EQ_STRICT && is_strong(strength())) {
- __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0, 1);
+ __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion);
} else {
if (!Token::IsEqualityOp(op())) {
__ ld(a1, FieldMemOperand(a1, Oddball::kToNumberOffset));
@@ -3736,9 +3757,9 @@ void CompareICStub::GenerateStrings(MacroAssembler* masm) {
__ bind(&runtime);
__ Push(left, right);
if (equality) {
- __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
+ __ TailCallRuntime(Runtime::kStringEquals);
} else {
- __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
+ __ TailCallRuntime(Runtime::kStringCompare);
}
__ bind(&miss);
@@ -3746,18 +3767,19 @@ void CompareICStub::GenerateStrings(MacroAssembler* masm) {
}
-void CompareICStub::GenerateObjects(MacroAssembler* masm) {
- DCHECK(state() == CompareICState::OBJECT);
+void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
+ DCHECK_EQ(CompareICState::RECEIVER, state());
Label miss;
__ And(a2, a1, Operand(a0));
__ JumpIfSmi(a2, &miss);
+ STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
__ GetObjectType(a0, a2, a2);
- __ Branch(&miss, ne, a2, Operand(JS_OBJECT_TYPE));
+ __ Branch(&miss, lt, a2, Operand(FIRST_JS_RECEIVER_TYPE));
__ GetObjectType(a1, a2, a2);
- __ Branch(&miss, ne, a2, Operand(JS_OBJECT_TYPE));
+ __ Branch(&miss, lt, a2, Operand(FIRST_JS_RECEIVER_TYPE));
- DCHECK(GetCondition() == eq);
+ DCHECK_EQ(eq, GetCondition());
__ Ret(USE_DELAY_SLOT);
__ dsubu(v0, a0, a1);
@@ -3766,7 +3788,7 @@ void CompareICStub::GenerateObjects(MacroAssembler* masm) {
}
-void CompareICStub::GenerateKnownObjects(MacroAssembler* masm) {
+void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
Label miss;
Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
__ And(a2, a1, a0);
@@ -3781,7 +3803,7 @@ void CompareICStub::GenerateKnownObjects(MacroAssembler* masm) {
__ Ret(USE_DELAY_SLOT);
__ dsubu(v0, a0, a1);
} else if (is_strong(strength())) {
- __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0, 1);
+ __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion);
} else {
if (op() == Token::LT || op() == Token::LTE) {
__ li(a2, Operand(Smi::FromInt(GREATER)));
@@ -3789,7 +3811,7 @@ void CompareICStub::GenerateKnownObjects(MacroAssembler* masm) {
__ li(a2, Operand(Smi::FromInt(LESS)));
}
__ Push(a1, a0, a2);
- __ TailCallRuntime(Runtime::kCompare, 3, 1);
+ __ TailCallRuntime(Runtime::kCompare);
}
__ bind(&miss);
@@ -4278,11 +4300,11 @@ void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
// We need extra registers for this, so we push the object and the address
// register temporarily.
__ Push(regs_.object(), regs_.address());
- __ EnsureNotWhite(regs_.scratch0(), // The value.
- regs_.scratch1(), // Scratch.
- regs_.object(), // Scratch.
- regs_.address(), // Scratch.
- &need_incremental_pop_scratch);
+ __ JumpIfWhite(regs_.scratch0(), // The value.
+ regs_.scratch1(), // Scratch.
+ regs_.object(), // Scratch.
+ regs_.address(), // Scratch.
+ &need_incremental_pop_scratch);
__ Pop(regs_.object(), regs_.address());
regs_.Restore(masm);
@@ -4305,73 +4327,6 @@ void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
}
-void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
- // ----------- S t a t e -------------
- // -- a0 : element value to store
- // -- a3 : element index as smi
- // -- sp[0] : array literal index in function as smi
- // -- sp[4] : array literal
- // clobbers a1, a2, a4
- // -----------------------------------
-
- Label element_done;
- Label double_elements;
- Label smi_element;
- Label slow_elements;
- Label fast_elements;
-
- // Get array literal index, array literal and its map.
- __ ld(a4, MemOperand(sp, 0 * kPointerSize));
- __ ld(a1, MemOperand(sp, 1 * kPointerSize));
- __ ld(a2, FieldMemOperand(a1, JSObject::kMapOffset));
-
- __ CheckFastElements(a2, a5, &double_elements);
- // Check for FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS elements
- __ JumpIfSmi(a0, &smi_element);
- __ CheckFastSmiElements(a2, a5, &fast_elements);
-
- // Store into the array literal requires a elements transition. Call into
- // the runtime.
- __ bind(&slow_elements);
- // call.
- __ Push(a1, a3, a0);
- __ ld(a5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
- __ ld(a5, FieldMemOperand(a5, JSFunction::kLiteralsOffset));
- __ Push(a5, a4);
- __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
-
- // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
- __ bind(&fast_elements);
- __ ld(a5, FieldMemOperand(a1, JSObject::kElementsOffset));
- __ SmiScale(a6, a3, kPointerSizeLog2);
- __ Daddu(a6, a5, a6);
- __ Daddu(a6, a6, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- __ sd(a0, MemOperand(a6, 0));
- // Update the write barrier for the array store.
- __ RecordWrite(a5, a6, a0, kRAHasNotBeenSaved, kDontSaveFPRegs,
- EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
- __ Ret(USE_DELAY_SLOT);
- __ mov(v0, a0);
-
- // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS,
- // and value is Smi.
- __ bind(&smi_element);
- __ ld(a5, FieldMemOperand(a1, JSObject::kElementsOffset));
- __ SmiScale(a6, a3, kPointerSizeLog2);
- __ Daddu(a6, a5, a6);
- __ sd(a0, FieldMemOperand(a6, FixedArray::kHeaderSize));
- __ Ret(USE_DELAY_SLOT);
- __ mov(v0, a0);
-
- // Array literal has ElementsKind of FAST_*_DOUBLE_ELEMENTS.
- __ bind(&double_elements);
- __ ld(a5, FieldMemOperand(a1, JSObject::kElementsOffset));
- __ StoreNumberToDoubleElements(a0, a3, a5, a7, t1, &slow_elements);
- __ Ret(USE_DELAY_SLOT);
- __ mov(v0, a0);
-}
-
-
void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
CEntryStub ces(isolate(), 1, kSaveFPRegs);
__ Call(ces.GetCode(), RelocInfo::CODE_TARGET);
@@ -5074,7 +5029,7 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
// -- a0 : argc (only if argument_count() == ANY)
// -- a1 : constructor
// -- a2 : AllocationSite or undefined
- // -- a3 : original constructor
+ // -- a3 : new target
// -- sp[0] : last argument
// -----------------------------------
@@ -5096,6 +5051,9 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
__ AssertUndefinedOrAllocationSite(a2, a4);
}
+ // Enter the context of the Array function.
+ __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
+
Label subclassing;
__ Branch(&subclassing, ne, a1, Operand(a3));
@@ -5115,26 +5073,26 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
// Subclassing.
__ bind(&subclassing);
- __ Push(a1);
- __ Push(a3);
-
- // Adjust argc.
switch (argument_count()) {
case ANY:
case MORE_THAN_ONE:
- __ li(at, Operand(2));
- __ addu(a0, a0, at);
+ __ dsll(at, a0, kPointerSizeLog2);
+ __ Daddu(at, sp, at);
+ __ sd(a1, MemOperand(at));
+ __ li(at, Operand(3));
+ __ Daddu(a0, a0, at);
break;
case NONE:
- __ li(a0, Operand(2));
+ __ sd(a1, MemOperand(sp, 0 * kPointerSize));
+ __ li(a0, Operand(3));
break;
case ONE:
- __ li(a0, Operand(3));
+ __ sd(a1, MemOperand(sp, 1 * kPointerSize));
+ __ li(a0, Operand(4));
break;
}
-
- __ JumpToExternalReference(
- ExternalReference(Runtime::kArrayConstructorWithSubclassing, isolate()));
+ __ Push(a3, a2);
+ __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
}
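The subclassing path no longer pushes the constructor and bumps argc by 2; it now stores the constructor over the receiver slot, counts it plus the two pushed extras in argc, and tail-calls Runtime::kNewArray. A comment sketch of the stack the runtime sees in the MORE_THAN_ONE case, derived from the stores above (8-byte slots; treat as an illustration):

// Stack layout handed to Runtime::kNewArray (n = original argc):
//
//   sp[0]              allocation site  (a2, pushed last)
//   sp[8]              new target       (a3)
//   sp[16] ..          the n original arguments
//   sp[16 + n * 8]     constructor      (a1, written over the receiver slot)
//
// a0 = n + 3, so every slot above is visible as a runtime argument.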
@@ -5220,14 +5178,14 @@ void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
// Go up context chain to the script context.
for (int i = 0; i < depth(); ++i) {
- __ ld(result_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
+ __ ld(result_reg, ContextMemOperand(context_reg, Context::PREVIOUS_INDEX));
context_reg = result_reg;
}
// Load the PropertyCell value at the specified slot.
__ dsll(at, slot_reg, kPointerSizeLog2);
__ Daddu(at, at, Operand(context_reg));
- __ ld(result_reg, ContextOperand(at, 0));
+ __ ld(result_reg, ContextMemOperand(at, 0));
__ ld(result_reg, FieldMemOperand(result_reg, PropertyCell::kValueOffset));
// Check that value is not the_hole.
@@ -5239,7 +5197,7 @@ void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
__ bind(&slow_case);
__ SmiTag(slot_reg);
__ Push(slot_reg);
- __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 1, 1);
+ __ TailCallRuntime(Runtime::kLoadGlobalViaContext);
}
@@ -5259,14 +5217,14 @@ void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
// Go up context chain to the script context.
for (int i = 0; i < depth(); ++i) {
- __ ld(cell_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
+ __ ld(cell_reg, ContextMemOperand(context_reg, Context::PREVIOUS_INDEX));
context_reg = cell_reg;
}
// Load the PropertyCell at the specified slot.
__ dsll(at, slot_reg, kPointerSizeLog2);
__ Daddu(at, at, Operand(context_reg));
- __ ld(cell_reg, ContextOperand(at, 0));
+ __ ld(cell_reg, ContextMemOperand(at, 0));
// Load PropertyDetails for the cell (actually only the cell_type and kind).
__ ld(cell_details_reg,
@@ -5353,8 +5311,7 @@ void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
__ Push(slot_reg, value_reg);
__ TailCallRuntime(is_strict(language_mode())
? Runtime::kStoreGlobalViaContext_Strict
- : Runtime::kStoreGlobalViaContext_Sloppy,
- 2, 1);
+ : Runtime::kStoreGlobalViaContext_Sloppy);
}
@@ -5479,7 +5436,7 @@ static void CallApiFunctionAndReturn(
// Re-throw by promoting a scheduled exception.
__ bind(&promote_scheduled_exception);
- __ TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
+ __ TailCallRuntime(Runtime::kPromoteScheduledException);
// HandleScope limit has changed. Delete allocated extensions.
__ bind(&delete_allocated_handles);