Diffstat (limited to 'deps/v8/src/arm/code-stubs-arm.cc')
-rw-r--r--  deps/v8/src/arm/code-stubs-arm.cc  379
1 file changed, 162 insertions(+), 217 deletions(-)
diff --git a/deps/v8/src/arm/code-stubs-arm.cc b/deps/v8/src/arm/code-stubs-arm.cc
index c920725477..21413335ea 100644
--- a/deps/v8/src/arm/code-stubs-arm.cc
+++ b/deps/v8/src/arm/code-stubs-arm.cc
@@ -250,7 +250,7 @@ static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
// Smis. If it's not a heap number, then return equal.
if (cond == lt || cond == gt) {
// Call runtime on identical JSObjects.
- __ CompareObjectType(r0, r4, r4, FIRST_SPEC_OBJECT_TYPE);
+ __ CompareObjectType(r0, r4, r4, FIRST_JS_RECEIVER_TYPE);
__ b(ge, slow);
// Call runtime on identical symbols since we need to throw a TypeError.
__ cmp(r4, Operand(SYMBOL_TYPE));
@@ -271,7 +271,7 @@ static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
__ b(eq, &heap_number);
// Comparing JS objects with <=, >= is complicated.
if (cond != eq) {
- __ cmp(r4, Operand(FIRST_SPEC_OBJECT_TYPE));
+ __ cmp(r4, Operand(FIRST_JS_RECEIVER_TYPE));
__ b(ge, slow);
// Call runtime on identical symbols since we need to throw a TypeError.
__ cmp(r4, Operand(SYMBOL_TYPE));
@@ -436,11 +436,11 @@ static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
// If either operand is a JS object or an oddball value, then they are
// not equal since their pointers are different.
// There is no test for undetectability in strict equality.
- STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
+ STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
Label first_non_object;
// Get the type of the first operand into r2 and compare it with
- // FIRST_SPEC_OBJECT_TYPE.
- __ CompareObjectType(rhs, r2, r2, FIRST_SPEC_OBJECT_TYPE);
+ // FIRST_JS_RECEIVER_TYPE.
+ __ CompareObjectType(rhs, r2, r2, FIRST_JS_RECEIVER_TYPE);
__ b(lt, &first_non_object);
// Return non-zero (r0 is not zero)
@@ -453,7 +453,7 @@ static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
__ cmp(r2, Operand(ODDBALL_TYPE));
__ b(eq, &return_not_equal);
- __ CompareObjectType(lhs, r3, r3, FIRST_SPEC_OBJECT_TYPE);
+ __ CompareObjectType(lhs, r3, r3, FIRST_JS_RECEIVER_TYPE);
__ b(ge, &return_not_equal);
// Check for oddballs: true, false, null, undefined.
@@ -520,9 +520,9 @@ static void EmitCheckForInternalizedStringsOrObjects(MacroAssembler* masm,
__ Ret();
__ bind(&object_test);
- __ cmp(r2, Operand(FIRST_SPEC_OBJECT_TYPE));
+ __ cmp(r2, Operand(FIRST_JS_RECEIVER_TYPE));
__ b(lt, not_both_strings);
- __ CompareObjectType(lhs, r2, r3, FIRST_SPEC_OBJECT_TYPE);
+ __ CompareObjectType(lhs, r2, r3, FIRST_JS_RECEIVER_TYPE);
__ b(lt, not_both_strings);
// If both objects are undetectable, they are equal. Otherwise, they
// are not equal, since they are different objects and an object is not
@@ -682,8 +682,7 @@ void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
__ Push(lhs, rhs);
// Figure out which native to call and setup the arguments.
if (cc == eq) {
- __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals, 2,
- 1);
+ __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals);
} else {
int ncr; // NaN compare result
if (cc == lt || cc == le) {
@@ -697,9 +696,8 @@ void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
// Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
// tagged as a small integer.
- __ TailCallRuntime(
- is_strong(strength()) ? Runtime::kCompare_Strong : Runtime::kCompare, 3,
- 1);
+ __ TailCallRuntime(is_strong(strength()) ? Runtime::kCompare_Strong
+ : Runtime::kCompare);
}
__ bind(&miss);
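Every TailCallRuntime/CallRuntime call site in this patch drops the explicit argument count (and result size). A minimal sketch of the pattern, assuming the counts are now looked up in a per-function descriptor table instead of being repeated at each call site; the names below are illustrative stand-ins, not V8's actual API:

    // Toy model of the refactoring, with hypothetical names.
    #include <cassert>

    enum FunctionId { kStringCompare, kMathPowRT };
    struct RuntimeFunction { FunctionId id; int nargs; int result_size; };

    static const RuntimeFunction kRuntimeTable[] = {
        {kStringCompare, 2, 1}, {kMathPowRT, 2, 1},
    };

    const RuntimeFunction* FunctionForId(FunctionId id) {
      for (const RuntimeFunction& f : kRuntimeTable)
        if (f.id == id) return &f;
      return nullptr;
    }

    // Old call sites wrote TailCallRuntime(id, nargs, result_size); the new
    // single-argument form can look both values up itself.
    void TailCallRuntime(FunctionId id) {
      const RuntimeFunction* f = FunctionForId(id);
      assert(f != nullptr && f->result_size == 1);
      // ... load f->nargs into the argument-count register and jump to the
      // C entry stub, as the old three-argument form did explicitly ...
    }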
@@ -901,7 +899,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
if (exponent_type() == ON_STACK) {
// The arguments are still on the stack.
__ bind(&call_runtime);
- __ TailCallRuntime(Runtime::kMathPowRT, 2, 1);
+ __ TailCallRuntime(Runtime::kMathPowRT);
// The stub is called from non-optimized code, which expects the result
// as heap number in exponent.
@@ -1346,16 +1344,6 @@ void InstanceOfStub::Generate(MacroAssembler* masm) {
__ tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
__ b(ne, &slow_case);
- // Ensure that {function} is not bound.
- Register const shared_info = scratch;
- __ ldr(shared_info,
- FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
- __ ldr(scratch, FieldMemOperand(shared_info,
- SharedFunctionInfo::kCompilerHintsOffset));
- __ tst(scratch,
- Operand(Smi::FromInt(1 << SharedFunctionInfo::kBoundFunction)));
- __ b(ne, &slow_case);
-
// Get the "prototype" (or initial map) of the {function}.
__ ldr(function_prototype,
FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
@@ -1380,27 +1368,47 @@ void InstanceOfStub::Generate(MacroAssembler* masm) {
// Loop through the prototype chain looking for the {function} prototype.
// Assume true, and change to false if not found.
- Register const object_prototype = object_map;
+ Register const object_instance_type = function_map;
+ Register const map_bit_field = function_map;
Register const null = scratch;
- Label done, loop;
- __ LoadRoot(r0, Heap::kTrueValueRootIndex);
+ Register const result = r0;
+
+ Label done, loop, fast_runtime_fallback;
+ __ LoadRoot(result, Heap::kTrueValueRootIndex);
__ LoadRoot(null, Heap::kNullValueRootIndex);
__ bind(&loop);
- __ ldr(object_prototype, FieldMemOperand(object_map, Map::kPrototypeOffset));
- __ cmp(object_prototype, function_prototype);
+
+ // Check if the object needs to be access checked.
+ __ ldrb(map_bit_field, FieldMemOperand(object_map, Map::kBitFieldOffset));
+ __ tst(map_bit_field, Operand(1 << Map::kIsAccessCheckNeeded));
+ __ b(ne, &fast_runtime_fallback);
+ // Check if the current object is a Proxy.
+ __ CompareInstanceType(object_map, object_instance_type, JS_PROXY_TYPE);
+ __ b(eq, &fast_runtime_fallback);
+
+ __ ldr(object, FieldMemOperand(object_map, Map::kPrototypeOffset));
+ __ cmp(object, function_prototype);
__ b(eq, &done);
- __ cmp(object_prototype, null);
- __ ldr(object_map, FieldMemOperand(object_prototype, HeapObject::kMapOffset));
+ __ cmp(object, null);
+ __ ldr(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
__ b(ne, &loop);
- __ LoadRoot(r0, Heap::kFalseValueRootIndex);
+ __ LoadRoot(result, Heap::kFalseValueRootIndex);
__ bind(&done);
- __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
+ __ StoreRoot(result, Heap::kInstanceofCacheAnswerRootIndex);
__ Ret();
- // Slow-case: Call the runtime function.
+ // Found Proxy or access check needed: Call the runtime
+ __ bind(&fast_runtime_fallback);
+ __ Push(object, function_prototype);
+ // Invalidate the instanceof cache.
+ __ Move(scratch, Smi::FromInt(0));
+ __ StoreRoot(scratch, Heap::kInstanceofCacheFunctionRootIndex);
+ __ TailCallRuntime(Runtime::kHasInPrototypeChain);
+
+ // Slow-case: Call the %InstanceOf runtime function.
__ bind(&slow_case);
__ Push(object, function);
- __ TailCallRuntime(Runtime::kInstanceOf, 2, 1);
+ __ TailCallRuntime(Runtime::kInstanceOf);
}
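The rewritten InstanceOfStub fast path walks the prototype chain itself and only falls back to the runtime for access-checked objects and proxies. A self-contained sketch of that control flow, using hypothetical stand-in types for Map and Object (the real work happens in the ARM code above and in Runtime::kHasInPrototypeChain):

    // Toy model of the emitted loop; types are stand-ins, not V8's.
    struct Map;
    struct Object { Map* map; bool is_null; };
    struct Map {
      bool access_check_needed;  // Map::kIsAccessCheckNeeded bit
      bool is_proxy;             // instance type == JS_PROXY_TYPE
      Object* prototype;         // Map::kPrototypeOffset
    };

    enum class Result { kTrue, kFalse, kRuntimeFallback };

    Result HasInPrototypeChainFastPath(Object* object, Object* function_prototype) {
      Map* object_map = object->map;
      for (;;) {
        // Access-checked objects and proxies are handed to
        // Runtime::kHasInPrototypeChain instead of being walked here.
        if (object_map->access_check_needed || object_map->is_proxy)
          return Result::kRuntimeFallback;
        Object* prototype = object_map->prototype;
        if (prototype == function_prototype) return Result::kTrue;   // cache true
        if (prototype->is_null) return Result::kFalse;                // cache false
        object_map = prototype->map;
      }
    }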
@@ -1502,7 +1510,7 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
// by calling the runtime system.
__ bind(&slow);
__ push(r1);
- __ TailCallRuntime(Runtime::kArguments, 1, 1);
+ __ TailCallRuntime(Runtime::kArguments);
}
@@ -1529,7 +1537,7 @@ void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
__ bind(&runtime);
__ Push(r1, r3, r2);
- __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
+ __ TailCallRuntime(Runtime::kNewSloppyArguments);
}
@@ -1590,7 +1598,7 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
__ add(r9, r9, Operand(Heap::kSloppyArgumentsObjectSize));
// Do the allocation of all three objects in one go.
- __ Allocate(r9, r0, r4, r9, &runtime, TAG_OBJECT);
+ __ Allocate(r9, r0, r9, r4, &runtime, TAG_OBJECT);
// r0 = address of new object(s) (tagged)
// r2 = argument count (smi-tagged)
@@ -1600,8 +1608,7 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
const int kAliasedOffset =
Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
- __ ldr(r4, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
- __ ldr(r4, FieldMemOperand(r4, JSGlobalObject::kNativeContextOffset));
+ __ ldr(r4, NativeContextMemOperand());
__ cmp(r6, Operand::Zero());
__ ldr(r4, MemOperand(r4, kNormalOffset), eq);
__ ldr(r4, MemOperand(r4, kAliasedOffset), ne);
@@ -1730,7 +1737,7 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
// r5 = argument count (tagged)
__ bind(&runtime);
__ Push(r1, r3, r5);
- __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
+ __ TailCallRuntime(Runtime::kNewSloppyArguments);
}
@@ -1749,7 +1756,7 @@ void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
__ Push(receiver, key); // Receiver, key.
// Perform tail call to the entry.
- __ TailCallRuntime(Runtime::kLoadElementWithInterceptor, 2, 1);
+ __ TailCallRuntime(Runtime::kLoadElementWithInterceptor);
__ bind(&slow);
PropertyAccessCompiler::TailCallBuiltin(
@@ -1793,10 +1800,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));
// Get the arguments boilerplate from the current native context.
- __ ldr(r4, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
- __ ldr(r4, FieldMemOperand(r4, JSGlobalObject::kNativeContextOffset));
- __ ldr(r4, MemOperand(
- r4, Context::SlotOffset(Context::STRICT_ARGUMENTS_MAP_INDEX)));
+ __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, r4);
__ str(r4, FieldMemOperand(r0, JSObject::kMapOffset));
__ LoadRoot(r5, Heap::kEmptyFixedArrayRootIndex);
@@ -1845,7 +1849,29 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// Do the runtime call to allocate the arguments object.
__ bind(&runtime);
__ Push(r1, r3, r2);
- __ TailCallRuntime(Runtime::kNewStrictArguments, 3, 1);
+ __ TailCallRuntime(Runtime::kNewStrictArguments);
+}
+
+
+void RestParamAccessStub::GenerateNew(MacroAssembler* masm) {
+ // r2 : number of parameters (tagged)
+ // r3 : parameters pointer
+ // r4 : rest parameter index (tagged)
+
+ Label runtime;
+ __ ldr(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+ __ ldr(r0, MemOperand(r5, StandardFrameConstants::kContextOffset));
+ __ cmp(r0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+ __ b(ne, &runtime);
+
+ // Patch the arguments.length and the parameters pointer.
+ __ ldr(r2, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset));
+ __ add(r3, r5, Operand::PointerOffsetFromSmiKey(r2));
+ __ add(r3, r3, Operand(StandardFrameConstants::kCallerSPOffset));
+
+ __ bind(&runtime);
+ __ Push(r2, r3, r4);
+ __ TailCallRuntime(Runtime::kNewRestParam);
}
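The new RestParamAccessStub only patches its inputs when the caller went through an arguments adaptor frame, then defers to the runtime. A rough, self-contained sketch of that preparation step; the types and field names are stand-ins for the frame constants used above:

    // Toy model; the real offsets come from StandardFrameConstants and
    // ArgumentsAdaptorFrameConstants.
    struct CallerFrame {
      bool is_arguments_adaptor;  // context slot holds the ARGUMENTS_ADAPTOR marker
      int adaptor_arg_count;      // adaptor frame's length slot (untagged here)
      void** caller_sp;
    };

    struct RestParams { int count; void** parameters; int rest_index; };

    RestParams PrepareNewRestParam(const CallerFrame* caller, RestParams in) {
      if (caller->is_arguments_adaptor) {
        // Arguments were pushed through an adaptor frame: take the real count
        // from it and recompute the parameter pointer past those arguments.
        in.count = caller->adaptor_arg_count;
        in.parameters = caller->caller_sp + in.count;
      }
      // The stub then pushes {count, parameters, rest_index} and tail-calls
      // Runtime::kNewRestParam, which builds the actual rest array.
      return in;
    }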
@@ -1854,7 +1880,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// time or if regexp entry in generated code is turned off runtime switch or
// at compilation.
#ifdef V8_INTERPRETED_REGEXP
- __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
+ __ TailCallRuntime(Runtime::kRegExpExec);
#else // V8_INTERPRETED_REGEXP
// Stack frame on entry.
@@ -2125,7 +2151,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ b(eq, &runtime);
// For exception, throw the exception again.
- __ TailCallRuntime(Runtime::kRegExpExecReThrow, 4, 1);
+ __ TailCallRuntime(Runtime::kRegExpExecReThrow);
__ bind(&failure);
// For failure and exception return null.
@@ -2220,7 +2246,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Do the runtime call to execute the regexp.
__ bind(&runtime);
- __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
+ __ TailCallRuntime(Runtime::kRegExpExec);
// Deferred code for string handling.
// (6) Not a long external string? If yes, go to (8).
@@ -2263,33 +2289,25 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
}
-static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub,
- bool is_super) {
+static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
// r0 : number of arguments to the construct function
// r1 : the function to call
// r2 : feedback vector
// r3 : slot in feedback vector (Smi)
- // r4 : original constructor (for IsSuperConstructorCall)
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
// Number-of-arguments register must be smi-tagged to call out.
__ SmiTag(r0);
__ Push(r3, r2, r1, r0);
- if (is_super) {
- __ Push(r4);
- }
__ CallStub(stub);
- if (is_super) {
- __ Pop(r4);
- }
__ Pop(r3, r2, r1, r0);
__ SmiUntag(r0);
}
-static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
+static void GenerateRecordCallTarget(MacroAssembler* masm) {
// Cache the called function in a feedback vector slot. Cache states
// are uninitialized, monomorphic (indicated by a JSFunction), and
// megamorphic.
@@ -2297,7 +2315,6 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
// r1 : the function to call
// r2 : feedback vector
// r3 : slot in feedback vector (Smi)
- // r4 : original constructor (for IsSuperConstructorCall)
Label initialize, done, miss, megamorphic, not_array_function;
DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()),
@@ -2338,7 +2355,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
__ b(ne, &miss);
// Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r5);
+ __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r5);
__ cmp(r1, r5);
__ b(ne, &megamorphic);
__ jmp(&done);
@@ -2361,7 +2378,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
__ bind(&initialize);
// Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r5);
+ __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r5);
__ cmp(r1, r5);
__ b(ne, &not_array_function);
@@ -2369,12 +2386,12 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
// Create an AllocationSite if we don't already have it, store it in the
// slot.
CreateAllocationSiteStub create_stub(masm->isolate());
- CallStubInRecordCallTarget(masm, &create_stub, is_super);
+ CallStubInRecordCallTarget(masm, &create_stub);
__ b(&done);
__ bind(&not_array_function);
CreateWeakCellStub weak_cell_stub(masm->isolate());
- CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super);
+ CallStubInRecordCallTarget(masm, &weak_cell_stub);
__ bind(&done);
}
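GenerateRecordCallTarget maintains the feedback-vector slot described in the comment above (uninitialized, monomorphic, megamorphic, with the Array() function getting an AllocationSite instead of a WeakCell). A toy state machine capturing those transitions, purely illustrative:

    // Hypothetical model of the feedback-slot transitions; not V8 code.
    enum class FeedbackState {
      kUninitialized, kMonomorphic, kAllocationSite, kMegamorphic
    };

    FeedbackState RecordCallTarget(FeedbackState state, bool same_target,
                                   bool target_is_array_function) {
      switch (state) {
        case FeedbackState::kUninitialized:
          // First call: CreateAllocationSiteStub for Array(), otherwise a
          // WeakCell pointing at the callee (CreateWeakCellStub).
          return target_is_array_function ? FeedbackState::kAllocationSite
                                          : FeedbackState::kMonomorphic;
        case FeedbackState::kMonomorphic:
        case FeedbackState::kAllocationSite:
          // Seeing the same target keeps the state; anything else degrades the
          // slot to the megamorphic sentinel.
          return same_target ? state : FeedbackState::kMegamorphic;
        case FeedbackState::kMegamorphic:
          return FeedbackState::kMegamorphic;
      }
      return state;
    }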
@@ -2384,7 +2401,6 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
// r1 : the function to call
// r2 : feedback vector
// r3 : slot in feedback vector (Smi, for RecordCallTarget)
- // r4 : original constructor (for IsSuperConstructorCall)
Label non_function;
// Check that the function is not a smi.
@@ -2393,28 +2409,22 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
__ CompareObjectType(r1, r5, r5, JS_FUNCTION_TYPE);
__ b(ne, &non_function);
- if (RecordCallTarget()) {
- GenerateRecordCallTarget(masm, IsSuperConstructorCall());
+ GenerateRecordCallTarget(masm);
- __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
- Label feedback_register_initialized;
- // Put the AllocationSite from the feedback vector into r2, or undefined.
- __ ldr(r2, FieldMemOperand(r5, FixedArray::kHeaderSize));
- __ ldr(r5, FieldMemOperand(r2, AllocationSite::kMapOffset));
- __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
- __ b(eq, &feedback_register_initialized);
- __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
- __ bind(&feedback_register_initialized);
+ __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
+ Label feedback_register_initialized;
+ // Put the AllocationSite from the feedback vector into r2, or undefined.
+ __ ldr(r2, FieldMemOperand(r5, FixedArray::kHeaderSize));
+ __ ldr(r5, FieldMemOperand(r2, AllocationSite::kMapOffset));
+ __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
+ __ b(eq, &feedback_register_initialized);
+ __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
+ __ bind(&feedback_register_initialized);
- __ AssertUndefinedOrAllocationSite(r2, r5);
- }
+ __ AssertUndefinedOrAllocationSite(r2, r5);
- // Pass function as original constructor.
- if (IsSuperConstructorCall()) {
- __ mov(r3, r4);
- } else {
- __ mov(r3, r1);
- }
+ // Pass function as new target.
+ __ mov(r3, r1);
// Tail call to the function-specific construct stub (still in the caller
// context at this point).
@@ -2433,7 +2443,7 @@ void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
// r3 - slot id
// r2 - vector
// r4 - allocation site (loaded from vector[slot])
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r5);
+ __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r5);
__ cmp(r1, r5);
__ b(ne, miss);
@@ -2457,11 +2467,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
// r1 - function
// r3 - slot id (Smi)
// r2 - vector
- const int with_types_offset =
- FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex);
- const int generic_offset =
- FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex);
- Label extra_checks_or_miss, call;
+ Label extra_checks_or_miss, call, call_function;
int argc = arg_count();
ParameterCount actual(argc);
@@ -2498,9 +2504,10 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ add(r3, r3, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
__ str(r3, FieldMemOperand(r2, 0));
- __ bind(&call);
+ __ bind(&call_function);
__ mov(r0, Operand(argc));
- __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
+ __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode()),
+ RelocInfo::CODE_TARGET);
__ bind(&extra_checks_or_miss);
Label uninitialized, miss, not_allocation_site;
@@ -2535,14 +2542,11 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
__ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex);
__ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize));
- // We have to update statistics for runtime profiling.
- __ ldr(r4, FieldMemOperand(r2, with_types_offset));
- __ sub(r4, r4, Operand(Smi::FromInt(1)));
- __ str(r4, FieldMemOperand(r2, with_types_offset));
- __ ldr(r4, FieldMemOperand(r2, generic_offset));
- __ add(r4, r4, Operand(Smi::FromInt(1)));
- __ str(r4, FieldMemOperand(r2, generic_offset));
- __ jmp(&call);
+
+ __ bind(&call);
+ __ mov(r0, Operand(argc));
+ __ Jump(masm->isolate()->builtins()->Call(convert_mode()),
+ RelocInfo::CODE_TARGET);
__ bind(&uninitialized);
@@ -2555,14 +2559,16 @@ void CallICStub::Generate(MacroAssembler* masm) {
// Make sure the function is not the Array() function, which requires special
// behavior on MISS.
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r4);
+ __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r4);
__ cmp(r1, r4);
__ b(eq, &miss);
- // Update stats.
- __ ldr(r4, FieldMemOperand(r2, with_types_offset));
- __ add(r4, r4, Operand(Smi::FromInt(1)));
- __ str(r4, FieldMemOperand(r2, with_types_offset));
+ // Make sure the function belongs to the same native context.
+ __ ldr(r4, FieldMemOperand(r1, JSFunction::kContextOffset));
+ __ ldr(r4, ContextMemOperand(r4, Context::NATIVE_CONTEXT_INDEX));
+ __ ldr(ip, NativeContextMemOperand());
+ __ cmp(r4, ip);
+ __ b(ne, &miss);
// Initialize the call counter.
__ Move(r5, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
@@ -2581,7 +2587,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ Pop(r1);
}
- __ jmp(&call);
+ __ jmp(&call_function);
// We are here because tracing is on or we encountered a MISS case we can't
// handle here.
@@ -2599,7 +2605,7 @@ void CallICStub::GenerateMiss(MacroAssembler* masm) {
__ Push(r1, r2, r3);
// Call the entry.
- __ CallRuntime(Runtime::kCallIC_Miss, 3);
+ __ CallRuntime(Runtime::kCallIC_Miss);
// Move result to edi and exit the internal frame.
__ mov(r1, r0);
@@ -2664,11 +2670,11 @@ void StringCharCodeAtGenerator::GenerateSlow(
__ Push(object_, index_);
}
if (index_flags_ == STRING_INDEX_IS_NUMBER) {
- __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
+ __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero);
} else {
DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
// NumberToSmi discards numbers that are not exact integers.
- __ CallRuntime(Runtime::kNumberToSmi, 1);
+ __ CallRuntime(Runtime::kNumberToSmi);
}
// Save the conversion result before the pop instructions below
// have a chance to overwrite it.
@@ -2695,7 +2701,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
call_helper.BeforeCall(masm);
__ SmiTag(index_);
__ Push(object_, index_);
- __ CallRuntime(Runtime::kStringCharCodeAtRT, 2);
+ __ CallRuntime(Runtime::kStringCharCodeAtRT);
__ Move(result_, r0);
call_helper.AfterCall(masm);
__ jmp(&exit_);
@@ -2734,7 +2740,7 @@ void StringCharFromCodeGenerator::GenerateSlow(
__ bind(&slow_case_);
call_helper.BeforeCall(masm);
__ push(code_);
- __ CallRuntime(Runtime::kStringCharFromCode, 1);
+ __ CallRuntime(Runtime::kStringCharFromCode);
__ Move(result_, r0);
call_helper.AfterCall(masm);
__ jmp(&exit_);
@@ -2990,7 +2996,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
// Just jump to runtime to create the sub string.
__ bind(&runtime);
- __ TailCallRuntime(Runtime::kSubString, 3, 1);
+ __ TailCallRuntime(Runtime::kSubString);
__ bind(&single_char);
// r0: original string
@@ -3030,7 +3036,7 @@ void ToNumberStub::Generate(MacroAssembler* masm) {
__ Ret();
__ bind(&slow_string);
__ push(r0); // Push argument.
- __ TailCallRuntime(Runtime::kStringToNumber, 1, 1);
+ __ TailCallRuntime(Runtime::kStringToNumber);
__ bind(&not_string);
Label not_oddball;
@@ -3041,7 +3047,7 @@ void ToNumberStub::Generate(MacroAssembler* masm) {
__ bind(&not_oddball);
__ push(r0); // Push argument.
- __ TailCallRuntime(Runtime::kToNumber, 1, 1);
+ __ TailCallRuntime(Runtime::kToNumber);
}
@@ -3056,7 +3062,7 @@ void ToLengthStub::Generate(MacroAssembler* masm) {
__ bind(&not_smi);
__ push(r0); // Push argument.
- __ TailCallRuntime(Runtime::kToLength, 1, 1);
+ __ TailCallRuntime(Runtime::kToLength);
}
@@ -3086,7 +3092,7 @@ void ToStringStub::Generate(MacroAssembler* masm) {
__ bind(&not_oddball);
__ push(r0); // Push argument.
- __ TailCallRuntime(Runtime::kToString, 1, 1);
+ __ TailCallRuntime(Runtime::kToString);
}
@@ -3216,7 +3222,7 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
// tagged as a small integer.
__ bind(&runtime);
__ Push(r1, r0);
- __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
+ __ TailCallRuntime(Runtime::kStringCompare);
}
@@ -3258,7 +3264,7 @@ void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
__ CheckMap(r1, r2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
__ CheckMap(r0, r3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
if (op() != Token::EQ_STRICT && is_strong(strength())) {
- __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0, 1);
+ __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion);
} else {
if (!Token::IsEqualityOp(op())) {
__ ldr(r1, FieldMemOperand(r1, Oddball::kToNumberOffset));
@@ -3519,9 +3525,9 @@ void CompareICStub::GenerateStrings(MacroAssembler* masm) {
__ bind(&runtime);
__ Push(left, right);
if (equality) {
- __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
+ __ TailCallRuntime(Runtime::kStringEquals);
} else {
- __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
+ __ TailCallRuntime(Runtime::kStringCompare);
}
__ bind(&miss);
@@ -3529,16 +3535,17 @@ void CompareICStub::GenerateStrings(MacroAssembler* masm) {
}
-void CompareICStub::GenerateObjects(MacroAssembler* masm) {
- DCHECK(state() == CompareICState::OBJECT);
+void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
+ DCHECK_EQ(CompareICState::RECEIVER, state());
Label miss;
__ and_(r2, r1, Operand(r0));
__ JumpIfSmi(r2, &miss);
- __ CompareObjectType(r0, r2, r2, JS_OBJECT_TYPE);
- __ b(ne, &miss);
- __ CompareObjectType(r1, r2, r2, JS_OBJECT_TYPE);
- __ b(ne, &miss);
+ STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
+ __ CompareObjectType(r0, r2, r2, FIRST_JS_RECEIVER_TYPE);
+ __ b(lt, &miss);
+ __ CompareObjectType(r1, r2, r2, FIRST_JS_RECEIVER_TYPE);
+ __ b(lt, &miss);
DCHECK(GetCondition() == eq);
__ sub(r0, r0, Operand(r1));
@@ -3549,7 +3556,7 @@ void CompareICStub::GenerateObjects(MacroAssembler* masm) {
}
-void CompareICStub::GenerateKnownObjects(MacroAssembler* masm) {
+void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
Label miss;
Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
__ and_(r2, r1, Operand(r0));
@@ -3566,7 +3573,7 @@ void CompareICStub::GenerateKnownObjects(MacroAssembler* masm) {
__ sub(r0, r0, Operand(r1));
__ Ret();
} else if (is_strong(strength())) {
- __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0, 1);
+ __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion);
} else {
if (op() == Token::LT || op() == Token::LTE) {
__ mov(r2, Operand(Smi::FromInt(GREATER)));
@@ -3574,7 +3581,7 @@ void CompareICStub::GenerateKnownObjects(MacroAssembler* masm) {
__ mov(r2, Operand(Smi::FromInt(LESS)));
}
__ Push(r1, r0, r2);
- __ TailCallRuntime(Runtime::kCompare, 3, 1);
+ __ TailCallRuntime(Runtime::kCompare);
}
__ bind(&miss);
@@ -3590,7 +3597,7 @@ void CompareICStub::GenerateMiss(MacroAssembler* masm) {
__ Push(lr, r1, r0);
__ mov(ip, Operand(Smi::FromInt(op())));
__ push(ip);
- __ CallRuntime(Runtime::kCompareIC_Miss, 3);
+ __ CallRuntime(Runtime::kCompareIC_Miss);
// Compute the entry point of the rewritten stub.
__ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
// Restore registers.
@@ -4035,11 +4042,11 @@ void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
// We need extra registers for this, so we push the object and the address
// register temporarily.
__ Push(regs_.object(), regs_.address());
- __ EnsureNotWhite(regs_.scratch0(), // The value.
- regs_.scratch1(), // Scratch.
- regs_.object(), // Scratch.
- regs_.address(), // Scratch.
- &need_incremental_pop_scratch);
+ __ JumpIfWhite(regs_.scratch0(), // The value.
+ regs_.scratch1(), // Scratch.
+ regs_.object(), // Scratch.
+ regs_.address(), // Scratch.
+ &need_incremental_pop_scratch);
__ Pop(regs_.object(), regs_.address());
regs_.Restore(masm);
@@ -4059,68 +4066,6 @@ void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
}
-void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
- // ----------- S t a t e -------------
- // -- r0 : element value to store
- // -- r3 : element index as smi
- // -- sp[0] : array literal index in function as smi
- // -- sp[4] : array literal
- // clobbers r1, r2, r4
- // -----------------------------------
-
- Label element_done;
- Label double_elements;
- Label smi_element;
- Label slow_elements;
- Label fast_elements;
-
- // Get array literal index, array literal and its map.
- __ ldr(r4, MemOperand(sp, 0 * kPointerSize));
- __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
- __ ldr(r2, FieldMemOperand(r1, JSObject::kMapOffset));
-
- __ CheckFastElements(r2, r5, &double_elements);
- // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS
- __ JumpIfSmi(r0, &smi_element);
- __ CheckFastSmiElements(r2, r5, &fast_elements);
-
- // Store into the array literal requires a elements transition. Call into
- // the runtime.
- __ bind(&slow_elements);
- // call.
- __ Push(r1, r3, r0);
- __ ldr(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
- __ ldr(r5, FieldMemOperand(r5, JSFunction::kLiteralsOffset));
- __ Push(r5, r4);
- __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
-
- // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
- __ bind(&fast_elements);
- __ ldr(r5, FieldMemOperand(r1, JSObject::kElementsOffset));
- __ add(r6, r5, Operand::PointerOffsetFromSmiKey(r3));
- __ add(r6, r6, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- __ str(r0, MemOperand(r6, 0));
- // Update the write barrier for the array store.
- __ RecordWrite(r5, r6, r0, kLRHasNotBeenSaved, kDontSaveFPRegs,
- EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
- __ Ret();
-
- // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS,
- // and value is Smi.
- __ bind(&smi_element);
- __ ldr(r5, FieldMemOperand(r1, JSObject::kElementsOffset));
- __ add(r6, r5, Operand::PointerOffsetFromSmiKey(r3));
- __ str(r0, FieldMemOperand(r6, FixedArray::kHeaderSize));
- __ Ret();
-
- // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
- __ bind(&double_elements);
- __ ldr(r5, FieldMemOperand(r1, JSObject::kElementsOffset));
- __ StoreNumberToDoubleElements(r0, r3, r5, r6, d0, &slow_elements);
- __ Ret();
-}
-
-
void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
CEntryStub ces(isolate(), 1, kSaveFPRegs);
__ Call(ces.GetCode(), RelocInfo::CODE_TARGET);
@@ -4835,7 +4780,7 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
// -- r0 : argc (only if argument_count() == ANY)
// -- r1 : constructor
// -- r2 : AllocationSite or undefined
- // -- r3 : original constructor
+ // -- r3 : new target
// -- sp[0] : return address
// -- sp[4] : last argument
// -----------------------------------
@@ -4856,6 +4801,9 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
__ AssertUndefinedOrAllocationSite(r2, r4);
}
+ // Enter the context of the Array function.
+ __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
+
Label subclassing;
__ cmp(r3, r1);
__ b(ne, &subclassing);
@@ -4875,25 +4823,23 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
__ bind(&subclassing);
- __ push(r1);
- __ push(r3);
-
- // Adjust argc.
switch (argument_count()) {
case ANY:
case MORE_THAN_ONE:
- __ add(r0, r0, Operand(2));
+ __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
+ __ add(r0, r0, Operand(3));
break;
case NONE:
- __ mov(r0, Operand(2));
+ __ str(r1, MemOperand(sp, 0 * kPointerSize));
+ __ mov(r0, Operand(3));
break;
case ONE:
- __ mov(r0, Operand(3));
+ __ str(r1, MemOperand(sp, 1 * kPointerSize));
+ __ mov(r0, Operand(4));
break;
}
-
- __ JumpToExternalReference(
- ExternalReference(Runtime::kArrayConstructorWithSubclassing, isolate()));
+ __ Push(r3, r2);
+ __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
}
@@ -4979,13 +4925,13 @@ void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
// Go up the context chain to the script context.
for (int i = 0; i < depth(); ++i) {
- __ ldr(result, ContextOperand(context, Context::PREVIOUS_INDEX));
+ __ ldr(result, ContextMemOperand(context, Context::PREVIOUS_INDEX));
context = result;
}
// Load the PropertyCell value at the specified slot.
__ add(result, context, Operand(slot, LSL, kPointerSizeLog2));
- __ ldr(result, ContextOperand(result));
+ __ ldr(result, ContextMemOperand(result));
__ ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset));
// If the result is not the_hole, return. Otherwise, handle in the runtime.
@@ -4995,7 +4941,7 @@ void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
// Fallback to runtime.
__ SmiTag(slot);
__ push(slot);
- __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 1, 1);
+ __ TailCallRuntime(Runtime::kLoadGlobalViaContext);
}
@@ -5021,13 +4967,13 @@ void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
// Go up the context chain to the script context.
for (int i = 0; i < depth(); i++) {
- __ ldr(context_temp, ContextOperand(context, Context::PREVIOUS_INDEX));
+ __ ldr(context_temp, ContextMemOperand(context, Context::PREVIOUS_INDEX));
context = context_temp;
}
// Load the PropertyCell at the specified slot.
__ add(cell, context, Operand(slot, LSL, kPointerSizeLog2));
- __ ldr(cell, ContextOperand(cell));
+ __ ldr(cell, ContextMemOperand(cell));
// Load PropertyDetails for the cell (actually only the cell_type and kind).
__ ldr(cell_details, FieldMemOperand(cell, PropertyCell::kDetailsOffset));
@@ -5119,8 +5065,7 @@ void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
__ Push(slot, value);
__ TailCallRuntime(is_strict(language_mode())
? Runtime::kStoreGlobalViaContext_Strict
- : Runtime::kStoreGlobalViaContext_Sloppy,
- 2, 1);
+ : Runtime::kStoreGlobalViaContext_Sloppy);
}
@@ -5247,7 +5192,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
// Re-throw by promoting a scheduled exception.
__ bind(&promote_scheduled_exception);
- __ TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
+ __ TailCallRuntime(Runtime::kPromoteScheduledException);
// HandleScope limit has changed. Delete allocated extensions.
__ bind(&delete_allocated_handles);