Diffstat (limited to 'deps/v8/src/arm64/code-stubs-arm64.cc')
-rw-r--r--  deps/v8/src/arm64/code-stubs-arm64.cc | 435
1 file changed, 197 insertions(+), 238 deletions(-)
diff --git a/deps/v8/src/arm64/code-stubs-arm64.cc b/deps/v8/src/arm64/code-stubs-arm64.cc
index 751d8aebde..a1e920755d 100644
--- a/deps/v8/src/arm64/code-stubs-arm64.cc
+++ b/deps/v8/src/arm64/code-stubs-arm64.cc
@@ -223,7 +223,7 @@ static void EmitIdenticalObjectComparison(MacroAssembler* masm, Register left,
Register right_type = scratch;
if ((cond == lt) || (cond == gt)) {
// Call runtime on identical JSObjects. Otherwise return equal.
- __ JumpIfObjectType(right, right_type, right_type, FIRST_SPEC_OBJECT_TYPE,
+ __ JumpIfObjectType(right, right_type, right_type, FIRST_JS_RECEIVER_TYPE,
slow, ge);
// Call runtime on identical symbols since we need to throw a TypeError.
__ Cmp(right_type, SYMBOL_TYPE);
@@ -245,7 +245,7 @@ static void EmitIdenticalObjectComparison(MacroAssembler* masm, Register left,
__ JumpIfObjectType(right, right_type, right_type, HEAP_NUMBER_TYPE,
&heap_number);
// Comparing JS objects with <=, >= is complicated.
- __ Cmp(right_type, FIRST_SPEC_OBJECT_TYPE);
+ __ Cmp(right_type, FIRST_JS_RECEIVER_TYPE);
__ B(ge, slow);
// Call runtime on identical symbols since we need to throw a TypeError.
__ Cmp(right_type, SYMBOL_TYPE);
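A note on the rename running through these hunks: FIRST_SPEC_OBJECT_TYPE becomes FIRST_JS_RECEIVER_TYPE, a pure renaming of the lower bound of the JS-receiver instance-type range; the emitted compares are unchanged. Because LAST_TYPE == LAST_JS_RECEIVER_TYPE (asserted in the next hunk), a single lower-bound check covers the whole range, roughly (a sketch, not the V8 helper):

  // Everything at or above the first JS receiver type is a JS object or
  // proxy, so identical-reference <, > comparisons defer to the runtime.
  bool IsJSReceiverInstanceType(InstanceType t) {
    return t >= FIRST_JS_RECEIVER_TYPE;  // no upper-bound check needed
  }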
@@ -336,10 +336,10 @@ static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
// If either operand is a JS object or an oddball value, then they are not
// equal since their pointers are different.
// There is no test for undetectability in strict equality.
- STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
+ STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
Label right_non_object;
- __ Cmp(right_type, FIRST_SPEC_OBJECT_TYPE);
+ __ Cmp(right_type, FIRST_JS_RECEIVER_TYPE);
__ B(lt, &right_non_object);
// Return non-zero - x0 already contains a non-zero pointer.
@@ -356,9 +356,9 @@ static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
// If right is not ODDBALL, test left. Otherwise, set eq condition.
__ Ccmp(left_type, ODDBALL_TYPE, ZFlag, ne);
- // If right or left is not ODDBALL, test left >= FIRST_SPEC_OBJECT_TYPE.
+ // If right or left is not ODDBALL, test left >= FIRST_JS_RECEIVER_TYPE.
// Otherwise, right or left is ODDBALL, so set a ge condition.
- __ Ccmp(left_type, FIRST_SPEC_OBJECT_TYPE, NVFlag, ne);
+ __ Ccmp(left_type, FIRST_JS_RECEIVER_TYPE, NVFlag, ne);
__ B(ge, &return_not_equal);
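These hunks lean on ARM64's conditional compare: Ccmp(lhs, rhs, nzcv, cond) performs the compare only if cond holds on the current flags, and otherwise installs the literal nzcv pattern (ZFlag makes a later eq hold; NVFlag sets N and V together so ge holds; NFlag alone makes lt hold). A hedged C++ model of that behaviour, with invented names:

  struct Flags { bool n, z, c, v; };  // illustrative, not a V8 type

  Flags Ccmp(bool cond_holds, int64_t lhs, int64_t rhs, Flags nzcv) {
    if (!cond_holds) return nzcv;  // e.g. NVFlag: n == v, so 'ge' holds
    int64_t d = lhs - rhs;         // otherwise flags come from lhs - rhs
    bool v = ((lhs < 0) != (rhs < 0)) && ((d < 0) != (lhs < 0));
    return Flags{d < 0, d == 0,
                 static_cast<uint64_t>(lhs) >= static_cast<uint64_t>(rhs), v};
  }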
@@ -471,11 +471,11 @@ static void EmitCheckForInternalizedStringsOrObjects(MacroAssembler* masm,
__ Bind(&object_test);
- __ Cmp(right_type, FIRST_SPEC_OBJECT_TYPE);
+ __ Cmp(right_type, FIRST_JS_RECEIVER_TYPE);
- // If right >= FIRST_SPEC_OBJECT_TYPE, test left.
- // Otherwise, right < FIRST_SPEC_OBJECT_TYPE, so set lt condition.
- __ Ccmp(left_type, FIRST_SPEC_OBJECT_TYPE, NFlag, ge);
+ // If right >= FIRST_JS_RECEIVER_TYPE, test left.
+ // Otherwise, right < FIRST_JS_RECEIVER_TYPE, so set lt condition.
+ __ Ccmp(left_type, FIRST_JS_RECEIVER_TYPE, NFlag, ge);
__ B(lt, not_both_strings);
@@ -653,8 +653,7 @@ void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
__ Push(lhs, rhs);
// Figure out which native to call and setup the arguments.
if (cond == eq) {
- __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals, 2,
- 1);
+ __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals);
} else {
int ncr; // NaN compare result
if ((cond == lt) || (cond == le)) {
@@ -668,9 +667,8 @@ void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
// Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
// tagged as a small integer.
- __ TailCallRuntime(
- is_strong(strength()) ? Runtime::kCompare_Strong : Runtime::kCompare, 3,
- 1);
+ __ TailCallRuntime(is_strong(strength()) ? Runtime::kCompare_Strong
+ : Runtime::kCompare);
}
__ Bind(&miss);
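The dropped trailing arguments here recur throughout the patch: TailCallRuntime now takes only the Runtime::FunctionId and reads the arity from the runtime function table, so call sites can no longer pass a stale count. An illustrative shape (not the exact V8 declaration):

  void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
    const Runtime::Function* function = Runtime::FunctionForId(fid);
    // Arity comes from the table; every stub in this file expects the
    // single result the old call sites spelled out as a trailing 1.
    TailCallExternalReference(ExternalReference(fid, isolate()),
                              function->nargs, 1);
  }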
@@ -966,7 +964,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
__ Bind(&call_runtime);
// Put the arguments back on the stack.
__ Push(base_tagged, exponent_tagged);
- __ TailCallRuntime(Runtime::kMathPowRT, 2, 1);
+ __ TailCallRuntime(Runtime::kMathPowRT);
// Return.
__ Bind(&done);
@@ -1550,17 +1548,6 @@ void InstanceOfStub::Generate(MacroAssembler* masm) {
__ Ldrb(scratch, FieldMemOperand(function_map, Map::kBitFieldOffset));
__ Tbnz(scratch, Map::kHasNonInstancePrototype, &slow_case);
- // Ensure that {function} is not bound.
- Register const shared_info = scratch;
- Register const scratch_w = scratch.W();
- __ Ldr(shared_info,
- FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
- // On 64-bit platforms, compiler hints field is not a smi. See definition of
- // kCompilerHintsOffset in src/objects.h.
- __ Ldr(scratch_w, FieldMemOperand(shared_info,
- SharedFunctionInfo::kCompilerHintsOffset));
- __ Tbnz(scratch_w, SharedFunctionInfo::kBoundFunction, &slow_case);
-
// Get the "prototype" (or initial map) of the {function}.
__ Ldr(function_prototype,
FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
@@ -1585,27 +1572,47 @@ void InstanceOfStub::Generate(MacroAssembler* masm) {
// Loop through the prototype chain looking for the {function} prototype.
// Assume true, and change to false if not found.
- Register const object_prototype = object_map;
+ Register const object_instance_type = function_map;
+ Register const map_bit_field = function_map;
Register const null = scratch;
- Label done, loop;
- __ LoadRoot(x0, Heap::kTrueValueRootIndex);
+ Register const result = x0;
+
+ Label done, loop, fast_runtime_fallback;
+ __ LoadRoot(result, Heap::kTrueValueRootIndex);
__ LoadRoot(null, Heap::kNullValueRootIndex);
__ Bind(&loop);
- __ Ldr(object_prototype, FieldMemOperand(object_map, Map::kPrototypeOffset));
- __ Cmp(object_prototype, function_prototype);
+
+ // Check if the object needs to be access checked.
+ __ Ldrb(map_bit_field, FieldMemOperand(object_map, Map::kBitFieldOffset));
+ __ TestAndBranchIfAnySet(map_bit_field, 1 << Map::kIsAccessCheckNeeded,
+ &fast_runtime_fallback);
+ // Check if the current object is a Proxy.
+ __ CompareInstanceType(object_map, object_instance_type, JS_PROXY_TYPE);
+ __ B(eq, &fast_runtime_fallback);
+
+ __ Ldr(object, FieldMemOperand(object_map, Map::kPrototypeOffset));
+ __ Cmp(object, function_prototype);
__ B(eq, &done);
- __ Cmp(object_prototype, null);
- __ Ldr(object_map, FieldMemOperand(object_prototype, HeapObject::kMapOffset));
+ __ Cmp(object, null);
+ __ Ldr(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
__ B(ne, &loop);
- __ LoadRoot(x0, Heap::kFalseValueRootIndex);
+ __ LoadRoot(result, Heap::kFalseValueRootIndex);
__ Bind(&done);
- __ StoreRoot(x0, Heap::kInstanceofCacheAnswerRootIndex);
+ __ StoreRoot(result, Heap::kInstanceofCacheAnswerRootIndex);
__ Ret();
- // Slow-case: Call the runtime function.
+ // Found Proxy or access check needed: Call the runtime
+ __ Bind(&fast_runtime_fallback);
+ __ Push(object, function_prototype);
+ // Invalidate the instanceof cache.
+ __ Move(scratch, Smi::FromInt(0));
+ __ StoreRoot(scratch, Heap::kInstanceofCacheFunctionRootIndex);
+ __ TailCallRuntime(Runtime::kHasInPrototypeChain);
+
+ // Slow-case: Call the %InstanceOf runtime function.
__ bind(&slow_case);
__ Push(object, function);
- __ TailCallRuntime(Runtime::kInstanceOf, 2, 1);
+ __ TailCallRuntime(Runtime::kInstanceOf);
}
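The rewritten loop keeps the in-register walk of the prototype chain but now bails out to the new Runtime::kHasInPrototypeChain when it meets a proxy or an access-checked map, invalidating the instanceof cache first. Its control flow, as a hedged sketch with invented helper names:

  Object* InstanceOfFast(HeapObject* object, Object* function_prototype) {
    for (;;) {
      Map* map = object->map();
      // Proxies and access-checked objects can observe or veto the walk.
      if (map->is_access_check_needed() ||
          map->instance_type() == JS_PROXY_TYPE) {
        return RuntimeHasInPrototypeChain(object, function_prototype);
      }
      Object* proto = map->prototype();
      if (proto == function_prototype) return true_value;
      if (proto == null_value) return false_value;
      object = HeapObject::cast(proto);
    }
  }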
@@ -1656,7 +1663,7 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
// the runtime system.
__ Bind(&slow);
__ Push(key);
- __ TailCallRuntime(Runtime::kArguments, 1, 1);
+ __ TailCallRuntime(Runtime::kArguments);
}
@@ -1687,7 +1694,7 @@ void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
__ Bind(&runtime);
__ Push(x1, x3, x2);
- __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
+ __ TailCallRuntime(Runtime::kNewSloppyArguments);
}
@@ -1801,13 +1808,10 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
// x11 sloppy_args_map offset to args (or aliased args) map (uninit)
// x14 arg_count number of function arguments
- Register global_object = x10;
Register global_ctx = x10;
Register sloppy_args_map = x11;
Register aliased_args_map = x10;
- __ Ldr(global_object, GlobalObjectMemOperand());
- __ Ldr(global_ctx,
- FieldMemOperand(global_object, JSGlobalObject::kNativeContextOffset));
+ __ Ldr(global_ctx, NativeContextMemOperand());
__ Ldr(sloppy_args_map,
ContextMemOperand(global_ctx, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
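Here and in the strict-arguments hunk below, the two-step load (global object, then its native-context field) collapses into one read: the native context is reachable directly from the current context register. In essence (a sketch consistent with the Context::NATIVE_CONTEXT_INDEX slot used later in this patch):

  MemOperand NativeContextMemOperand() {
    return ContextMemOperand(cp, Context::NATIVE_CONTEXT_INDEX);
  }

LoadNativeContextSlot(index, dst) goes one step further and folds the slot load into the same helper.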
@@ -1965,7 +1969,7 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
// Do the runtime call to allocate the arguments object.
__ Bind(&runtime);
__ Push(function, recv_arg, arg_count_smi);
- __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
+ __ TailCallRuntime(Runtime::kNewSloppyArguments);
}
@@ -1981,7 +1985,7 @@ void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
// Everything is fine, call runtime.
__ Push(receiver, key);
- __ TailCallRuntime(Runtime::kLoadElementWithInterceptor, 2, 1);
+ __ TailCallRuntime(Runtime::kLoadElementWithInterceptor);
__ Bind(&slow);
PropertyAccessCompiler::TailCallBuiltin(
@@ -2047,14 +2051,9 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));
// Get the arguments boilerplate from the current (native) context.
- Register global_object = x10;
- Register global_ctx = x10;
Register strict_args_map = x4;
- __ Ldr(global_object, GlobalObjectMemOperand());
- __ Ldr(global_ctx,
- FieldMemOperand(global_object, JSGlobalObject::kNativeContextOffset));
- __ Ldr(strict_args_map,
- ContextMemOperand(global_ctx, Context::STRICT_ARGUMENTS_MAP_INDEX));
+ __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX,
+ strict_args_map);
// x0 alloc_obj pointer to allocated objects: parameter array and
// arguments object
@@ -2118,13 +2117,61 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// Do the runtime call to allocate the arguments object.
__ Bind(&runtime);
__ Push(function, params, param_count_smi);
- __ TailCallRuntime(Runtime::kNewStrictArguments, 3, 1);
+ __ TailCallRuntime(Runtime::kNewStrictArguments);
+}
+
+
+void RestParamAccessStub::GenerateNew(MacroAssembler* masm) {
+ // x2 : number of parameters (tagged)
+ // x3 : parameters pointer
+ // x4 : rest parameter index (tagged)
+ //
+ // Returns pointer to result object in x0.
+
+ DCHECK(x2.is(ArgumentsAccessNewDescriptor::parameter_count()));
+ DCHECK(x3.is(RestParamAccessDescriptor::parameter_pointer()));
+ DCHECK(x4.is(RestParamAccessDescriptor::rest_parameter_index()));
+
+ // Get the stub arguments from the frame, and make an untagged copy of the
+ // parameter count.
+ Register rest_index_smi = x4;
+ Register param_count_smi = x2;
+ Register params = x3;
+ Register param_count = x13;
+ __ SmiUntag(param_count, param_count_smi);
+
+ // Test if arguments adaptor needed.
+ Register caller_fp = x11;
+ Register caller_ctx = x12;
+ Label runtime;
+ __ Ldr(caller_fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+ __ Ldr(caller_ctx,
+ MemOperand(caller_fp, StandardFrameConstants::kContextOffset));
+ __ Cmp(caller_ctx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ __ B(ne, &runtime);
+
+ // x4 rest_index_smi index of rest parameter
+ // x2 param_count_smi number of parameters passed to function (smi)
+ // x3 params pointer to parameters
+ // x11 caller_fp caller's frame pointer
+ // x13 param_count number of parameters passed to function
+
+ // Patch the argument length and parameters pointer.
+ __ Ldr(param_count_smi,
+ MemOperand(caller_fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
+ __ SmiUntag(param_count, param_count_smi);
+ __ Add(x10, caller_fp, Operand(param_count, LSL, kPointerSizeLog2));
+ __ Add(params, x10, StandardFrameConstants::kCallerSPOffset);
+
+ __ Bind(&runtime);
+ __ Push(param_count_smi, params, rest_index_smi);
+ __ TailCallRuntime(Runtime::kNewRestParam);
}
void RegExpExecStub::Generate(MacroAssembler* masm) {
#ifdef V8_INTERPRETED_REGEXP
- __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
+ __ TailCallRuntime(Runtime::kRegExpExec);
#else // V8_INTERPRETED_REGEXP
// Stack frame on entry.
@@ -2565,7 +2612,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ B(eq, &runtime);
// For exception, throw the exception again.
- __ TailCallRuntime(Runtime::kRegExpExecReThrow, 4, 1);
+ __ TailCallRuntime(Runtime::kRegExpExecReThrow);
__ Bind(&failure);
__ Mov(x0, Operand(isolate()->factory()->null_value()));
@@ -2574,7 +2621,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ Ret();
__ Bind(&runtime);
- __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
+ __ TailCallRuntime(Runtime::kRegExpExec);
// Deferred code for string handling.
// (6) Not a long external string? If yes, go to (8).
@@ -2622,25 +2669,17 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub,
Register argc, Register function,
Register feedback_vector, Register index,
- Register orig_construct, bool is_super) {
+ Register new_target) {
FrameScope scope(masm, StackFrame::INTERNAL);
// Number-of-arguments register must be smi-tagged to call out.
__ SmiTag(argc);
- if (is_super) {
- __ Push(argc, function, feedback_vector, index, orig_construct);
- } else {
- __ Push(argc, function, feedback_vector, index);
- }
+ __ Push(argc, function, feedback_vector, index);
DCHECK(feedback_vector.Is(x2) && index.Is(x3));
__ CallStub(stub);
- if (is_super) {
- __ Pop(orig_construct, index, feedback_vector, function, argc);
- } else {
- __ Pop(index, feedback_vector, function, argc);
- }
+ __ Pop(index, feedback_vector, function, argc);
__ SmiUntag(argc);
}
@@ -2648,19 +2687,17 @@ static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub,
static void GenerateRecordCallTarget(MacroAssembler* masm, Register argc,
Register function,
Register feedback_vector, Register index,
- Register orig_construct, Register scratch1,
- Register scratch2, Register scratch3,
- bool is_super) {
+ Register new_target, Register scratch1,
+ Register scratch2, Register scratch3) {
ASM_LOCATION("GenerateRecordCallTarget");
DCHECK(!AreAliased(scratch1, scratch2, scratch3, argc, function,
- feedback_vector, index, orig_construct));
+ feedback_vector, index, new_target));
// Cache the called function in a feedback vector slot. Cache states are
// uninitialized, monomorphic (indicated by a JSFunction), and megamorphic.
// argc : number of arguments to the construct function
// function : the function to call
// feedback_vector : the feedback vector
// index : slot in feedback vector (smi)
- // orig_construct : original constructor (for IsSuperConstructorCall)
Label initialize, done, miss, megamorphic, not_array_function;
DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()),
@@ -2703,7 +2740,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, Register argc,
__ JumpIfNotRoot(feedback_map, Heap::kAllocationSiteMapRootIndex, &miss);
// Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
+ __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, scratch1);
__ Cmp(function, scratch1);
__ B(ne, &megamorphic);
__ B(&done);
@@ -2727,7 +2764,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, Register argc,
__ Bind(&initialize);
// Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
+ __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, scratch1);
__ Cmp(function, scratch1);
__ B(ne, &not_array_function);
@@ -2736,13 +2773,13 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, Register argc,
// slot.
CreateAllocationSiteStub create_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &create_stub, argc, function,
- feedback_vector, index, orig_construct, is_super);
+ feedback_vector, index, new_target);
__ B(&done);
__ Bind(&not_array_function);
CreateWeakCellStub weak_cell_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &weak_cell_stub, argc, function,
- feedback_vector, index, orig_construct, is_super);
+ feedback_vector, index, new_target);
__ Bind(&done);
}
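With the is_super plumbing gone, the recorder keeps the three cache states its comment names. The transitions it implements, sketched as plain C++ (not a V8 API):

  enum class CacheState { kUninitialized, kMonomorphic, kMegamorphic };

  CacheState Record(CacheState s, bool same_callee) {
    switch (s) {
      case CacheState::kUninitialized:
        // WeakCell of the callee, or an AllocationSite for Array() itself.
        return CacheState::kMonomorphic;
      case CacheState::kMonomorphic:
        return same_callee ? s : CacheState::kMegamorphic;
      case CacheState::kMegamorphic:
        return s;  // sticky
    }
    return s;
  }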
@@ -2753,7 +2790,6 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
// x1 : the function to call
// x2 : feedback vector
// x3 : slot in feedback vector (Smi, for RecordCallTarget)
- // x4 : original constructor (for IsSuperConstructorCall)
Register function = x1;
Label non_function;
@@ -2764,28 +2800,21 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
__ JumpIfNotObjectType(function, object_type, object_type, JS_FUNCTION_TYPE,
&non_function);
- if (RecordCallTarget()) {
- GenerateRecordCallTarget(masm, x0, function, x2, x3, x4, x5, x11, x12,
- IsSuperConstructorCall());
-
- __ Add(x5, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2));
- Label feedback_register_initialized;
- // Put the AllocationSite from the feedback vector into x2, or undefined.
- __ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize));
- __ Ldr(x5, FieldMemOperand(x2, AllocationSite::kMapOffset));
- __ JumpIfRoot(x5, Heap::kAllocationSiteMapRootIndex,
- &feedback_register_initialized);
- __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
- __ bind(&feedback_register_initialized);
-
- __ AssertUndefinedOrAllocationSite(x2, x5);
- }
+ GenerateRecordCallTarget(masm, x0, function, x2, x3, x4, x5, x11, x12);
- if (IsSuperConstructorCall()) {
- __ Mov(x3, x4);
- } else {
- __ Mov(x3, function);
- }
+ __ Add(x5, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2));
+ Label feedback_register_initialized;
+ // Put the AllocationSite from the feedback vector into x2, or undefined.
+ __ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize));
+ __ Ldr(x5, FieldMemOperand(x2, AllocationSite::kMapOffset));
+ __ JumpIfRoot(x5, Heap::kAllocationSiteMapRootIndex,
+ &feedback_register_initialized);
+ __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
+ __ bind(&feedback_register_initialized);
+
+ __ AssertUndefinedOrAllocationSite(x2, x5);
+
+ __ Mov(x3, function);
// Tail call to the function-specific construct stub (still in the caller
// context at this point).
@@ -2811,7 +2840,7 @@ void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
Register allocation_site = x4;
Register scratch = x5;
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch);
+ __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, scratch);
__ Cmp(function, scratch);
__ B(ne, miss);
@@ -2828,9 +2857,9 @@ void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
// Set up arguments for the array constructor stub.
Register allocation_site_arg = feedback_vector;
- Register original_constructor_arg = index;
+ Register new_target_arg = index;
__ Mov(allocation_site_arg, allocation_site);
- __ Mov(original_constructor_arg, function);
+ __ Mov(new_target_arg, function);
ArrayConstructorStub stub(masm->isolate(), arg_count());
__ TailCallStub(&stub);
}
@@ -2842,11 +2871,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
// x1 - function
// x3 - slot id (Smi)
// x2 - vector
- const int with_types_offset =
- FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex);
- const int generic_offset =
- FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex);
- Label extra_checks_or_miss, call;
+ Label extra_checks_or_miss, call, call_function;
int argc = arg_count();
ParameterCount actual(argc);
@@ -2890,9 +2915,10 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ Add(index, index, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
__ Str(index, FieldMemOperand(feedback_vector, 0));
- __ bind(&call);
+ __ Bind(&call_function);
__ Mov(x0, argc);
- __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
+ __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode()),
+ RelocInfo::CODE_TARGET);
__ bind(&extra_checks_or_miss);
Label uninitialized, miss, not_allocation_site;
@@ -2922,14 +2948,11 @@ void CallICStub::Generate(MacroAssembler* masm) {
Operand::UntagSmiAndScale(index, kPointerSizeLog2));
__ LoadRoot(x5, Heap::kmegamorphic_symbolRootIndex);
__ Str(x5, FieldMemOperand(x4, FixedArray::kHeaderSize));
- // We have to update statistics for runtime profiling.
- __ Ldr(x4, FieldMemOperand(feedback_vector, with_types_offset));
- __ Subs(x4, x4, Operand(Smi::FromInt(1)));
- __ Str(x4, FieldMemOperand(feedback_vector, with_types_offset));
- __ Ldr(x4, FieldMemOperand(feedback_vector, generic_offset));
- __ Adds(x4, x4, Operand(Smi::FromInt(1)));
- __ Str(x4, FieldMemOperand(feedback_vector, generic_offset));
- __ B(&call);
+
+ __ Bind(&call);
+ __ Mov(x0, argc);
+ __ Jump(masm->isolate()->builtins()->Call(convert_mode()),
+ RelocInfo::CODE_TARGET);
__ bind(&uninitialized);
@@ -2941,14 +2964,16 @@ void CallICStub::Generate(MacroAssembler* masm) {
// Make sure the function is not the Array() function, which requires special
// behavior on MISS.
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, x5);
+ __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, x5);
__ Cmp(function, x5);
__ B(eq, &miss);
- // Update stats.
- __ Ldr(x4, FieldMemOperand(feedback_vector, with_types_offset));
- __ Adds(x4, x4, Operand(Smi::FromInt(1)));
- __ Str(x4, FieldMemOperand(feedback_vector, with_types_offset));
+ // Make sure the function belongs to the same native context.
+ __ Ldr(x4, FieldMemOperand(function, JSFunction::kContextOffset));
+ __ Ldr(x4, ContextMemOperand(x4, Context::NATIVE_CONTEXT_INDEX));
+ __ Ldr(x5, NativeContextMemOperand());
+ __ Cmp(x4, x5);
+ __ B(ne, &miss);
// Initialize the call counter.
__ Mov(x5, Smi::FromInt(CallICNexus::kCallCountIncrement));
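The deleted statistics bookkeeping gives way to a stronger admission check: a CallIC now only caches a callee created in its own native context, so cross-context functions take the miss path instead of occupying the slot. In effect (sketch):

  bool BelongsToCurrentNativeContext(JSFunction* fn, Context* cp) {
    return fn->context()->native_context() == cp->native_context();
  }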
@@ -2968,7 +2993,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ Pop(function);
}
- __ B(&call);
+ __ B(&call_function);
// We are here because tracing is on or we encountered a MISS case we can't
// handle here.
@@ -2988,7 +3013,7 @@ void CallICStub::GenerateMiss(MacroAssembler* masm) {
__ Push(x1, x2, x3);
// Call the entry.
- __ CallRuntime(Runtime::kCallIC_Miss, 3);
+ __ CallRuntime(Runtime::kCallIC_Miss);
// Move result to x1 and exit the internal frame.
__ Mov(x1, x0);
@@ -3046,11 +3071,11 @@ void StringCharCodeAtGenerator::GenerateSlow(
__ Push(object_, index_);
}
if (index_flags_ == STRING_INDEX_IS_NUMBER) {
- __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
+ __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero);
} else {
DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
// NumberToSmi discards numbers that are not exact integers.
- __ CallRuntime(Runtime::kNumberToSmi, 1);
+ __ CallRuntime(Runtime::kNumberToSmi);
}
// Save the conversion result before the pop instructions below
// have a chance to overwrite it.
@@ -3078,7 +3103,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
call_helper.BeforeCall(masm);
__ SmiTag(index_);
__ Push(object_, index_);
- __ CallRuntime(Runtime::kStringCharCodeAtRT, 2);
+ __ CallRuntime(Runtime::kStringCharCodeAtRT);
__ Mov(result_, x0);
call_helper.AfterCall(masm);
__ B(&exit_);
@@ -3109,7 +3134,7 @@ void StringCharFromCodeGenerator::GenerateSlow(
__ Bind(&slow_case_);
call_helper.BeforeCall(masm);
__ Push(code_);
- __ CallRuntime(Runtime::kStringCharFromCode, 1);
+ __ CallRuntime(Runtime::kStringCharFromCode);
__ Mov(result_, x0);
call_helper.AfterCall(masm);
__ B(&exit_);
@@ -3127,7 +3152,7 @@ void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
__ CheckMap(x1, x2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
__ CheckMap(x0, x3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
if (op() != Token::EQ_STRICT && is_strong(strength())) {
- __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0, 1);
+ __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion);
} else {
if (!Token::IsEqualityOp(op())) {
__ Ldr(x1, FieldMemOperand(x1, Oddball::kToNumberOffset));
@@ -3381,9 +3406,9 @@ void CompareICStub::GenerateStrings(MacroAssembler* masm) {
__ Bind(&runtime);
__ Push(lhs, rhs);
if (equality) {
- __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
+ __ TailCallRuntime(Runtime::kStringEquals);
} else {
- __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
+ __ TailCallRuntime(Runtime::kStringCompare);
}
__ Bind(&miss);
@@ -3391,9 +3416,9 @@ void CompareICStub::GenerateStrings(MacroAssembler* masm) {
}
-void CompareICStub::GenerateObjects(MacroAssembler* masm) {
- DCHECK(state() == CompareICState::OBJECT);
- ASM_LOCATION("CompareICStub[Objects]");
+void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
+ DCHECK_EQ(CompareICState::RECEIVER, state());
+ ASM_LOCATION("CompareICStub[Receivers]");
Label miss;
@@ -3403,10 +3428,11 @@ void CompareICStub::GenerateObjects(MacroAssembler* masm) {
__ JumpIfEitherSmi(rhs, lhs, &miss);
- __ JumpIfNotObjectType(rhs, x10, x10, JS_OBJECT_TYPE, &miss);
- __ JumpIfNotObjectType(lhs, x10, x10, JS_OBJECT_TYPE, &miss);
+ STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
+ __ JumpIfObjectType(rhs, x10, x10, FIRST_JS_RECEIVER_TYPE, &miss, lt);
+ __ JumpIfObjectType(lhs, x10, x10, FIRST_JS_RECEIVER_TYPE, &miss, lt);
- DCHECK(GetCondition() == eq);
+ DCHECK_EQ(eq, GetCondition());
__ Sub(result, rhs, lhs);
__ Ret();
@@ -3415,8 +3441,8 @@ void CompareICStub::GenerateObjects(MacroAssembler* masm) {
}
-void CompareICStub::GenerateKnownObjects(MacroAssembler* masm) {
- ASM_LOCATION("CompareICStub[KnownObjects]");
+void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
+ ASM_LOCATION("CompareICStub[KnownReceivers]");
Label miss;
Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
@@ -3442,7 +3468,7 @@ void CompareICStub::GenerateKnownObjects(MacroAssembler* masm) {
__ Sub(result, rhs, lhs);
__ Ret();
} else if (is_strong(strength())) {
- __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0, 1);
+ __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion);
} else {
Register ncr = x2;
if (op() == Token::LT || op() == Token::LTE) {
@@ -3451,7 +3477,7 @@ void CompareICStub::GenerateKnownObjects(MacroAssembler* masm) {
__ Mov(ncr, Smi::FromInt(LESS));
}
__ Push(lhs, rhs, ncr);
- __ TailCallRuntime(Runtime::kCompare, 3, 1);
+ __ TailCallRuntime(Runtime::kCompare);
}
__ Bind(&miss);
@@ -3479,7 +3505,7 @@ void CompareICStub::GenerateMiss(MacroAssembler* masm) {
__ Push(left, right, op);
// Call the miss handler. This also pops the arguments.
- __ CallRuntime(Runtime::kCompareIC_Miss, 3);
+ __ CallRuntime(Runtime::kCompareIC_Miss);
// Compute the entry point of the rewritten stub.
__ Add(stub_entry, x0, Code::kHeaderSize - kHeapObjectTag);
@@ -3725,7 +3751,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
__ Ret();
__ Bind(&runtime);
- __ TailCallRuntime(Runtime::kSubString, 3, 1);
+ __ TailCallRuntime(Runtime::kSubString);
__ bind(&single_char);
// x1: result_length
@@ -3771,7 +3797,7 @@ void ToNumberStub::Generate(MacroAssembler* masm) {
__ Ret();
__ Bind(&slow_string);
__ Push(x0); // Push argument.
- __ TailCallRuntime(Runtime::kStringToNumber, 1, 1);
+ __ TailCallRuntime(Runtime::kStringToNumber);
__ Bind(&not_string);
Label not_oddball;
@@ -3782,7 +3808,7 @@ void ToNumberStub::Generate(MacroAssembler* masm) {
__ Bind(&not_oddball);
__ Push(x0); // Push argument.
- __ TailCallRuntime(Runtime::kToNumber, 1, 1);
+ __ TailCallRuntime(Runtime::kToNumber);
}
@@ -3797,7 +3823,7 @@ void ToLengthStub::Generate(MacroAssembler* masm) {
__ Bind(&not_smi);
__ Push(x0); // Push argument.
- __ TailCallRuntime(Runtime::kToLength, 1, 1);
+ __ TailCallRuntime(Runtime::kToLength);
}
@@ -3829,7 +3855,7 @@ void ToStringStub::Generate(MacroAssembler* masm) {
__ Bind(&not_oddball);
__ Push(x0); // Push argument.
- __ TailCallRuntime(Runtime::kToString, 1, 1);
+ __ TailCallRuntime(Runtime::kToString);
}
@@ -3973,7 +3999,7 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
// Returns -1 (less), 0 (equal), or 1 (greater) tagged as a small integer.
__ Bind(&runtime);
__ Push(x1, x0);
- __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
+ __ TailCallRuntime(Runtime::kStringCompare);
}
@@ -4115,12 +4141,12 @@ void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
// We need extra registers for this, so we push the object and the address
// register temporarily.
__ Push(regs_.address(), regs_.object());
- __ EnsureNotWhite(val,
- regs_.scratch1(), // Scratch.
- regs_.object(), // Scratch.
- regs_.address(), // Scratch.
- regs_.scratch2(), // Scratch.
- &need_incremental_pop_scratch);
+ __ JumpIfWhite(val,
+ regs_.scratch1(), // Scratch.
+ regs_.object(), // Scratch.
+ regs_.address(), // Scratch.
+ regs_.scratch2(), // Scratch.
+ &need_incremental_pop_scratch);
__ Pop(regs_.object(), regs_.address());
regs_.Restore(masm); // Restore the extra scratch registers we used.
@@ -4170,76 +4196,6 @@ void RecordWriteStub::Generate(MacroAssembler* masm) {
}
-void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
- // x0 value element value to store
- // x3 index_smi element index as smi
- // sp[0] array_index_smi array literal index in function as smi
- // sp[1] array array literal
-
- Register value = x0;
- Register index_smi = x3;
-
- Register array = x1;
- Register array_map = x2;
- Register array_index_smi = x4;
- __ PeekPair(array_index_smi, array, 0);
- __ Ldr(array_map, FieldMemOperand(array, JSObject::kMapOffset));
-
- Label double_elements, smi_element, fast_elements, slow_elements;
- Register bitfield2 = x10;
- __ Ldrb(bitfield2, FieldMemOperand(array_map, Map::kBitField2Offset));
-
- // Jump if array's ElementsKind is not FAST*_SMI_ELEMENTS, FAST_ELEMENTS or
- // FAST_HOLEY_ELEMENTS.
- STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
- STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
- STATIC_ASSERT(FAST_ELEMENTS == 2);
- STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
- __ Cmp(bitfield2, Map::kMaximumBitField2FastHoleyElementValue);
- __ B(hi, &double_elements);
-
- __ JumpIfSmi(value, &smi_element);
-
- // Jump if array's ElementsKind is not FAST_ELEMENTS or FAST_HOLEY_ELEMENTS.
- __ Tbnz(bitfield2, MaskToBit(FAST_ELEMENTS << Map::ElementsKindBits::kShift),
- &fast_elements);
-
- // Store into the array literal requires an elements transition. Call into
- // the runtime.
- __ Bind(&slow_elements);
- __ Push(array, index_smi, value);
- __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
- __ Ldr(x11, FieldMemOperand(x10, JSFunction::kLiteralsOffset));
- __ Push(x11, array_index_smi);
- __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
-
- // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
- __ Bind(&fast_elements);
- __ Ldr(x10, FieldMemOperand(array, JSObject::kElementsOffset));
- __ Add(x11, x10, Operand::UntagSmiAndScale(index_smi, kPointerSizeLog2));
- __ Add(x11, x11, FixedArray::kHeaderSize - kHeapObjectTag);
- __ Str(value, MemOperand(x11));
- // Update the write barrier for the array store.
- __ RecordWrite(x10, x11, value, kLRHasNotBeenSaved, kDontSaveFPRegs,
- EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
- __ Ret();
-
- // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS,
- // and value is Smi.
- __ Bind(&smi_element);
- __ Ldr(x10, FieldMemOperand(array, JSObject::kElementsOffset));
- __ Add(x11, x10, Operand::UntagSmiAndScale(index_smi, kPointerSizeLog2));
- __ Str(value, FieldMemOperand(x11, FixedArray::kHeaderSize));
- __ Ret();
-
- __ Bind(&double_elements);
- __ Ldr(x10, FieldMemOperand(array, JSObject::kElementsOffset));
- __ StoreNumberToDoubleElements(value, index_smi, x10, x11, d0,
- &slow_elements);
- __ Ret();
-}
-
-
void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
CEntryStub ces(isolate(), 1, kSaveFPRegs);
__ Call(ces.GetCode(), RelocInfo::CODE_TARGET);
@@ -5230,12 +5186,12 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
// -- x0 : argc (only if argument_count() is ANY or MORE_THAN_ONE)
// -- x1 : constructor
// -- x2 : AllocationSite or undefined
- // -- x3 : original constructor
+ // -- x3 : new target
// -- sp[0] : last argument
// -----------------------------------
Register constructor = x1;
Register allocation_site = x2;
- Register original_constructor = x3;
+ Register new_target = x3;
if (FLAG_debug_code) {
// The array construct code is only set for the global and natives
@@ -5257,8 +5213,11 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
__ AssertUndefinedOrAllocationSite(allocation_site, x10);
}
+ // Enter the context of the Array function.
+ __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));
+
Label subclassing;
- __ Cmp(original_constructor, constructor);
+ __ Cmp(new_target, constructor);
__ B(ne, &subclassing);
Register kind = x3;
@@ -5277,22 +5236,23 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
// Subclassing support.
__ Bind(&subclassing);
- __ Push(constructor, original_constructor);
- // Adjust argc.
switch (argument_count()) {
case ANY:
case MORE_THAN_ONE:
- __ add(x0, x0, Operand(2));
+ __ Poke(constructor, Operand(x0, LSL, kPointerSizeLog2));
+ __ Add(x0, x0, Operand(3));
break;
case NONE:
- __ Mov(x0, Operand(2));
+ __ Poke(constructor, 0 * kPointerSize);
+ __ Mov(x0, Operand(3));
break;
case ONE:
- __ Mov(x0, Operand(3));
+ __ Poke(constructor, 1 * kPointerSize);
+ __ Mov(x0, Operand(4));
break;
}
- __ JumpToExternalReference(
- ExternalReference(Runtime::kArrayConstructorWithSubclassing, isolate()));
+ __ Push(new_target, allocation_site);
+ __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
}
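The subclassing path now targets Runtime::kNewArray rather than kArrayConstructorWithSubclassing: the constructor is poked over the receiver slot and two extras are pushed, which is why each argc adjustment grows from 2 to 3. A hedged reconstruction of the resulting stack (MORE_THAN_ONE case; offsets in pointers, assuming Push(a, b) leaves b on top as elsewhere in this file):

  // sp[0]           allocation site
  // sp[1]           new target
  // sp[2..argc+1]   the argc user arguments
  // sp[argc+2]      constructor (poked over the receiver slot)
  // x0 = argc + 3   argument count seen by Runtime::kNewArray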
@@ -5408,7 +5368,7 @@ void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
__ Bind(&slow_case);
__ SmiTag(slot);
__ Push(slot);
- __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 1, 1);
+ __ TailCallRuntime(Runtime::kLoadGlobalViaContext);
}
@@ -5528,8 +5488,7 @@ void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
__ Push(slot, value);
__ TailCallRuntime(is_strict(language_mode())
? Runtime::kStoreGlobalViaContext_Strict
- : Runtime::kStoreGlobalViaContext_Sloppy,
- 2, 1);
+ : Runtime::kStoreGlobalViaContext_Sloppy);
}
@@ -5682,7 +5641,7 @@ static void CallApiFunctionAndReturn(
// Re-throw by promoting a scheduled exception.
__ Bind(&promote_scheduled_exception);
- __ TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
+ __ TailCallRuntime(Runtime::kPromoteScheduledException);
// HandleScope limit has changed. Delete allocated extensions.
__ Bind(&delete_allocated_handles);