path: root/deps/v8/src/arm/code-stubs-arm.cc
Diffstat (limited to 'deps/v8/src/arm/code-stubs-arm.cc')
-rw-r--r--  deps/v8/src/arm/code-stubs-arm.cc | 795
1 file changed, 234 insertions(+), 561 deletions(-)
diff --git a/deps/v8/src/arm/code-stubs-arm.cc b/deps/v8/src/arm/code-stubs-arm.cc
index 31e3e95f03..264f24f8da 100644
--- a/deps/v8/src/arm/code-stubs-arm.cc
+++ b/deps/v8/src/arm/code-stubs-arm.cc
@@ -22,60 +22,15 @@
namespace v8 {
namespace internal {
+#define __ ACCESS_MASM(masm)
-static void InitializeArrayConstructorDescriptor(
- Isolate* isolate, CodeStubDescriptor* descriptor,
- int constant_stack_parameter_count) {
- Address deopt_handler = Runtime::FunctionForId(
- Runtime::kArrayConstructor)->entry;
-
- if (constant_stack_parameter_count == 0) {
- descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
- JS_FUNCTION_STUB_MODE);
- } else {
- descriptor->Initialize(r0, deopt_handler, constant_stack_parameter_count,
- JS_FUNCTION_STUB_MODE);
- }
-}
-
-
-static void InitializeInternalArrayConstructorDescriptor(
- Isolate* isolate, CodeStubDescriptor* descriptor,
- int constant_stack_parameter_count) {
- Address deopt_handler = Runtime::FunctionForId(
- Runtime::kInternalArrayConstructor)->entry;
-
- if (constant_stack_parameter_count == 0) {
- descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
- JS_FUNCTION_STUB_MODE);
- } else {
- descriptor->Initialize(r0, deopt_handler, constant_stack_parameter_count,
- JS_FUNCTION_STUB_MODE);
- }
-}
-
-
-void ArrayNoArgumentConstructorStub::InitializeDescriptor(
- CodeStubDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate(), descriptor, 0);
-}
-
-
-void ArraySingleArgumentConstructorStub::InitializeDescriptor(
- CodeStubDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate(), descriptor, 1);
-}
-
-
-void ArrayNArgumentsConstructorStub::InitializeDescriptor(
- CodeStubDescriptor* descriptor) {
- InitializeArrayConstructorDescriptor(isolate(), descriptor, -1);
-}
-
-
-void InternalArrayNoArgumentConstructorStub::InitializeDescriptor(
- CodeStubDescriptor* descriptor) {
- InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 0);
+void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
+ __ lsl(r5, r0, Operand(kPointerSizeLog2));
+ __ str(r1, MemOperand(sp, r5));
+ __ Push(r1);
+ __ Push(r2);
+ __ add(r0, r0, Operand(3));
+ __ TailCallRuntime(Runtime::kNewArray);
}
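The three descriptor-based array-constructor initializers collapse into a single stub that tail-calls the runtime. An annotated reading of the hunk above (the register roles are an assumption based on the array-constructor convention used elsewhere in this file: r0 = argument count, r1 = constructor function, r2 = allocation site or undefined):

  __ lsl(r5, r0, Operand(kPointerSizeLog2));  // r5 = argc * kPointerSize
  __ str(r1, MemOperand(sp, r5));             // store constructor one slot above the arguments
  __ Push(r1);                                // constructor function
  __ Push(r2);                                // allocation site (or undefined)
  __ add(r0, r0, Operand(3));                 // argc now also covers the three extra values
  __ TailCallRuntime(Runtime::kNewArray);     // let %NewArray do the dispatch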
void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
@@ -83,20 +38,12 @@ void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
descriptor->Initialize(r0, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}
-void InternalArraySingleArgumentConstructorStub::InitializeDescriptor(
+void FastFunctionBindStub::InitializeDescriptor(
CodeStubDescriptor* descriptor) {
- InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 1);
-}
-
-
-void InternalArrayNArgumentsConstructorStub::InitializeDescriptor(
- CodeStubDescriptor* descriptor) {
- InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, -1);
+ Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry;
+ descriptor->Initialize(r0, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}
-
-#define __ ACCESS_MASM(masm)
-
static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
Condition cond);
static void EmitSmiNonsmiComparison(MacroAssembler* masm,
@@ -747,53 +694,19 @@ void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
__ ldm(ia_w, sp, kCallerSaved | pc.bit()); // Also pop pc to get Ret(0).
}
-
void MathPowStub::Generate(MacroAssembler* masm) {
- const Register base = r1;
const Register exponent = MathPowTaggedDescriptor::exponent();
DCHECK(exponent.is(r2));
- const Register heapnumbermap = r5;
- const Register heapnumber = r0;
- const DwVfpRegister double_base = d0;
- const DwVfpRegister double_exponent = d1;
- const DwVfpRegister double_result = d2;
- const DwVfpRegister double_scratch = d3;
+ const LowDwVfpRegister double_base = d0;
+ const LowDwVfpRegister double_exponent = d1;
+ const LowDwVfpRegister double_result = d2;
+ const LowDwVfpRegister double_scratch = d3;
const SwVfpRegister single_scratch = s6;
const Register scratch = r9;
const Register scratch2 = r4;
Label call_runtime, done, int_exponent;
- if (exponent_type() == ON_STACK) {
- Label base_is_smi, unpack_exponent;
- // The exponent and base are supplied as arguments on the stack.
- // This can only happen if the stub is called from non-optimized code.
- // Load input parameters from stack to double registers.
- __ ldr(base, MemOperand(sp, 1 * kPointerSize));
- __ ldr(exponent, MemOperand(sp, 0 * kPointerSize));
-
- __ LoadRoot(heapnumbermap, Heap::kHeapNumberMapRootIndex);
-
- __ UntagAndJumpIfSmi(scratch, base, &base_is_smi);
- __ ldr(scratch, FieldMemOperand(base, JSObject::kMapOffset));
- __ cmp(scratch, heapnumbermap);
- __ b(ne, &call_runtime);
-
- __ vldr(double_base, FieldMemOperand(base, HeapNumber::kValueOffset));
- __ jmp(&unpack_exponent);
-
- __ bind(&base_is_smi);
- __ vmov(single_scratch, scratch);
- __ vcvt_f64_s32(double_base, single_scratch);
- __ bind(&unpack_exponent);
-
- __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);
-
- __ ldr(scratch, FieldMemOperand(exponent, JSObject::kMapOffset));
- __ cmp(scratch, heapnumbermap);
- __ b(ne, &call_runtime);
- __ vldr(double_exponent,
- FieldMemOperand(exponent, HeapNumber::kValueOffset));
- } else if (exponent_type() == TAGGED) {
+ if (exponent_type() == TAGGED) {
// Base is already in double_base.
__ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);
@@ -802,57 +715,9 @@ void MathPowStub::Generate(MacroAssembler* masm) {
}
if (exponent_type() != INTEGER) {
- Label int_exponent_convert;
// Detect integer exponents stored as double.
- __ vcvt_u32_f64(single_scratch, double_exponent);
- // We do not check for NaN or Infinity here because comparing numbers on
- // ARM correctly distinguishes NaNs. We end up calling the built-in.
- __ vcvt_f64_u32(double_scratch, single_scratch);
- __ VFPCompareAndSetFlags(double_scratch, double_exponent);
- __ b(eq, &int_exponent_convert);
-
- if (exponent_type() == ON_STACK) {
- // Detect square root case. Crankshaft detects constant +/-0.5 at
- // compile time and uses DoMathPowHalf instead. We then skip this check
- // for non-constant cases of +/-0.5 as these hardly occur.
- Label not_plus_half;
-
- // Test for 0.5.
- __ vmov(double_scratch, 0.5, scratch);
- __ VFPCompareAndSetFlags(double_exponent, double_scratch);
- __ b(ne, &not_plus_half);
-
- // Calculates square root of base. Check for the special case of
- // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
- __ vmov(double_scratch, -V8_INFINITY, scratch);
- __ VFPCompareAndSetFlags(double_base, double_scratch);
- __ vneg(double_result, double_scratch, eq);
- __ b(eq, &done);
-
- // Add +0 to convert -0 to +0.
- __ vadd(double_scratch, double_base, kDoubleRegZero);
- __ vsqrt(double_result, double_scratch);
- __ jmp(&done);
-
- __ bind(&not_plus_half);
- __ vmov(double_scratch, -0.5, scratch);
- __ VFPCompareAndSetFlags(double_exponent, double_scratch);
- __ b(ne, &call_runtime);
-
- // Calculates square root of base. Check for the special case of
- // Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
- __ vmov(double_scratch, -V8_INFINITY, scratch);
- __ VFPCompareAndSetFlags(double_base, double_scratch);
- __ vmov(double_result, kDoubleRegZero, eq);
- __ b(eq, &done);
-
- // Add +0 to convert -0 to +0.
- __ vadd(double_scratch, double_base, kDoubleRegZero);
- __ vmov(double_result, 1.0, scratch);
- __ vsqrt(double_scratch, double_scratch);
- __ vdiv(double_result, double_result, double_scratch);
- __ jmp(&done);
- }
+ __ TryDoubleToInt32Exact(scratch, double_exponent, double_scratch);
+ __ b(eq, &int_exponent);
__ push(lr);
{
@@ -860,16 +725,11 @@ void MathPowStub::Generate(MacroAssembler* masm) {
__ PrepareCallCFunction(0, 2, scratch);
__ MovToFloatParameters(double_base, double_exponent);
__ CallCFunction(
- ExternalReference::power_double_double_function(isolate()),
- 0, 2);
+ ExternalReference::power_double_double_function(isolate()), 0, 2);
}
__ pop(lr);
__ MovFromFloatResult(double_result);
- __ jmp(&done);
-
- __ bind(&int_exponent_convert);
- __ vcvt_u32_f64(single_scratch, double_exponent);
- __ vmov(scratch, single_scratch);
+ __ b(&done);
}
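The hand-rolled vcvt round trip (and the whole ON_STACK ±0.5 square-root fast path) is gone; detection now goes through the TryDoubleToInt32Exact macro, which converts the exponent to an int32, converts back, and compares exactly, branching to the integer path on equality. A scalar C++ sketch of that check, for readers without the macro at hand (names here are illustrative, not V8's):

#include <cmath>
#include <cstdint>

// Convert, convert back, compare: exact int32 values survive the round
// trip; NaN, infinities, fractions and out-of-range values do not.
bool IsExactInt32(double d, int32_t* out) {
  if (std::isnan(d)) return false;  // casting NaN to int is UB in C++
  if (d < INT32_MIN || d > INT32_MAX) return false;
  int32_t i = static_cast<int32_t>(d);           // truncates toward zero
  if (static_cast<double>(i) != d) return false;
  *out = i;
  return true;
}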
// Calculate power with integer exponent.
@@ -887,12 +747,11 @@ void MathPowStub::Generate(MacroAssembler* masm) {
// Get absolute value of exponent.
__ cmp(scratch, Operand::Zero());
- __ mov(scratch2, Operand::Zero(), LeaveCC, mi);
- __ sub(scratch, scratch2, scratch, LeaveCC, mi);
+ __ rsb(scratch, scratch, Operand::Zero(), LeaveCC, mi);
Label while_true;
__ bind(&while_true);
- __ mov(scratch, Operand(scratch, ASR, 1), SetCC);
+ __ mov(scratch, Operand(scratch, LSR, 1), SetCC);
__ vmul(double_result, double_result, double_scratch, cs);
__ vmul(double_scratch, double_scratch, double_scratch, ne);
__ b(ne, &while_true);
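The while_true loop is binary exponentiation: each iteration shifts the exponent right (the carry flag receives the bit shifted out), multiplies the result by the running power when that bit was set, and squares the power while bits remain. The ASR-to-LSR change matters for one edge case: the rsb above leaves an exponent of INT_MIN negative, and an arithmetic shift of a negative value never reaches zero, while a logical shift terminates and treats the magnitude as the unsigned 2^31 it actually is. A scalar equivalent, with the predicated instructions mapped in comments:

#include <cstdint>

double PowIntExponent(double base, uint32_t exp) {
  double result = 1.0;   // double_result
  double power = base;   // double_scratch
  while (exp != 0) {
    bool low_bit = (exp & 1) != 0;  // the bit LSR shifts into the carry flag
    exp >>= 1;                      // mov(scratch, Operand(scratch, LSR, 1), SetCC)
    if (low_bit) result *= power;   // vmul(double_result, ..., cs)
    if (exp != 0) power *= power;   // vmul(double_scratch, ..., ne); b(ne, &while_true)
  }
  return result;
}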
@@ -911,38 +770,20 @@ void MathPowStub::Generate(MacroAssembler* masm) {
__ vcvt_f64_s32(double_exponent, single_scratch);
// Returning or bailing out.
- if (exponent_type() == ON_STACK) {
- // The arguments are still on the stack.
- __ bind(&call_runtime);
- __ TailCallRuntime(Runtime::kMathPowRT);
-
- // The stub is called from non-optimized code, which expects the result
- // as heap number in exponent.
- __ bind(&done);
- __ AllocateHeapNumber(
- heapnumber, scratch, scratch2, heapnumbermap, &call_runtime);
- __ vstr(double_result,
- FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
- DCHECK(heapnumber.is(r0));
- __ Ret(2);
- } else {
- __ push(lr);
- {
- AllowExternalCallThatCantCauseGC scope(masm);
- __ PrepareCallCFunction(0, 2, scratch);
- __ MovToFloatParameters(double_base, double_exponent);
- __ CallCFunction(
- ExternalReference::power_double_double_function(isolate()),
- 0, 2);
- }
- __ pop(lr);
- __ MovFromFloatResult(double_result);
-
- __ bind(&done);
- __ Ret();
+ __ push(lr);
+ {
+ AllowExternalCallThatCantCauseGC scope(masm);
+ __ PrepareCallCFunction(0, 2, scratch);
+ __ MovToFloatParameters(double_base, double_exponent);
+ __ CallCFunction(ExternalReference::power_double_double_function(isolate()),
+ 0, 2);
}
-}
+ __ pop(lr);
+ __ MovFromFloatResult(double_result);
+ __ bind(&done);
+ __ Ret();
+}
bool CEntryStub::NeedsImmovableCode() {
return true;
@@ -953,13 +794,12 @@ void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
CEntryStub::GenerateAheadOfTime(isolate);
StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
- ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
+ CommonArrayConstructorStub::GenerateStubsAheadOfTime(isolate);
CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
CreateWeakCellStub::GenerateAheadOfTime(isolate);
BinaryOpICStub::GenerateAheadOfTime(isolate);
BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
StoreFastElementStub::GenerateAheadOfTime(isolate);
- TypeofStub::GenerateAheadOfTime(isolate);
}
@@ -1003,7 +843,9 @@ void CEntryStub::Generate(MacroAssembler* masm) {
// Enter the exit frame that transitions from JavaScript to C++.
FrameScope scope(masm, StackFrame::MANUAL);
- __ EnterExitFrame(save_doubles());
+ __ EnterExitFrame(save_doubles(), 0, is_builtin_exit()
+ ? StackFrame::BUILTIN_EXIT
+ : StackFrame::EXIT);
// Store a copy of argc in callee-saved registers for later.
__ mov(r4, Operand(r0));
@@ -1072,8 +914,6 @@ void CEntryStub::Generate(MacroAssembler* masm) {
}
// Result returned in r0, r1:r0 or r2:r1:r0 - do not destroy these registers!
- __ VFPEnsureFPSCRState(r3);
-
// Check result for exception sentinel.
Label exception_returned;
__ CompareRoot(r0, Heap::kExceptionRootIndex);
@@ -1183,7 +1023,6 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
__ vstm(db_w, sp, kFirstCalleeSavedDoubleReg, kLastCalleeSavedDoubleReg);
// Set up the reserved register for 0.0.
__ vmov(kDoubleRegZero, 0.0);
- __ VFPEnsureFPSCRState(r4);
// Get address of argv, see stm above.
// r0: code entry
@@ -1266,12 +1105,6 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
// restores all kCalleeSaved registers (including cp and fp) to their
// saved values before returning a failure to C.
- // Clear any pending exceptions.
- __ mov(r5, Operand(isolate()->factory()->the_hole_value()));
- __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
- isolate())));
- __ str(r5, MemOperand(ip));
-
// Invoke the function by calling through JS entry trampoline builtin.
// Notice that we cannot store a reference to the trampoline code directly in
// this stub, because runtime stubs are not traversed when doing GC.
@@ -1333,126 +1166,6 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
}
-void InstanceOfStub::Generate(MacroAssembler* masm) {
- Register const object = r1; // Object (lhs).
- Register const function = r0; // Function (rhs).
- Register const object_map = r2; // Map of {object}.
- Register const function_map = r3; // Map of {function}.
- Register const function_prototype = r4; // Prototype of {function}.
- Register const scratch = r5;
-
- DCHECK(object.is(InstanceOfDescriptor::LeftRegister()));
- DCHECK(function.is(InstanceOfDescriptor::RightRegister()));
-
- // Check if {object} is a smi.
- Label object_is_smi;
- __ JumpIfSmi(object, &object_is_smi);
-
- // Lookup the {function} and the {object} map in the global instanceof cache.
- // Note: This is safe because we clear the global instanceof cache whenever
- // we change the prototype of any object.
- Label fast_case, slow_case;
- __ ldr(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
- __ CompareRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
- __ b(ne, &fast_case);
- __ CompareRoot(object_map, Heap::kInstanceofCacheMapRootIndex);
- __ b(ne, &fast_case);
- __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
- __ Ret();
-
- // If {object} is a smi we can safely return false if {function} is a JS
- // function, otherwise we have to miss to the runtime and throw an exception.
- __ bind(&object_is_smi);
- __ JumpIfSmi(function, &slow_case);
- __ CompareObjectType(function, function_map, scratch, JS_FUNCTION_TYPE);
- __ b(ne, &slow_case);
- __ LoadRoot(r0, Heap::kFalseValueRootIndex);
- __ Ret();
-
- // Fast-case: The {function} must be a valid JSFunction.
- __ bind(&fast_case);
- __ JumpIfSmi(function, &slow_case);
- __ CompareObjectType(function, function_map, scratch, JS_FUNCTION_TYPE);
- __ b(ne, &slow_case);
-
- // Go to the runtime if the function is not a constructor.
- __ ldrb(scratch, FieldMemOperand(function_map, Map::kBitFieldOffset));
- __ tst(scratch, Operand(1 << Map::kIsConstructor));
- __ b(eq, &slow_case);
-
- // Ensure that {function} has an instance prototype.
- __ tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
- __ b(ne, &slow_case);
-
- // Get the "prototype" (or initial map) of the {function}.
- __ ldr(function_prototype,
- FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
- __ AssertNotSmi(function_prototype);
-
- // Resolve the prototype if the {function} has an initial map. Afterwards the
- // {function_prototype} will be either the JSReceiver prototype object or the
- // hole value, which means that no instances of the {function} were created so
- // far and hence we should return false.
- Label function_prototype_valid;
- __ CompareObjectType(function_prototype, scratch, scratch, MAP_TYPE);
- __ b(ne, &function_prototype_valid);
- __ ldr(function_prototype,
- FieldMemOperand(function_prototype, Map::kPrototypeOffset));
- __ bind(&function_prototype_valid);
- __ AssertNotSmi(function_prototype);
-
- // Update the global instanceof cache with the current {object} map and
- // {function}. The cached answer will be set when it is known below.
- __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
- __ StoreRoot(object_map, Heap::kInstanceofCacheMapRootIndex);
-
- // Loop through the prototype chain looking for the {function} prototype.
- // Assume true, and change to false if not found.
- Register const object_instance_type = function_map;
- Register const map_bit_field = function_map;
- Register const null = scratch;
- Register const result = r0;
-
- Label done, loop, fast_runtime_fallback;
- __ LoadRoot(result, Heap::kTrueValueRootIndex);
- __ LoadRoot(null, Heap::kNullValueRootIndex);
- __ bind(&loop);
-
- // Check if the object needs to be access checked.
- __ ldrb(map_bit_field, FieldMemOperand(object_map, Map::kBitFieldOffset));
- __ tst(map_bit_field, Operand(1 << Map::kIsAccessCheckNeeded));
- __ b(ne, &fast_runtime_fallback);
- // Check if the current object is a Proxy.
- __ CompareInstanceType(object_map, object_instance_type, JS_PROXY_TYPE);
- __ b(eq, &fast_runtime_fallback);
-
- __ ldr(object, FieldMemOperand(object_map, Map::kPrototypeOffset));
- __ cmp(object, function_prototype);
- __ b(eq, &done);
- __ cmp(object, null);
- __ ldr(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
- __ b(ne, &loop);
- __ LoadRoot(result, Heap::kFalseValueRootIndex);
- __ bind(&done);
- __ StoreRoot(result, Heap::kInstanceofCacheAnswerRootIndex);
- __ Ret();
-
- // Found Proxy or access check needed: Call the runtime
- __ bind(&fast_runtime_fallback);
- __ Push(object, function_prototype);
- // Invalidate the instanceof cache.
- __ Move(scratch, Smi::FromInt(0));
- __ StoreRoot(scratch, Heap::kInstanceofCacheFunctionRootIndex);
- __ TailCallRuntime(Runtime::kHasInPrototypeChain);
-
- // Slow-case: Call the %InstanceOf runtime function.
- __ bind(&slow_case);
- __ Push(object, function);
- __ TailCallRuntime(is_es6_instanceof() ? Runtime::kOrdinaryHasInstance
- : Runtime::kInstanceOf);
-}
-
-
void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
Label miss;
Register receiver = LoadDescriptor::ReceiverRegister();
@@ -1488,7 +1201,6 @@ void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
&miss, // When not a string.
&miss, // When not a number.
&miss, // When index out of range.
- STRING_INDEX_IS_ARRAY_INDEX,
RECEIVER_IS_STRING);
char_at_generator.GenerateFast(masm);
__ Ret();
@@ -1787,9 +1499,9 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ ldr(r0, MemOperand(sp, kLastMatchInfoOffset));
__ JumpIfSmi(r0, &runtime);
- __ CompareObjectType(r0, r2, r2, JS_ARRAY_TYPE);
+ __ CompareObjectType(r0, r2, r2, JS_OBJECT_TYPE);
__ b(ne, &runtime);
- // Check that the JSArray is in fast case.
+ // Check that the object has fast elements.
__ ldr(last_match_info_elements,
FieldMemOperand(r0, JSArray::kElementsOffset));
__ ldr(r0, FieldMemOperand(last_match_info_elements, HeapObject::kMapOffset));
@@ -1915,9 +1627,11 @@ static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
// Number-of-arguments register must be smi-tagged to call out.
__ SmiTag(r0);
__ Push(r3, r2, r1, r0);
+ __ Push(cp);
__ CallStub(stub);
+ __ Pop(cp);
__ Pop(r3, r2, r1, r0);
__ SmiUntag(r0);
}
@@ -1932,6 +1646,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
// r2 : feedback vector
// r3 : slot in feedback vector (Smi)
Label initialize, done, miss, megamorphic, not_array_function;
+ Label done_initialize_count, done_increment_count;
DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()),
masm->isolate()->heap()->megamorphic_symbol());
@@ -1951,7 +1666,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
Register weak_value = r9;
__ ldr(weak_value, FieldMemOperand(r5, WeakCell::kValueOffset));
__ cmp(r1, weak_value);
- __ b(eq, &done);
+ __ b(eq, &done_increment_count);
__ CompareRoot(r5, Heap::kmegamorphic_symbolRootIndex);
__ b(eq, &done);
__ ldr(feedback_map, FieldMemOperand(r5, HeapObject::kMapOffset));
@@ -1974,7 +1689,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
__ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r5);
__ cmp(r1, r5);
__ b(ne, &megamorphic);
- __ jmp(&done);
+ __ jmp(&done_increment_count);
__ bind(&miss);
@@ -2003,11 +1718,28 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
// slot.
CreateAllocationSiteStub create_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &create_stub);
- __ b(&done);
+ __ b(&done_initialize_count);
__ bind(&not_array_function);
CreateWeakCellStub weak_cell_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &weak_cell_stub);
+
+ __ bind(&done_initialize_count);
+ // Initialize the call counter.
+ __ Move(r5, Operand(Smi::FromInt(1)));
+ __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
+ __ str(r5, FieldMemOperand(r4, FixedArray::kHeaderSize + kPointerSize));
+ __ b(&done);
+
+ __ bind(&done_increment_count);
+
+ // Increment the call count for monomorphic function calls.
+ __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
+ __ add(r5, r5, Operand(FixedArray::kHeaderSize + kPointerSize));
+ __ ldr(r4, FieldMemOperand(r5, 0));
+ __ add(r4, r4, Operand(Smi::FromInt(1)));
+ __ str(r4, FieldMemOperand(r5, 0));
+
__ bind(&done);
}
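The counter code above leans on the Smi representation: on 32-bit ARM a Smi is the integer shifted left one bit with a zero tag bit, so tagged counts can be initialized and incremented with ordinary adds, with no untag/retag pair. A standalone illustration (not V8 code):

#include <cstdint>

// 32-bit Smi encoding: value << 1, tag bit 0. Because the tag is zero,
// adding two tagged values produces the correctly tagged sum.
inline int32_t SmiTag(int32_t value) { return value << 1; }
inline int32_t SmiUntag(int32_t tagged) { return tagged >> 1; }

int main() {
  int32_t count = SmiTag(1);  // the initial call count written above
  count += SmiTag(1);         // mirrors add(r4, r4, Operand(Smi::FromInt(1)))
  return SmiUntag(count);     // 2
}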
@@ -2069,7 +1801,7 @@ void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
__ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3));
__ add(r2, r2, Operand(FixedArray::kHeaderSize + kPointerSize));
__ ldr(r3, FieldMemOperand(r2, 0));
- __ add(r3, r3, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
+ __ add(r3, r3, Operand(Smi::FromInt(1)));
__ str(r3, FieldMemOperand(r2, 0));
__ mov(r2, r4);
@@ -2117,7 +1849,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3));
__ add(r2, r2, Operand(FixedArray::kHeaderSize + kPointerSize));
__ ldr(r3, FieldMemOperand(r2, 0));
- __ add(r3, r3, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
+ __ add(r3, r3, Operand(Smi::FromInt(1)));
__ str(r3, FieldMemOperand(r2, 0));
__ bind(&call_function);
@@ -2188,7 +1920,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ b(ne, &miss);
// Initialize the call counter.
- __ Move(r5, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
+ __ Move(r5, Operand(Smi::FromInt(1)));
__ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
__ str(r5, FieldMemOperand(r4, FixedArray::kHeaderSize + kPointerSize));
@@ -2199,9 +1931,9 @@ void CallICStub::Generate(MacroAssembler* masm) {
{
FrameScope scope(masm, StackFrame::INTERNAL);
CreateWeakCellStub create_stub(masm->isolate());
- __ Push(r1);
+ __ Push(cp, r1);
__ CallStub(&create_stub);
- __ Pop(r1);
+ __ Pop(cp, r1);
}
__ jmp(&call_function);
@@ -2286,13 +2018,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
// index_ is consumed by runtime conversion function.
__ Push(object_, index_);
}
- if (index_flags_ == STRING_INDEX_IS_NUMBER) {
- __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero);
- } else {
- DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
- // NumberToSmi discards numbers that are not exact integers.
- __ CallRuntime(Runtime::kNumberToSmi);
- }
+ __ CallRuntime(Runtime::kNumberToSmi);
// Save the conversion result before the pop instructions below
// have a chance to overwrite it.
__ Move(index_, r0);
@@ -2622,67 +2348,13 @@ void SubStringStub::Generate(MacroAssembler* masm) {
// r3: from index (untagged)
__ SmiTag(r3, r3);
StringCharAtGenerator generator(r0, r3, r2, r0, &runtime, &runtime, &runtime,
- STRING_INDEX_IS_NUMBER, RECEIVER_IS_STRING);
+ RECEIVER_IS_STRING);
generator.GenerateFast(masm);
__ Drop(3);
__ Ret();
generator.SkipSlow(masm, &runtime);
}
-
-void ToNumberStub::Generate(MacroAssembler* masm) {
- // The ToNumber stub takes one argument in r0.
- STATIC_ASSERT(kSmiTag == 0);
- __ tst(r0, Operand(kSmiTagMask));
- __ Ret(eq);
-
- __ CompareObjectType(r0, r1, r1, HEAP_NUMBER_TYPE);
- // r0: receiver
- // r1: receiver instance type
- __ Ret(eq);
-
- NonNumberToNumberStub stub(masm->isolate());
- __ TailCallStub(&stub);
-}
-
-void NonNumberToNumberStub::Generate(MacroAssembler* masm) {
- // The NonNumberToNumber stub takes one argument in r0.
- __ AssertNotNumber(r0);
-
- __ CompareObjectType(r0, r1, r1, FIRST_NONSTRING_TYPE);
- // r0: receiver
- // r1: receiver instance type
- StringToNumberStub stub(masm->isolate());
- __ TailCallStub(&stub, lo);
-
- Label not_oddball;
- __ cmp(r1, Operand(ODDBALL_TYPE));
- __ b(ne, &not_oddball);
- __ ldr(r0, FieldMemOperand(r0, Oddball::kToNumberOffset));
- __ Ret();
- __ bind(&not_oddball);
-
- __ Push(r0); // Push argument.
- __ TailCallRuntime(Runtime::kToNumber);
-}
-
-void StringToNumberStub::Generate(MacroAssembler* masm) {
- // The StringToNumber stub takes one argument in r0.
- __ AssertString(r0);
-
- // Check if string has a cached array index.
- Label runtime;
- __ ldr(r2, FieldMemOperand(r0, String::kHashFieldOffset));
- __ tst(r2, Operand(String::kContainsCachedArrayIndexMask));
- __ b(ne, &runtime);
- __ IndexFromHash(r2, r0);
- __ Ret();
-
- __ bind(&runtime);
- __ Push(r0); // Push argument.
- __ TailCallRuntime(Runtime::kStringToNumber);
-}
-
void ToStringStub::Generate(MacroAssembler* masm) {
// The ToString stub takes one argument in r0.
Label is_number;
@@ -2848,7 +2520,7 @@ void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
// Load r2 with the allocation site. We stick an undefined dummy value here
// and replace it with the real allocation site later when we instantiate this
// stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
- __ Move(r2, handle(isolate()->heap()->undefined_value()));
+ __ Move(r2, isolate()->factory()->undefined_value());
// Make sure that we actually patched the allocation site.
if (FLAG_debug_code) {
@@ -3227,7 +2899,6 @@ void DirectCEntryStub::Generate(MacroAssembler* masm) {
// GC safe. The RegExp backend also relies on this.
__ str(lr, MemOperand(sp, 0));
__ blx(ip); // Call the C++ function.
- __ VFPEnsureFPSCRState(r2);
__ ldr(pc, MemOperand(sp, 0));
}
@@ -3694,14 +3365,14 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
__ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
- LoadICStub stub(isolate(), state());
+ LoadICStub stub(isolate());
stub.GenerateForTrampoline(masm);
}
void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
__ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
- KeyedLoadICStub stub(isolate(), state());
+ KeyedLoadICStub stub(isolate());
stub.GenerateForTrampoline(masm);
}
@@ -3839,11 +3510,8 @@ void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
__ bind(&not_array);
__ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
__ b(ne, &miss);
- Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags(
- Code::ComputeHandlerFlags(Code::LOAD_IC));
- masm->isolate()->stub_cache()->GenerateProbe(masm, Code::LOAD_IC, code_flags,
- receiver, name, feedback,
- receiver_map, scratch1, r9);
+ masm->isolate()->load_stub_cache()->GenerateProbe(
+ masm, receiver, name, feedback, receiver_map, scratch1, r9);
__ bind(&miss);
LoadIC::GenerateMiss(masm);
@@ -3922,37 +3590,30 @@ void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
__ jmp(&compare_map);
}
-
-void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) {
- __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
- VectorStoreICStub stub(isolate(), state());
+void StoreICTrampolineStub::Generate(MacroAssembler* masm) {
+ __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister());
+ StoreICStub stub(isolate(), state());
stub.GenerateForTrampoline(masm);
}
-
-void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
- __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
- VectorKeyedStoreICStub stub(isolate(), state());
+void KeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
+ __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister());
+ KeyedStoreICStub stub(isolate(), state());
stub.GenerateForTrampoline(masm);
}
+void StoreICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }
-void VectorStoreICStub::Generate(MacroAssembler* masm) {
- GenerateImpl(masm, false);
-}
-
-
-void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
+void StoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
GenerateImpl(masm, true);
}
-
-void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
- Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // r1
- Register key = VectorStoreICDescriptor::NameRegister(); // r2
- Register vector = VectorStoreICDescriptor::VectorRegister(); // r3
- Register slot = VectorStoreICDescriptor::SlotRegister(); // r4
- DCHECK(VectorStoreICDescriptor::ValueRegister().is(r0)); // r0
+void StoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
+ Register receiver = StoreWithVectorDescriptor::ReceiverRegister(); // r1
+ Register key = StoreWithVectorDescriptor::NameRegister(); // r2
+ Register vector = StoreWithVectorDescriptor::VectorRegister(); // r3
+ Register slot = StoreWithVectorDescriptor::SlotRegister(); // r4
+ DCHECK(StoreWithVectorDescriptor::ValueRegister().is(r0)); // r0
Register feedback = r5;
Register receiver_map = r6;
Register scratch1 = r9;
@@ -3984,11 +3645,8 @@ void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
__ bind(&not_array);
__ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
__ b(ne, &miss);
- Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags(
- Code::ComputeHandlerFlags(Code::STORE_IC));
- masm->isolate()->stub_cache()->GenerateProbe(
- masm, Code::STORE_IC, code_flags, receiver, key, feedback, receiver_map,
- scratch1, scratch2);
+ masm->isolate()->store_stub_cache()->GenerateProbe(
+ masm, receiver, key, feedback, receiver_map, scratch1, scratch2);
__ bind(&miss);
StoreIC::GenerateMiss(masm);
@@ -3998,13 +3656,11 @@ void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
__ jmp(&compare_map);
}
-
-void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) {
+void KeyedStoreICStub::Generate(MacroAssembler* masm) {
GenerateImpl(masm, false);
}
-
-void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
+void KeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
GenerateImpl(masm, true);
}
@@ -4070,13 +3726,12 @@ static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register feedback,
__ jmp(miss);
}
-
-void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
- Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // r1
- Register key = VectorStoreICDescriptor::NameRegister(); // r2
- Register vector = VectorStoreICDescriptor::VectorRegister(); // r3
- Register slot = VectorStoreICDescriptor::SlotRegister(); // r4
- DCHECK(VectorStoreICDescriptor::ValueRegister().is(r0)); // r0
+void KeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
+ Register receiver = StoreWithVectorDescriptor::ReceiverRegister(); // r1
+ Register key = StoreWithVectorDescriptor::NameRegister(); // r2
+ Register vector = StoreWithVectorDescriptor::VectorRegister(); // r3
+ Register slot = StoreWithVectorDescriptor::SlotRegister(); // r4
+ DCHECK(StoreWithVectorDescriptor::ValueRegister().is(r0)); // r0
Register feedback = r5;
Register receiver_map = r6;
Register scratch1 = r9;
@@ -4331,19 +3986,13 @@ static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
}
}
-
-void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
+void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) {
ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
isolate);
ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
isolate);
- ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
- isolate);
-}
-
-
-void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
- Isolate* isolate) {
+ ArrayNArgumentsConstructorStub stub(isolate);
+ stub.GetCode();
ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
for (int i = 0; i < 2; i++) {
// For internal arrays we only need a few things
@@ -4351,8 +4000,6 @@ void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
stubh1.GetCode();
InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
stubh2.GetCode();
- InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
- stubh3.GetCode();
}
}
@@ -4372,13 +4019,15 @@ void ArrayConstructorStub::GenerateDispatchToArrayStub(
CreateArrayDispatchOneArgument(masm, mode);
__ bind(&not_one_case);
- CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
+ ArrayNArgumentsConstructorStub stub(masm->isolate());
+ __ TailCallStub(&stub);
} else if (argument_count() == NONE) {
CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
} else if (argument_count() == ONE) {
CreateArrayDispatchOneArgument(masm, mode);
} else if (argument_count() == MORE_THAN_ONE) {
- CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
+ ArrayNArgumentsConstructorStub stub(masm->isolate());
+ __ TailCallStub(&stub);
} else {
UNREACHABLE();
}
@@ -4460,7 +4109,7 @@ void InternalArrayConstructorStub::GenerateCase(
InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
__ TailCallStub(&stub0, lo);
- InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
+ ArrayNArgumentsConstructorStub stubN(isolate());
__ TailCallStub(&stubN, hi);
if (IsFastPackedElementsKind(kind)) {
@@ -4562,15 +4211,15 @@ void FastNewObjectStub::Generate(MacroAssembler* masm) {
__ bind(&done_allocate);
// Initialize the JSObject fields.
- __ str(r2, MemOperand(r0, JSObject::kMapOffset));
+ __ str(r2, FieldMemOperand(r0, JSObject::kMapOffset));
__ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex);
- __ str(r3, MemOperand(r0, JSObject::kPropertiesOffset));
- __ str(r3, MemOperand(r0, JSObject::kElementsOffset));
+ __ str(r3, FieldMemOperand(r0, JSObject::kPropertiesOffset));
+ __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));
STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
- __ add(r1, r0, Operand(JSObject::kHeaderSize));
+ __ add(r1, r0, Operand(JSObject::kHeaderSize - kHeapObjectTag));
// ----------- S t a t e -------------
- // -- r0 : result (untagged)
+ // -- r0 : result (tagged)
// -- r1 : result fields (untagged)
// -- r5 : result end (untagged)
// -- r2 : initial map
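The switch from MemOperand to FieldMemOperand goes together with r0 now holding a tagged pointer straight out of allocation (the explicit `add(r0, r0, Operand(kHeapObjectTag))` steps are deleted below). FieldMemOperand folds the tag subtraction into the addressing, so each field store needs no separate untag. A minimal model of the two helpers, as an assumption consistent with how they are used here:

#include <cstdint>

constexpr intptr_t kHeapObjectTag = 1;

// Untagged base pointer: plain base-plus-offset addressing.
intptr_t MemAddress(intptr_t base, intptr_t offset) {
  return base + offset;
}

// Tagged base pointer: the -1 tag is absorbed into the offset.
intptr_t FieldMemAddress(intptr_t tagged_base, intptr_t offset) {
  return tagged_base + offset - kHeapObjectTag;
}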
@@ -4588,10 +4237,6 @@ void FastNewObjectStub::Generate(MacroAssembler* masm) {
{
// Initialize all in-object fields with undefined.
__ InitializeFieldsWithFiller(r1, r5, r6);
-
- // Add the object tag to make the JSObject real.
- STATIC_ASSERT(kHeapObjectTag == 1);
- __ add(r0, r0, Operand(kHeapObjectTag));
__ Ret();
}
__ bind(&slack_tracking);
@@ -4610,10 +4255,6 @@ void FastNewObjectStub::Generate(MacroAssembler* masm) {
__ LoadRoot(r6, Heap::kOnePointerFillerMapRootIndex);
__ InitializeFieldsWithFiller(r1, r5, r6);
- // Add the object tag to make the JSObject real.
- STATIC_ASSERT(kHeapObjectTag == 1);
- __ add(r0, r0, Operand(kHeapObjectTag));
-
// Check if we can finalize the instance size.
STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
__ tst(r3, Operand(Map::ConstructionCounter::kMask));
@@ -4640,10 +4281,10 @@ void FastNewObjectStub::Generate(MacroAssembler* masm) {
__ CallRuntime(Runtime::kAllocateInNewSpace);
__ Pop(r2);
}
- STATIC_ASSERT(kHeapObjectTag == 1);
- __ sub(r0, r0, Operand(kHeapObjectTag));
__ ldrb(r5, FieldMemOperand(r2, Map::kInstanceSizeOffset));
__ add(r5, r0, Operand(r5, LSL, kPointerSizeLog2));
+ STATIC_ASSERT(kHeapObjectTag == 1);
+ __ sub(r5, r5, Operand(kHeapObjectTag));
__ b(&done_allocate);
// Fall back to %NewObject.
@@ -4662,20 +4303,20 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
// -----------------------------------
__ AssertFunction(r1);
- // For Ignition we need to skip all possible handler/stub frames until
- // we reach the JavaScript frame for the function (similar to what the
- // runtime fallback implementation does). So make r2 point to that
- // JavaScript frame.
- {
- Label loop, loop_entry;
- __ mov(r2, fp);
- __ b(&loop_entry);
- __ bind(&loop);
+ // Make r2 point to the JavaScript frame.
+ __ mov(r2, fp);
+ if (skip_stub_frame()) {
+ // For Ignition we need to skip the handler/stub frame to reach the
+ // JavaScript frame for the function.
__ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));
- __ bind(&loop_entry);
+ }
+ if (FLAG_debug_code) {
+ Label ok;
__ ldr(ip, MemOperand(r2, StandardFrameConstants::kFunctionOffset));
__ cmp(ip, r1);
- __ b(ne, &loop);
+ __ b(eq, &ok);
+ __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
+ __ bind(&ok);
}
// Check if we have rest parameters (only possible if we have an
@@ -4690,10 +4331,10 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
// specified by the function's internal formal parameter count.
Label rest_parameters;
__ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
- __ ldr(r1, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
- __ ldr(r1,
- FieldMemOperand(r1, SharedFunctionInfo::kFormalParameterCountOffset));
- __ sub(r0, r0, r1, SetCC);
+ __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
+ __ ldr(r3,
+ FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
+ __ sub(r0, r0, r3, SetCC);
__ b(gt, &rest_parameters);
// Return an empty rest parameter array.
@@ -4706,7 +4347,7 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
// Allocate an empty rest parameter array.
Label allocate, done_allocate;
- __ Allocate(JSArray::kSize, r0, r1, r2, &allocate, TAG_OBJECT);
+ __ Allocate(JSArray::kSize, r0, r1, r2, &allocate, NO_ALLOCATION_FLAGS);
__ bind(&done_allocate);
// Setup the rest parameter array in r0.
@@ -4740,15 +4381,16 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- cp : context
// -- r0 : number of rest parameters (tagged)
+ // -- r1 : function
// -- r2 : pointer to first rest parameters
// -- lr : return address
// -----------------------------------
// Allocate space for the rest parameter array plus the backing store.
Label allocate, done_allocate;
- __ mov(r1, Operand(JSArray::kSize + FixedArray::kHeaderSize));
- __ add(r1, r1, Operand(r0, LSL, kPointerSizeLog2 - 1));
- __ Allocate(r1, r3, r4, r5, &allocate, TAG_OBJECT);
+ __ mov(r6, Operand(JSArray::kSize + FixedArray::kHeaderSize));
+ __ add(r6, r6, Operand(r0, LSL, kPointerSizeLog2 - 1));
+ __ Allocate(r6, r3, r4, r5, &allocate, NO_ALLOCATION_FLAGS);
__ bind(&done_allocate);
// Setup the elements array in r3.
@@ -4780,17 +4422,25 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
__ mov(r0, r4);
__ Ret();
- // Fall back to %AllocateInNewSpace.
+ // Fall back to %AllocateInNewSpace (if not too big).
+ Label too_big_for_new_space;
__ bind(&allocate);
+ __ cmp(r6, Operand(Page::kMaxRegularHeapObjectSize));
+ __ b(gt, &too_big_for_new_space);
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
- __ SmiTag(r1);
- __ Push(r0, r2, r1);
+ __ SmiTag(r6);
+ __ Push(r0, r2, r6);
__ CallRuntime(Runtime::kAllocateInNewSpace);
__ mov(r3, r0);
__ Pop(r0, r2);
}
__ jmp(&done_allocate);
+
+ // Fall back to %NewRestParameter.
+ __ bind(&too_big_for_new_space);
+ __ push(r1);
+ __ TailCallRuntime(Runtime::kNewRestParameter);
}
}
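The new too_big_for_new_space path (mirrored below in FastNewStrictArgumentsStub) gates the %AllocateInNewSpace fallback: regular new-space allocations are capped at Page::kMaxRegularHeapObjectSize, so an oversized rest-parameter array is handed to the full %NewRestParameter runtime entry instead, which can allocate elsewhere (e.g. large-object space). The gate reduces to a byte-size comparison; a trivial sketch, with the cap passed in rather than hard-coded:

#include <cstddef>

// max_regular_object_size stands in for V8's Page::kMaxRegularHeapObjectSize.
bool FitsInNewSpace(size_t size_in_bytes, size_t max_regular_object_size) {
  return size_in_bytes <= max_regular_object_size;
}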
@@ -4804,23 +4454,40 @@ void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
// -----------------------------------
__ AssertFunction(r1);
+ // Make r9 point to the JavaScript frame.
+ __ mov(r9, fp);
+ if (skip_stub_frame()) {
+ // For Ignition we need to skip the handler/stub frame to reach the
+ // JavaScript frame for the function.
+ __ ldr(r9, MemOperand(r9, StandardFrameConstants::kCallerFPOffset));
+ }
+ if (FLAG_debug_code) {
+ Label ok;
+ __ ldr(ip, MemOperand(r9, StandardFrameConstants::kFunctionOffset));
+ __ cmp(ip, r1);
+ __ b(eq, &ok);
+ __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
+ __ bind(&ok);
+ }
+
// TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
__ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
__ ldr(r2,
FieldMemOperand(r2, SharedFunctionInfo::kFormalParameterCountOffset));
- __ add(r3, fp, Operand(r2, LSL, kPointerSizeLog2 - 1));
+ __ add(r3, r9, Operand(r2, LSL, kPointerSizeLog2 - 1));
__ add(r3, r3, Operand(StandardFrameConstants::kCallerSPOffset));
// r1 : function
// r2 : number of parameters (tagged)
// r3 : parameters pointer
+ // r9 : JavaScript frame pointer
// Registers used over whole function:
// r5 : arguments count (tagged)
// r6 : mapped parameter count (tagged)
// Check if the calling frame is an arguments adaptor frame.
Label adaptor_frame, try_allocate, runtime;
- __ ldr(r4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+ __ ldr(r4, MemOperand(r9, StandardFrameConstants::kCallerFPOffset));
__ ldr(r0, MemOperand(r4, CommonFrameConstants::kContextOrFrameTypeOffset));
__ cmp(r0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
__ b(eq, &adaptor_frame);
@@ -4863,7 +4530,7 @@ void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
__ add(r9, r9, Operand(JSSloppyArgumentsObject::kSize));
// Do the allocation of all three objects in one go.
- __ Allocate(r9, r0, r9, r4, &runtime, TAG_OBJECT);
+ __ Allocate(r9, r0, r9, r4, &runtime, NO_ALLOCATION_FLAGS);
// r0 = address of new object(s) (tagged)
// r2 = argument count (smi-tagged)
@@ -5009,20 +4676,20 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
// -----------------------------------
__ AssertFunction(r1);
- // For Ignition we need to skip all possible handler/stub frames until
- // we reach the JavaScript frame for the function (similar to what the
- // runtime fallback implementation does). So make r2 point to that
- // JavaScript frame.
- {
- Label loop, loop_entry;
- __ mov(r2, fp);
- __ b(&loop_entry);
- __ bind(&loop);
+ // Make r2 point to the JavaScript frame.
+ __ mov(r2, fp);
+ if (skip_stub_frame()) {
+ // For Ignition we need to skip the handler/stub frame to reach the
+ // JavaScript frame for the function.
__ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));
- __ bind(&loop_entry);
+ }
+ if (FLAG_debug_code) {
+ Label ok;
__ ldr(ip, MemOperand(r2, StandardFrameConstants::kFunctionOffset));
__ cmp(ip, r1);
- __ b(ne, &loop);
+ __ b(eq, &ok);
+ __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
+ __ bind(&ok);
}
// Check if we have an arguments adaptor frame below the function frame.
@@ -5032,9 +4699,9 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
__ cmp(ip, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
__ b(eq, &arguments_adaptor);
{
- __ ldr(r1, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
+ __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
__ ldr(r0, FieldMemOperand(
- r1, SharedFunctionInfo::kFormalParameterCountOffset));
+ r4, SharedFunctionInfo::kFormalParameterCountOffset));
__ add(r2, r2, Operand(r0, LSL, kPointerSizeLog2 - 1));
__ add(r2, r2,
Operand(StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
@@ -5052,15 +4719,16 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- cp : context
// -- r0 : number of rest parameters (tagged)
+ // -- r1 : function
// -- r2 : pointer to first rest parameters
// -- lr : return address
// -----------------------------------
// Allocate space for the strict arguments object plus the backing store.
Label allocate, done_allocate;
- __ mov(r1, Operand(JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize));
- __ add(r1, r1, Operand(r0, LSL, kPointerSizeLog2 - 1));
- __ Allocate(r1, r3, r4, r5, &allocate, TAG_OBJECT);
+ __ mov(r6, Operand(JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize));
+ __ add(r6, r6, Operand(r0, LSL, kPointerSizeLog2 - 1));
+ __ Allocate(r6, r3, r4, r5, &allocate, NO_ALLOCATION_FLAGS);
__ bind(&done_allocate);
// Setup the elements array in r3.
@@ -5092,44 +4760,25 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
__ mov(r0, r4);
__ Ret();
- // Fall back to %AllocateInNewSpace.
+ // Fall back to %AllocateInNewSpace (if not too big).
+ Label too_big_for_new_space;
__ bind(&allocate);
+ __ cmp(r6, Operand(Page::kMaxRegularHeapObjectSize));
+ __ b(gt, &too_big_for_new_space);
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
- __ SmiTag(r1);
- __ Push(r0, r2, r1);
+ __ SmiTag(r6);
+ __ Push(r0, r2, r6);
__ CallRuntime(Runtime::kAllocateInNewSpace);
__ mov(r3, r0);
__ Pop(r0, r2);
}
__ b(&done_allocate);
-}
-
-
-void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
- Register context = cp;
- Register result = r0;
- Register slot = r2;
-
- // Go up the context chain to the script context.
- for (int i = 0; i < depth(); ++i) {
- __ ldr(result, ContextMemOperand(context, Context::PREVIOUS_INDEX));
- context = result;
- }
-
- // Load the PropertyCell value at the specified slot.
- __ add(result, context, Operand(slot, LSL, kPointerSizeLog2));
- __ ldr(result, ContextMemOperand(result));
- __ ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset));
-
- // If the result is not the_hole, return. Otherwise, handle in the runtime.
- __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
- __ Ret(ne);
- // Fallback to runtime.
- __ SmiTag(slot);
- __ push(slot);
- __ TailCallRuntime(Runtime::kLoadGlobalViaContext);
+ // Fall back to %NewStrictArguments.
+ __ bind(&too_big_for_new_space);
+ __ push(r1);
+ __ TailCallRuntime(Runtime::kNewStrictArguments);
}
@@ -5423,7 +5072,11 @@ void CallApiCallbackStub::Generate(MacroAssembler* masm) {
STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
STATIC_ASSERT(FCA::kIsolateIndex == 1);
STATIC_ASSERT(FCA::kHolderIndex == 0);
- STATIC_ASSERT(FCA::kArgsLength == 7);
+ STATIC_ASSERT(FCA::kNewTargetIndex == 7);
+ STATIC_ASSERT(FCA::kArgsLength == 8);
+
+ // new target
+ __ PushRoot(Heap::kUndefinedValueRootIndex);
// context save
__ push(context);
@@ -5457,7 +5110,7 @@ void CallApiCallbackStub::Generate(MacroAssembler* masm) {
// Allocate the v8::Arguments structure in the arguments' space since
// it's not controlled by GC.
- const int kApiStackSpace = 4;
+ const int kApiStackSpace = 3;
FrameScope frame_scope(masm, StackFrame::MANUAL);
__ EnterExitFrame(false, kApiStackSpace);
@@ -5474,9 +5127,6 @@ void CallApiCallbackStub::Generate(MacroAssembler* masm) {
// FunctionCallbackInfo::length_ = argc
__ mov(ip, Operand(argc()));
__ str(ip, MemOperand(r0, 2 * kPointerSize));
- // FunctionCallbackInfo::is_construct_call_ = 0
- __ mov(ip, Operand::Zero());
- __ str(ip, MemOperand(r0, 3 * kPointerSize));
ExternalReference thunk_ref =
ExternalReference::invoke_function_callback(masm->isolate());
@@ -5493,8 +5143,8 @@ void CallApiCallbackStub::Generate(MacroAssembler* masm) {
}
MemOperand return_value_operand(fp, return_value_offset * kPointerSize);
int stack_space = 0;
- MemOperand is_construct_call_operand = MemOperand(sp, 4 * kPointerSize);
- MemOperand* stack_space_operand = &is_construct_call_operand;
+ MemOperand length_operand = MemOperand(sp, 3 * kPointerSize);
+ MemOperand* stack_space_operand = &length_operand;
stack_space = argc() + FCA::kArgsLength + 1;
stack_space_operand = NULL;
@@ -5505,16 +5155,36 @@ void CallApiCallbackStub::Generate(MacroAssembler* masm) {
void CallApiGetterStub::Generate(MacroAssembler* masm) {
- // ----------- S t a t e -------------
- // -- sp[0] : name
- // -- sp[4 .. (4 + kArgsLength*4)] : v8::PropertyCallbackInfo::args_
- // -- ...
- // -- r2 : api_function_address
- // -----------------------------------
-
- Register api_function_address = ApiGetterDescriptor::function_address();
- DCHECK(api_function_address.is(r2));
-
+ // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
+ // name below the exit frame to make GC aware of them.
+ STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
+ STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
+ STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
+ STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
+ STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
+ STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
+ STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
+ STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);
+
+ Register receiver = ApiGetterDescriptor::ReceiverRegister();
+ Register holder = ApiGetterDescriptor::HolderRegister();
+ Register callback = ApiGetterDescriptor::CallbackRegister();
+ Register scratch = r4;
+ DCHECK(!AreAliased(receiver, holder, callback, scratch));
+
+ Register api_function_address = r2;
+
+ __ push(receiver);
+ // Push data from AccessorInfo.
+ __ ldr(scratch, FieldMemOperand(callback, AccessorInfo::kDataOffset));
+ __ push(scratch);
+ __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
+ __ Push(scratch, scratch);
+ __ mov(scratch, Operand(ExternalReference::isolate_address(isolate())));
+ __ Push(scratch, holder);
+ __ Push(Smi::FromInt(0)); // should_throw_on_error -> false
+ __ ldr(scratch, FieldMemOperand(callback, AccessorInfo::kNameOffset));
+ __ push(scratch);
// v8::PropertyCallbackInfo::args_ array and name handle.
const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
@@ -5534,6 +5204,10 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
ExternalReference thunk_ref =
ExternalReference::invoke_accessor_getter_callback(isolate());
+ __ ldr(scratch, FieldMemOperand(callback, AccessorInfo::kJsGetterOffset));
+ __ ldr(api_function_address,
+ FieldMemOperand(scratch, Foreign::kForeignAddressOffset));
+
// +3 is to skip prolog, return address and name handle.
MemOperand return_value_operand(
fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
@@ -5541,7 +5215,6 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
kStackUnwindSpace, NULL, return_value_operand, NULL);
}
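For orientation, the pushes in the new CallApiGetterStub fill v8::PropertyCallbackInfo::args_ from the highest index downward; the stack grows toward lower addresses, so the last push lands at args_[0] and the name handle sits below the whole array (the +1 in kStackUnwindSpace). Under the STATIC_ASSERTed indices, the frame at the call should look like:

  sp[0] : name handle (pushed last)
  sp[1] : should_throw_on_error = Smi(0)    -> args_[0]
  sp[2] : holder                            -> args_[1]
  sp[3] : isolate address                   -> args_[2]
  sp[4] : return value default = undefined  -> args_[3]
  sp[5] : return value = undefined          -> args_[4]
  sp[6] : data (from AccessorInfo)          -> args_[5]
  sp[7] : receiver                          -> args_[6]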
-
#undef __
} // namespace internal