Diffstat (limited to 'deps/v8/src/x87/code-stubs-x87.cc')
-rw-r--r-- | deps/v8/src/x87/code-stubs-x87.cc | 397 |
1 file changed, 161 insertions, 236 deletions
diff --git a/deps/v8/src/x87/code-stubs-x87.cc b/deps/v8/src/x87/code-stubs-x87.cc
index 9d066483cf..1da5f41a88 100644
--- a/deps/v8/src/x87/code-stubs-x87.cc
+++ b/deps/v8/src/x87/code-stubs-x87.cc
@@ -334,7 +334,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
   if (exponent_type() == ON_STACK) {
     // The arguments are still on the stack.
     __ bind(&call_runtime);
-    __ TailCallRuntime(Runtime::kMathPowRT, 2, 1);
+    __ TailCallRuntime(Runtime::kMathPowRT);
 
     // The stub is called from non-optimized code, which expects the result
     // as heap number in exponent.
@@ -388,7 +388,7 @@ void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
   __ push(scratch);  // return address
 
   // Perform tail call to the entry.
-  __ TailCallRuntime(Runtime::kLoadElementWithInterceptor, 2, 1);
+  __ TailCallRuntime(Runtime::kLoadElementWithInterceptor);
 
   __ bind(&slow);
   PropertyAccessCompiler::TailCallBuiltin(
@@ -488,7 +488,7 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
   __ pop(ebx);  // Return address.
   __ push(edx);
   __ push(ebx);
-  __ TailCallRuntime(Runtime::kArguments, 1, 1);
+  __ TailCallRuntime(Runtime::kArguments);
 }
 
 
@@ -520,7 +520,7 @@ void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
   __ push(edx);  // Push parameters pointer.
   __ push(ecx);  // Push parameter count.
   __ push(eax);  // Push return address.
-  __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
+  __ TailCallRuntime(Runtime::kNewSloppyArguments);
 }
 
 
@@ -593,8 +593,7 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
   // esp[8] = parameter count (tagged)
   // Get the arguments map from the current native context into edi.
   Label has_mapped_parameters, instantiate;
-  __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
-  __ mov(edi, FieldOperand(edi, JSGlobalObject::kNativeContextOffset));
+  __ mov(edi, NativeContextOperand());
   __ mov(ebx, Operand(esp, 0 * kPointerSize));
   __ test(ebx, ebx);
   __ j(not_zero, &has_mapped_parameters, Label::kNear);
@@ -756,7 +755,7 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
   __ push(edx);  // Push parameters pointer.
   __ push(ecx);  // Push parameter count.
   __ push(eax);  // Push return address.
-  __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
+  __ TailCallRuntime(Runtime::kNewSloppyArguments);
 }
 
 
@@ -797,10 +796,8 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
   __ Allocate(eax, eax, ebx, no_reg, &runtime, TAG_OBJECT);
 
   // Get the arguments map from the current native context.
-  __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
-  __ mov(edi, FieldOperand(edi, JSGlobalObject::kNativeContextOffset));
-  const int offset = Context::SlotOffset(Context::STRICT_ARGUMENTS_MAP_INDEX);
-  __ mov(edi, Operand(edi, offset));
+  __ mov(edi, NativeContextOperand());
+  __ mov(edi, ContextOperand(edi, Context::STRICT_ARGUMENTS_MAP_INDEX));
 
   __ mov(FieldOperand(eax, JSObject::kMapOffset), edi);
   __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
@@ -852,7 +849,35 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
   __ push(edx);  // Push parameters pointer.
   __ push(ecx);  // Push parameter count.
   __ push(eax);  // Push return address.
-  __ TailCallRuntime(Runtime::kNewStrictArguments, 3, 1);
+  __ TailCallRuntime(Runtime::kNewStrictArguments);
+}
+
+
+void RestParamAccessStub::GenerateNew(MacroAssembler* masm) {
+  // ecx : number of parameters (tagged)
+  // edx : parameters pointer
+  // ebx : rest parameter index (tagged)
+  // esp[0] : return address
+
+  // Check if the calling frame is an arguments adaptor frame.
+  Label runtime;
+  __ mov(edi, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
+  __ mov(eax, Operand(edi, StandardFrameConstants::kContextOffset));
+  __ cmp(eax, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+  __ j(not_equal, &runtime);
+
+  // Patch the arguments.length and the parameters pointer.
+  __ mov(ecx, Operand(edi, ArgumentsAdaptorFrameConstants::kLengthOffset));
+  __ lea(edx,
+         Operand(edi, ecx, times_2, StandardFrameConstants::kCallerSPOffset));
+
+  __ bind(&runtime);
+  __ pop(eax);   // Save return address.
+  __ push(ecx);  // Push number of parameters.
+  __ push(edx);  // Push parameters pointer.
+  __ push(ebx);  // Push rest parameter index.
+  __ push(eax);  // Push return address.
+  __ TailCallRuntime(Runtime::kNewRestParam);
 }
 
 
@@ -861,7 +886,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   // time or if regexp entry in generated code is turned off runtime switch or
   // at compilation.
 #ifdef V8_INTERPRETED_REGEXP
-  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
+  __ TailCallRuntime(Runtime::kRegExpExec);
 #else  // V8_INTERPRETED_REGEXP
 
   // Stack frame on entry.
@@ -1139,7 +1164,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   __ j(equal, &runtime);
 
   // For exception, throw the exception again.
-  __ TailCallRuntime(Runtime::kRegExpExecReThrow, 4, 1);
+  __ TailCallRuntime(Runtime::kRegExpExecReThrow);
 
   __ bind(&failure);
   // For failure to match, return null.
@@ -1225,7 +1250,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
 
   // Do the runtime call to execute the regexp.
   __ bind(&runtime);
-  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
+  __ TailCallRuntime(Runtime::kRegExpExec);
 
   // Deferred code for string handling.
   // (7) Not a long external string? If yes, go to (10).
@@ -1380,7 +1405,7 @@ void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
     __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
     __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
     // Call runtime on identical JSObjects. Otherwise return equal.
-    __ cmpb(ecx, static_cast<uint8_t>(FIRST_SPEC_OBJECT_TYPE));
+    __ cmpb(ecx, static_cast<uint8_t>(FIRST_JS_RECEIVER_TYPE));
    __ j(above_equal, &runtime_call, Label::kFar);
    // Call runtime on identical symbols since we need to throw a TypeError.
    __ cmpb(ecx, static_cast<uint8_t>(SYMBOL_TYPE));
@@ -1448,8 +1473,8 @@ void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
   // Get the type of the first operand.
   // If the first object is a JS object, we have done pointer comparison.
   Label first_non_object;
-  STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
-  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
+  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
+  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
   __ j(below, &first_non_object, Label::kNear);
 
   // Return non-zero (eax is not zero)
@@ -1463,7 +1488,7 @@ void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
   __ CmpInstanceType(ecx, ODDBALL_TYPE);
   __ j(equal, &return_not_equal);
 
-  __ CmpObjectType(edx, FIRST_SPEC_OBJECT_TYPE, ecx);
+  __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx);
   __ j(above_equal, &return_not_equal);
 
   // Check for oddballs: true, false, null, undefined.
@@ -1559,9 +1584,9 @@ void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
   __ lea(ecx, Operand(eax, edx, times_1, 0));
   __ test(ecx, Immediate(kSmiTagMask));
   __ j(not_zero, &runtime_call, Label::kNear);
-  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
+  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
   __ j(below, &runtime_call, Label::kNear);
-  __ CmpObjectType(edx, FIRST_SPEC_OBJECT_TYPE, ebx);
+  __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ebx);
   __ j(below, &runtime_call, Label::kNear);
   // We do not bail out after this point. Both are JSObjects, and
   // they are equal if and only if both are undetectable.
@@ -1590,8 +1615,7 @@ void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
   // Figure out which native to call and setup the arguments.
   if (cc == equal) {
     __ push(ecx);
-    __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals, 2,
-                       1);
+    __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals);
   } else {
     __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
 
@@ -1600,9 +1624,8 @@ void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
     // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
     // tagged as a small integer.
-    __ TailCallRuntime(
-        is_strong(strength()) ? Runtime::kCompare_Strong : Runtime::kCompare, 3,
-        1);
+    __ TailCallRuntime(is_strong(strength()) ? Runtime::kCompare_Strong
+                                             : Runtime::kCompare);
   }
 
   __ bind(&miss);
@@ -1610,16 +1633,11 @@
 }
 
 
-static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub,
-                                       bool is_super) {
+static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
   // eax : number of arguments to the construct function
   // ebx : feedback vector
   // edx : slot in feedback vector (Smi)
   // edi : the function to call
-  // esp[0]: original receiver (for IsSuperConstructorCall)
-  if (is_super) {
-    __ pop(ecx);
-  }
 
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
@@ -1630,29 +1648,19 @@ static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub,
     __ push(edi);
     __ push(edx);
     __ push(ebx);
-    if (is_super) {
-      __ push(ecx);
-    }
 
     __ CallStub(stub);
 
-    if (is_super) {
-      __ pop(ecx);
-    }
     __ pop(ebx);
     __ pop(edx);
     __ pop(edi);
     __ pop(eax);
     __ SmiUntag(eax);
   }
-
-  if (is_super) {
-    __ push(ecx);
-  }
 }
 
 
-static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
+static void GenerateRecordCallTarget(MacroAssembler* masm) {
   // Cache the called function in a feedback vector slot. Cache states
   // are uninitialized, monomorphic (indicated by a JSFunction), and
   // megamorphic.
@@ -1660,7 +1668,6 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
   // ebx : feedback vector
   // edx : slot in feedback vector (Smi)
   // edi : the function to call
-  // esp[0]: original receiver (for IsSuperConstructorCall)
   Isolate* isolate = masm->isolate();
   Label initialize, done, miss, megamorphic, not_array_function;
 
@@ -1726,12 +1733,12 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
   // Create an AllocationSite if we don't already have it, store it in the
   // slot.
   CreateAllocationSiteStub create_stub(isolate);
-  CallStubInRecordCallTarget(masm, &create_stub, is_super);
+  CallStubInRecordCallTarget(masm, &create_stub);
   __ jmp(&done);
 
   __ bind(&not_array_function);
   CreateWeakCellStub weak_cell_stub(isolate);
-  CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super);
+  CallStubInRecordCallTarget(masm, &weak_cell_stub);
 
   __ bind(&done);
 }
@@ -1739,14 +1746,9 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
 void CallConstructStub::Generate(MacroAssembler* masm) {
   // eax : number of arguments
   // ebx : feedback vector
-  // ecx : original constructor (for IsSuperConstructorCall)
   // edx : slot in feedback vector (Smi, for RecordCallTarget)
   // edi : constructor function
 
-  if (IsSuperConstructorCall()) {
-    __ push(ecx);
-  }
-
   Label non_function;
   // Check that function is not a smi.
   __ JumpIfSmi(edi, &non_function);
@@ -1754,29 +1756,22 @@
   __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
   __ j(not_equal, &non_function);
 
-  if (RecordCallTarget()) {
-    GenerateRecordCallTarget(masm, IsSuperConstructorCall());
+  GenerateRecordCallTarget(masm);
 
-    Label feedback_register_initialized;
-    // Put the AllocationSite from the feedback vector into ebx, or undefined.
-    __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
-                             FixedArray::kHeaderSize));
-    Handle<Map> allocation_site_map =
-        isolate()->factory()->allocation_site_map();
-    __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
-    __ j(equal, &feedback_register_initialized);
-    __ mov(ebx, isolate()->factory()->undefined_value());
-    __ bind(&feedback_register_initialized);
+  Label feedback_register_initialized;
+  // Put the AllocationSite from the feedback vector into ebx, or undefined.
+  __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
+                           FixedArray::kHeaderSize));
+  Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
+  __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
+  __ j(equal, &feedback_register_initialized);
+  __ mov(ebx, isolate()->factory()->undefined_value());
+  __ bind(&feedback_register_initialized);
 
-    __ AssertUndefinedOrAllocationSite(ebx);
-  }
+  __ AssertUndefinedOrAllocationSite(ebx);
 
-  if (IsSuperConstructorCall()) {
-    __ pop(edx);
-  } else {
-    // Pass original constructor to construct stub.
-    __ mov(edx, edi);
-  }
+  // Pass new target to construct stub.
+  __ mov(edx, edi);
 
   // Tail call to the function-specific construct stub (still in the caller
   // context at this point).
@@ -1786,7 +1781,6 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
   __ jmp(ecx);
 
   __ bind(&non_function);
-  if (IsSuperConstructorCall()) __ Drop(1);
   __ mov(edx, edi);
   __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
 }
@@ -1824,11 +1818,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
   // edx - slot id
   // ebx - vector
   Isolate* isolate = masm->isolate();
-  const int with_types_offset =
-      FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex);
-  const int generic_offset =
-      FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex);
-  Label extra_checks_or_miss, call;
+  Label extra_checks_or_miss, call, call_function;
   int argc = arg_count();
   ParameterCount actual(argc);
@@ -1862,9 +1852,10 @@
                       FixedArray::kHeaderSize + kPointerSize),
          Immediate(Smi::FromInt(CallICNexus::kCallCountIncrement)));
 
-  __ bind(&call);
+  __ bind(&call_function);
   __ Set(eax, argc);
-  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
+  __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode()),
+          RelocInfo::CODE_TARGET);
 
   __ bind(&extra_checks_or_miss);
   Label uninitialized, miss, not_allocation_site;
@@ -1899,10 +1890,11 @@
   __ mov(
       FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize),
       Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
-  // We have to update statistics for runtime profiling.
-  __ sub(FieldOperand(ebx, with_types_offset), Immediate(Smi::FromInt(1)));
-  __ add(FieldOperand(ebx, generic_offset), Immediate(Smi::FromInt(1)));
-  __ jmp(&call);
+
+  __ bind(&call);
+  __ Set(eax, argc);
+  __ Jump(masm->isolate()->builtins()->Call(convert_mode()),
+          RelocInfo::CODE_TARGET);
 
   __ bind(&uninitialized);
 
@@ -1919,8 +1911,11 @@
   __ cmp(edi, ecx);
   __ j(equal, &miss);
 
-  // Update stats.
-  __ add(FieldOperand(ebx, with_types_offset), Immediate(Smi::FromInt(1)));
+  // Make sure the function belongs to the same native context.
+  __ mov(ecx, FieldOperand(edi, JSFunction::kContextOffset));
+  __ mov(ecx, ContextOperand(ecx, Context::NATIVE_CONTEXT_INDEX));
+  __ cmp(ecx, NativeContextOperand());
+  __ j(not_equal, &miss);
 
   // Initialize the call counter.
   __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
@@ -1939,7 +1934,7 @@
     __ pop(edi);
   }
 
-  __ jmp(&call);
+  __ jmp(&call_function);
 
   // We are here because tracing is on or we encountered a MISS case we can't
   // handle here.
@@ -1962,7 +1957,7 @@ void CallICStub::GenerateMiss(MacroAssembler* masm) {
   __ push(edx);
 
   // Call the entry.
-  __ CallRuntime(Runtime::kCallIC_Miss, 3);
+  __ CallRuntime(Runtime::kCallIC_Miss);
 
   // Move result to edi and exit the internal frame.
   __ mov(edi, eax);
@@ -2274,14 +2269,6 @@
             static_cast<uint8_t>(1 << Map::kHasNonInstancePrototype));
   __ j(not_zero, &slow_case);
 
-  // Ensure that {function} is not bound.
-  Register const shared_info = scratch;
-  __ mov(shared_info,
-         FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
-  __ BooleanBitTest(shared_info, SharedFunctionInfo::kCompilerHintsOffset,
-                    SharedFunctionInfo::kBoundFunction);
-  __ j(not_zero, &slow_case);
-
   // Get the "prototype" (or initial map) of the {function}.
   __ mov(function_prototype,
          FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
@@ -2307,28 +2294,48 @@ void InstanceOfStub::Generate(MacroAssembler* masm) {
   // Loop through the prototype chain looking for the {function} prototype.
   // Assume true, and change to false if not found.
-  Register const object_prototype = object_map;
-  Label done, loop;
+  Label done, loop, fast_runtime_fallback;
   __ mov(eax, isolate()->factory()->true_value());
   __ bind(&loop);
-  __ mov(object_prototype, FieldOperand(object_map, Map::kPrototypeOffset));
-  __ cmp(object_prototype, function_prototype);
+
+  // Check if the object needs to be access checked.
+  __ test_b(FieldOperand(object_map, Map::kBitFieldOffset),
+            1 << Map::kIsAccessCheckNeeded);
+  __ j(not_zero, &fast_runtime_fallback, Label::kNear);
+  // Check if the current object is a Proxy.
+  __ CmpInstanceType(object_map, JS_PROXY_TYPE);
+  __ j(equal, &fast_runtime_fallback, Label::kNear);
+
+  __ mov(object, FieldOperand(object_map, Map::kPrototypeOffset));
+  __ cmp(object, function_prototype);
   __ j(equal, &done, Label::kNear);
-  __ cmp(object_prototype, isolate()->factory()->null_value());
-  __ mov(object_map, FieldOperand(object_prototype, HeapObject::kMapOffset));
+  __ mov(object_map, FieldOperand(object, HeapObject::kMapOffset));
+  __ cmp(object, isolate()->factory()->null_value());
   __ j(not_equal, &loop);
   __ mov(eax, isolate()->factory()->false_value());
+
   __ bind(&done);
   __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
   __ ret(0);
 
-  // Slow-case: Call the runtime function.
+  // Found Proxy or access check needed: Call the runtime.
+  __ bind(&fast_runtime_fallback);
+  __ PopReturnAddressTo(scratch);
+  __ Push(object);
+  __ Push(function_prototype);
+  __ PushReturnAddressFrom(scratch);
+  // Invalidate the instanceof cache.
+  __ Move(eax, Immediate(Smi::FromInt(0)));
+  __ StoreRoot(eax, scratch, Heap::kInstanceofCacheFunctionRootIndex);
+  __ TailCallRuntime(Runtime::kHasInPrototypeChain);
+
+  // Slow-case: Call the %InstanceOf runtime function.
   __ bind(&slow_case);
-  __ pop(scratch);    // Pop return address.
-  __ push(object);    // Push {object}.
-  __ push(function);  // Push {function}.
-  __ push(scratch);   // Push return address.
-  __ TailCallRuntime(Runtime::kInstanceOf, 2, 1);
+  __ PopReturnAddressTo(scratch);
+  __ Push(object);
+  __ Push(function);
+  __ PushReturnAddressFrom(scratch);
+  __ TailCallRuntime(Runtime::kInstanceOf);
 }
@@ -2387,11 +2394,11 @@ void StringCharCodeAtGenerator::GenerateSlow(
   __ push(object_);
   __ push(index_);  // Consumed by runtime conversion function.
   if (index_flags_ == STRING_INDEX_IS_NUMBER) {
-    __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
+    __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero);
   } else {
     DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
     // NumberToSmi discards numbers that are not exact integers.
-    __ CallRuntime(Runtime::kNumberToSmi, 1);
+    __ CallRuntime(Runtime::kNumberToSmi);
   }
   if (!index_.is(eax)) {
     // Save the conversion result before the pop instructions below
@@ -2421,7 +2428,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
   __ push(object_);
   __ SmiTag(index_);
   __ push(index_);
-  __ CallRuntime(Runtime::kStringCharCodeAtRT, 2);
+  __ CallRuntime(Runtime::kStringCharCodeAtRT);
   if (!result_.is(eax)) {
     __ mov(result_, eax);
   }
@@ -2467,7 +2474,7 @@ void StringCharFromCodeGenerator::GenerateSlow(
   __ bind(&slow_case_);
   call_helper.BeforeCall(masm);
   __ push(code_);
-  __ CallRuntime(Runtime::kStringCharFromCode, 1);
+  __ CallRuntime(Runtime::kStringCharFromCode);
   if (!result_.is(eax)) {
     __ mov(result_, eax);
   }
@@ -2717,7 +2724,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
 
   // Just jump to runtime to create the sub string.
   __ bind(&runtime);
-  __ TailCallRuntime(Runtime::kSubString, 3, 1);
+  __ TailCallRuntime(Runtime::kSubString);
 
   __ bind(&single_char);
   // eax: string
@@ -2762,7 +2769,7 @@ void ToNumberStub::Generate(MacroAssembler* masm) {
   __ pop(ecx);   // Pop return address.
   __ push(eax);  // Push argument.
   __ push(ecx);  // Push return address.
-  __ TailCallRuntime(Runtime::kStringToNumber, 1, 1);
+  __ TailCallRuntime(Runtime::kStringToNumber);
 
   __ bind(&not_string);
   Label not_oddball;
@@ -2775,7 +2782,7 @@
   __ pop(ecx);   // Pop return address.
   __ push(eax);  // Push argument.
   __ push(ecx);  // Push return address.
-  __ TailCallRuntime(Runtime::kToNumber, 1, 1);
+  __ TailCallRuntime(Runtime::kToNumber);
 }
 
 
@@ -2794,7 +2801,7 @@ void ToLengthStub::Generate(MacroAssembler* masm) {
   __ pop(ecx);   // Pop return address.
   __ push(eax);  // Push argument.
   __ push(ecx);  // Push return address.
-  __ TailCallRuntime(Runtime::kToLength, 1, 1);
+  __ TailCallRuntime(Runtime::kToLength);
 }
 
 
@@ -2829,7 +2836,7 @@ void ToStringStub::Generate(MacroAssembler* masm) {
   __ pop(ecx);   // Pop return address.
   __ push(eax);  // Push argument.
   __ push(ecx);  // Push return address.
-  __ TailCallRuntime(Runtime::kToString, 1, 1);
+  __ TailCallRuntime(Runtime::kToString);
 }
 
 
@@ -2992,7 +2999,7 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
   __ Push(edx);
   __ Push(eax);
   __ PushReturnAddressFrom(ecx);
-  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
+  __ TailCallRuntime(Runtime::kStringCompare);
 }
 
 
@@ -3037,7 +3044,7 @@ void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
   __ JumpIfNotRoot(ecx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
   __ JumpIfNotRoot(ebx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
   if (op() != Token::EQ_STRICT && is_strong(strength())) {
-    __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0, 1);
+    __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion);
   } else {
     if (!Token::IsEqualityOp(op())) {
       __ mov(eax, FieldOperand(eax, Oddball::kToNumberOffset));
@@ -3305,9 +3312,9 @@ void CompareICStub::GenerateStrings(MacroAssembler* masm) {
   __ push(right);
   __ push(tmp1);
   if (equality) {
-    __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
+    __ TailCallRuntime(Runtime::kStringEquals);
   } else {
-    __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
+    __ TailCallRuntime(Runtime::kStringCompare);
   }
 
   __ bind(&miss);
@@ -3315,19 +3322,20 @@
 }
 
 
-void CompareICStub::GenerateObjects(MacroAssembler* masm) {
-  DCHECK(state() == CompareICState::OBJECT);
+void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
+  DCHECK_EQ(CompareICState::RECEIVER, state());
   Label miss;
   __ mov(ecx, edx);
   __ and_(ecx, eax);
   __ JumpIfSmi(ecx, &miss, Label::kNear);
 
-  __ CmpObjectType(eax, JS_OBJECT_TYPE, ecx);
-  __ j(not_equal, &miss, Label::kNear);
-  __ CmpObjectType(edx, JS_OBJECT_TYPE, ecx);
-  __ j(not_equal, &miss, Label::kNear);
+  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
+  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
+  __ j(below, &miss, Label::kNear);
+  __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx);
+  __ j(below, &miss, Label::kNear);
 
-  DCHECK(GetCondition() == equal);
+  DCHECK_EQ(equal, GetCondition());
   __ sub(eax, edx);
   __ ret(0);
 
@@ -3336,7 +3344,7 @@ void CompareICStub::GenerateObjects(MacroAssembler* masm) {
 }
 
 
-void CompareICStub::GenerateKnownObjects(MacroAssembler* masm) {
+void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
   Label miss;
   Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
   __ mov(ecx, edx);
@@ -3353,14 +3361,14 @@
     __ sub(eax, edx);
     __ ret(0);
   } else if (is_strong(strength())) {
-    __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0, 1);
+    __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion);
   } else {
     __ PopReturnAddressTo(ecx);
     __ Push(edx);
     __ Push(eax);
     __ Push(Immediate(Smi::FromInt(NegativeComparisonResult(GetCondition()))));
     __ PushReturnAddressFrom(ecx);
-    __ TailCallRuntime(Runtime::kCompare, 3, 1);
+    __ TailCallRuntime(Runtime::kCompare);
   }
 
   __ bind(&miss);
@@ -3377,7 +3385,7 @@ void CompareICStub::GenerateMiss(MacroAssembler* masm) {
   __ push(edx);  // And also use them as the arguments.
   __ push(eax);
   __ push(Immediate(Smi::FromInt(op())));
-  __ CallRuntime(Runtime::kCompareIC_Miss, 3);
+  __ CallRuntime(Runtime::kCompareIC_Miss);
   // Compute the entry point of the rewritten stub.
   __ lea(edi, FieldOperand(eax, Code::kHeaderSize));
   __ pop(eax);
@@ -3768,11 +3776,10 @@ void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
   // We need an extra register for this, so we push the object register
   // temporarily.
   __ push(regs_.object());
-  __ EnsureNotWhite(regs_.scratch0(),  // The value.
-                    regs_.scratch1(),  // Scratch.
-                    regs_.object(),    // Scratch.
-                    &need_incremental_pop_object,
-                    Label::kNear);
+  __ JumpIfWhite(regs_.scratch0(),  // The value.
+                 regs_.scratch1(),  // Scratch.
+                 regs_.object(),    // Scratch.
+                 &need_incremental_pop_object, Label::kNear);
   __ pop(regs_.object());
 
   regs_.Restore(masm);
@@ -3792,89 +3799,6 @@
 }
 
 
-void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
-  // ----------- S t a t e -------------
-  //  -- eax    : element value to store
-  //  -- ecx    : element index as smi
-  //  -- esp[0] : return address
-  //  -- esp[4] : array literal index in function
-  //  -- esp[8] : array literal
-  // clobbers ebx, edx, edi
-  // -----------------------------------
-
-  Label element_done;
-  Label double_elements;
-  Label smi_element;
-  Label slow_elements;
-  Label slow_elements_from_double;
-  Label fast_elements;
-
-  // Get array literal index, array literal and its map.
-  __ mov(edx, Operand(esp, 1 * kPointerSize));
-  __ mov(ebx, Operand(esp, 2 * kPointerSize));
-  __ mov(edi, FieldOperand(ebx, JSObject::kMapOffset));
-
-  __ CheckFastElements(edi, &double_elements);
-
-  // Check for FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS elements
-  __ JumpIfSmi(eax, &smi_element);
-  __ CheckFastSmiElements(edi, &fast_elements, Label::kNear);
-
-  // Store into the array literal requires a elements transition. Call into
-  // the runtime.
-
-  __ bind(&slow_elements);
-  __ pop(edi);  // Pop return address and remember to put back later for tail
-                // call.
-  __ push(ebx);
-  __ push(ecx);
-  __ push(eax);
-  __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
-  __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
-  __ push(edx);
-  __ push(edi);  // Return return address so that tail call returns to right
-                 // place.
-  __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
-
-  __ bind(&slow_elements_from_double);
-  __ pop(edx);
-  __ jmp(&slow_elements);
-
-  // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
-  __ bind(&fast_elements);
-  __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
-  __ lea(ecx, FieldOperand(ebx, ecx, times_half_pointer_size,
-                           FixedArrayBase::kHeaderSize));
-  __ mov(Operand(ecx, 0), eax);
-  // Update the write barrier for the array store.
-  __ RecordWrite(ebx, ecx, eax, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
-                 OMIT_SMI_CHECK);
-  __ ret(0);
-
-  // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS,
-  // and value is Smi.
-  __ bind(&smi_element);
-  __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
-  __ mov(FieldOperand(ebx, ecx, times_half_pointer_size,
-                      FixedArrayBase::kHeaderSize), eax);
-  __ ret(0);
-
-  // Array literal has ElementsKind of FAST_*_DOUBLE_ELEMENTS.
-  __ bind(&double_elements);
-
-  __ push(edx);
-  __ mov(edx, FieldOperand(ebx, JSObject::kElementsOffset));
-  __ StoreNumberToDoubleElements(eax,
-                                 edx,
-                                 ecx,
-                                 edi,
-                                 &slow_elements_from_double,
-                                 false);
-  __ pop(edx);
-  __ ret(0);
-}
-
-
 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
   CEntryStub ces(isolate(), 1, kSaveFPRegs);
   __ call(ces.GetCode(), RelocInfo::CODE_TARGET);
@@ -4756,6 +4680,9 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
 
   Label subclassing;
 
+  // Enter the context of the Array function.
+  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
+
   __ cmp(edx, edi);
   __ j(not_equal, &subclassing);
@@ -4777,27 +4704,26 @@
 
   // Subclassing.
   __ bind(&subclassing);
-  __ pop(ecx);  // return address.
-  __ push(edi);
-  __ push(edx);
-
-  // Adjust argc.
   switch (argument_count()) {
     case ANY:
     case MORE_THAN_ONE:
-      __ add(eax, Immediate(2));
+      __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
+      __ add(eax, Immediate(3));
       break;
    case NONE:
-      __ mov(eax, Immediate(2));
+      __ mov(Operand(esp, 1 * kPointerSize), edi);
+      __ mov(eax, Immediate(3));
      break;
    case ONE:
-      __ mov(eax, Immediate(3));
+      __ mov(Operand(esp, 2 * kPointerSize), edi);
+      __ mov(eax, Immediate(4));
      break;
  }
-
-  __ push(ecx);
-  __ JumpToExternalReference(
-      ExternalReference(Runtime::kArrayConstructorWithSubclassing, isolate()));
+  __ PopReturnAddressTo(ecx);
+  __ Push(edx);
+  __ Push(ebx);
+  __ PushReturnAddressFrom(ecx);
+  __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
 }
@@ -4914,7 +4840,7 @@ void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
   __ Pop(result_reg);  // Pop return address.
   __ Push(slot_reg);
   __ Push(result_reg);  // Push return address.
-  __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 1, 1);
+  __ TailCallRuntime(Runtime::kLoadGlobalViaContext);
 }
 
 
@@ -5037,8 +4963,7 @@ void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
   __ Push(cell_reg);  // Push return address.
   __ TailCallRuntime(is_strict(language_mode())
                          ? Runtime::kStoreGlobalViaContext_Strict
-                         : Runtime::kStoreGlobalViaContext_Sloppy,
-                     2, 1);
+                         : Runtime::kStoreGlobalViaContext_Sloppy);
 }
 
 
@@ -5175,7 +5100,7 @@ static void CallApiFunctionAndReturn(MacroAssembler* masm,
   __ CmpInstanceType(map, LAST_NAME_TYPE);
   __ j(below_equal, &ok, Label::kNear);
 
-  __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
+  __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
   __ j(above_equal, &ok, Label::kNear);
 
   __ cmp(map, isolate->factory()->heap_number_map());
@@ -5209,7 +5134,7 @@
 
   // Re-throw by promoting a scheduled exception.
   __ bind(&promote_scheduled_exception);
-  __ TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
+  __ TailCallRuntime(Runtime::kPromoteScheduledException);
 
   // HandleScope limit has changed. Delete allocated extensions.
   ExternalReference delete_extensions =