Diffstat (limited to 'deps/v8/src/x87/code-stubs-x87.cc')
-rw-r--r--   deps/v8/src/x87/code-stubs-x87.cc   1213
1 file changed, 685 insertions(+), 528 deletions(-)
diff --git a/deps/v8/src/x87/code-stubs-x87.cc b/deps/v8/src/x87/code-stubs-x87.cc
index bba43276fa..0d59b18068 100644
--- a/deps/v8/src/x87/code-stubs-x87.cc
+++ b/deps/v8/src/x87/code-stubs-x87.cc
@@ -15,6 +15,7 @@
#include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"
+#include "src/x87/code-stubs-x87.h"
#include "src/x87/frames-x87.h"
namespace v8 {
@@ -492,72 +493,78 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
+ // ecx : number of parameters (tagged)
+ // edx : parameters pointer
+ // edi : function
// esp[0] : return address
- // esp[4] : number of parameters
- // esp[8] : receiver displacement
- // esp[12] : function
+
+ DCHECK(edi.is(ArgumentsAccessNewDescriptor::function()));
+ DCHECK(ecx.is(ArgumentsAccessNewDescriptor::parameter_count()));
+ DCHECK(edx.is(ArgumentsAccessNewDescriptor::parameter_pointer()));
// Check if the calling frame is an arguments adaptor frame.
Label runtime;
- __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
- __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
- __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+ __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
+ __ mov(eax, Operand(ebx, StandardFrameConstants::kContextOffset));
+ __ cmp(eax, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
__ j(not_equal, &runtime, Label::kNear);
// Patch the arguments.length and the parameters pointer.
- __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
- __ mov(Operand(esp, 1 * kPointerSize), ecx);
- __ lea(edx, Operand(edx, ecx, times_2,
- StandardFrameConstants::kCallerSPOffset));
- __ mov(Operand(esp, 2 * kPointerSize), edx);
+ __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
+ __ lea(edx,
+ Operand(ebx, ecx, times_2, StandardFrameConstants::kCallerSPOffset));
__ bind(&runtime);
+ __ pop(eax); // Pop return address.
+ __ push(edi); // Push function.
+ __ push(edx); // Push parameters pointer.
+ __ push(ecx); // Push parameter count.
+ __ push(eax); // Push return address.
__ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
}
void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
+ // ecx : number of parameters (tagged)
+ // edx : parameters pointer
+ // edi : function
// esp[0] : return address
- // esp[4] : number of parameters (tagged)
- // esp[8] : receiver displacement
- // esp[12] : function
- // ebx = parameter count (tagged)
- __ mov(ebx, Operand(esp, 1 * kPointerSize));
+ DCHECK(edi.is(ArgumentsAccessNewDescriptor::function()));
+ DCHECK(ecx.is(ArgumentsAccessNewDescriptor::parameter_count()));
+ DCHECK(edx.is(ArgumentsAccessNewDescriptor::parameter_pointer()));
// Check if the calling frame is an arguments adaptor frame.
- // TODO(rossberg): Factor out some of the bits that are shared with the other
- // Generate* functions.
- Label runtime;
- Label adaptor_frame, try_allocate;
- __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
- __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
- __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+ Label adaptor_frame, try_allocate, runtime;
+ __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
+ __ mov(eax, Operand(ebx, StandardFrameConstants::kContextOffset));
+ __ cmp(eax, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
__ j(equal, &adaptor_frame, Label::kNear);
// No adaptor, parameter count = argument count.
- __ mov(ecx, ebx);
+ __ mov(ebx, ecx);
+ __ push(ecx);
__ jmp(&try_allocate, Label::kNear);
// We have an adaptor frame. Patch the parameters pointer.
__ bind(&adaptor_frame);
+ __ mov(ebx, ecx);
+ __ push(ecx);
+ __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
__ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ lea(edx, Operand(edx, ecx, times_2,
StandardFrameConstants::kCallerSPOffset));
- __ mov(Operand(esp, 2 * kPointerSize), edx);
// ebx = parameter count (tagged)
// ecx = argument count (smi-tagged)
- // esp[4] = parameter count (tagged)
- // esp[8] = address of receiver argument
// Compute the mapped parameter count = min(ebx, ecx) in ebx.
__ cmp(ebx, ecx);
__ j(less_equal, &try_allocate, Label::kNear);
__ mov(ebx, ecx);
+ // Save mapped parameter count and function.
__ bind(&try_allocate);
-
- // Save mapped parameter count.
+ __ push(edi);
__ push(ebx);
// Compute the sizes of backing store, parameter map, and arguments object.
@@ -577,13 +584,13 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
__ add(ebx, Immediate(Heap::kSloppyArgumentsObjectSize));
// Do the allocation of all three objects in one go.
- __ Allocate(ebx, eax, edx, edi, &runtime, TAG_OBJECT);
+ __ Allocate(ebx, eax, edi, no_reg, &runtime, TAG_OBJECT);
// eax = address of new object(s) (tagged)
// ecx = argument count (smi-tagged)
// esp[0] = mapped parameter count (tagged)
+ // esp[4] = function
// esp[8] = parameter count (tagged)
- // esp[12] = address of receiver argument
// Get the arguments map from the current native context into edi.
Label has_mapped_parameters, instantiate;
__ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
@@ -606,8 +613,8 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
// ecx = argument count (smi-tagged)
// edi = address of arguments map (tagged)
// esp[0] = mapped parameter count (tagged)
+ // esp[4] = function
// esp[8] = parameter count (tagged)
- // esp[12] = address of receiver argument
// Copy the JS object part.
__ mov(FieldOperand(eax, JSObject::kMapOffset), edi);
__ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
@@ -617,11 +624,11 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
// Set up the callee in-object property.
STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
- __ mov(edx, Operand(esp, 4 * kPointerSize));
- __ AssertNotSmi(edx);
+ __ mov(edi, Operand(esp, 1 * kPointerSize));
+ __ AssertNotSmi(edi);
__ mov(FieldOperand(eax, JSObject::kHeaderSize +
- Heap::kArgumentsCalleeIndex * kPointerSize),
- edx);
+ Heap::kArgumentsCalleeIndex * kPointerSize),
+ edi);
// Use the length (smi tagged) and set that as an in-object property too.
__ AssertSmi(ecx);
@@ -639,11 +646,13 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
// eax = address of new object (tagged)
// ebx = mapped parameter count (tagged)
// ecx = argument count (tagged)
+ // edx = address of receiver argument
// edi = address of parameter map or backing store (tagged)
// esp[0] = mapped parameter count (tagged)
+ // esp[4] = function
// esp[8] = parameter count (tagged)
- // esp[12] = address of receiver argument
- // Free a register.
+ // Free two registers.
+ __ push(edx);
__ push(eax);
// Initialize parameter map. If there are no mapped arguments, we're done.
@@ -669,9 +678,9 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
// We loop from right to left.
Label parameters_loop, parameters_test;
__ push(ecx);
- __ mov(eax, Operand(esp, 2 * kPointerSize));
+ __ mov(eax, Operand(esp, 3 * kPointerSize));
__ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
- __ add(ebx, Operand(esp, 4 * kPointerSize));
+ __ add(ebx, Operand(esp, 5 * kPointerSize));
__ sub(ebx, eax);
__ mov(ecx, isolate()->factory()->the_hole_value());
__ mov(edx, edi);
@@ -683,9 +692,10 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
// edi = address of backing store (tagged)
// esp[0] = argument count (tagged)
// esp[4] = address of new object (tagged)
- // esp[8] = mapped parameter count (tagged)
- // esp[16] = parameter count (tagged)
- // esp[20] = address of receiver argument
+ // esp[8] = address of receiver argument
+ // esp[12] = mapped parameter count (tagged)
+ // esp[16] = function
+ // esp[20] = parameter count (tagged)
__ jmp(&parameters_test, Label::kNear);
__ bind(&parameters_loop);
@@ -703,17 +713,18 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
// ecx = argument count (tagged)
// edi = address of backing store (tagged)
// esp[0] = address of new object (tagged)
- // esp[4] = mapped parameter count (tagged)
- // esp[12] = parameter count (tagged)
- // esp[16] = address of receiver argument
+ // esp[4] = address of receiver argument
+ // esp[8] = mapped parameter count (tagged)
+ // esp[12] = function
+ // esp[16] = parameter count (tagged)
// Copy arguments header and remaining slots (if there are any).
__ mov(FieldOperand(edi, FixedArray::kMapOffset),
Immediate(isolate()->factory()->fixed_array_map()));
__ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
Label arguments_loop, arguments_test;
- __ mov(ebx, Operand(esp, 1 * kPointerSize));
- __ mov(edx, Operand(esp, 4 * kPointerSize));
+ __ mov(ebx, Operand(esp, 2 * kPointerSize));
+ __ mov(edx, Operand(esp, 1 * kPointerSize));
__ sub(edx, ebx); // Is there a smarter way to do negative scaling?
__ sub(edx, ebx);
__ jmp(&arguments_test, Label::kNear);
@@ -730,57 +741,60 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
// Restore.
__ pop(eax); // Address of arguments object.
- __ pop(ebx); // Parameter count.
+ __ Drop(4);
- // Return and remove the on-stack parameters.
- __ ret(3 * kPointerSize);
+ // Return.
+ __ ret(0);
// Do the runtime call to allocate the arguments object.
__ bind(&runtime);
- __ pop(eax); // Remove saved parameter count.
- __ mov(Operand(esp, 1 * kPointerSize), ecx); // Patch argument count.
+ __ pop(eax); // Remove saved mapped parameter count.
+ __ pop(edi); // Pop saved function.
+ __ pop(eax); // Remove saved parameter count.
+ __ pop(eax); // Pop return address.
+ __ push(edi); // Push function.
+ __ push(edx); // Push parameters pointer.
+ __ push(ecx); // Push parameter count.
+ __ push(eax); // Push return address.
__ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
}
void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
+ // ecx : number of parameters (tagged)
+ // edx : parameters pointer
+ // edi : function
// esp[0] : return address
- // esp[4] : number of parameters
- // esp[8] : receiver displacement
- // esp[12] : function
- // Check if the calling frame is an arguments adaptor frame.
- Label adaptor_frame, try_allocate, runtime;
- __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
- __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
- __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
- __ j(equal, &adaptor_frame, Label::kNear);
+ DCHECK(edi.is(ArgumentsAccessNewDescriptor::function()));
+ DCHECK(ecx.is(ArgumentsAccessNewDescriptor::parameter_count()));
+ DCHECK(edx.is(ArgumentsAccessNewDescriptor::parameter_pointer()));
- // Get the length from the frame.
- __ mov(ecx, Operand(esp, 1 * kPointerSize));
- __ jmp(&try_allocate, Label::kNear);
+ // Check if the calling frame is an arguments adaptor frame.
+ Label try_allocate, runtime;
+ __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
+ __ mov(eax, Operand(ebx, StandardFrameConstants::kContextOffset));
+ __ cmp(eax, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+ __ j(not_equal, &try_allocate, Label::kNear);
// Patch the arguments.length and the parameters pointer.
- __ bind(&adaptor_frame);
- __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
-
- __ lea(edx, Operand(edx, ecx, times_2,
- StandardFrameConstants::kCallerSPOffset));
- __ mov(Operand(esp, 1 * kPointerSize), ecx);
- __ mov(Operand(esp, 2 * kPointerSize), edx);
+ __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
+ __ lea(edx,
+ Operand(ebx, ecx, times_2, StandardFrameConstants::kCallerSPOffset));
// Try the new space allocation. Start out with computing the size of
// the arguments object and the elements array.
Label add_arguments_object;
__ bind(&try_allocate);
- __ test(ecx, ecx);
+ __ mov(eax, ecx);
+ __ test(eax, eax);
__ j(zero, &add_arguments_object, Label::kNear);
- __ lea(ecx, Operand(ecx, times_2, FixedArray::kHeaderSize));
+ __ lea(eax, Operand(eax, times_2, FixedArray::kHeaderSize));
__ bind(&add_arguments_object);
- __ add(ecx, Immediate(Heap::kStrictArgumentsObjectSize));
+ __ add(eax, Immediate(Heap::kStrictArgumentsObjectSize));
// Do the allocation of both objects in one go.
- __ Allocate(ecx, eax, edx, ebx, &runtime, TAG_OBJECT);
+ __ Allocate(eax, eax, ebx, no_reg, &runtime, TAG_OBJECT);
// Get the arguments map from the current native context.
__ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
@@ -796,7 +810,6 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// Get the length (smi tagged) and set that as an in-object property too.
STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
- __ mov(ecx, Operand(esp, 1 * kPointerSize));
__ AssertSmi(ecx);
__ mov(FieldOperand(eax, JSObject::kHeaderSize +
Heap::kArgumentsLengthIndex * kPointerSize),
@@ -807,17 +820,14 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
__ test(ecx, ecx);
__ j(zero, &done, Label::kNear);
- // Get the parameters pointer from the stack.
- __ mov(edx, Operand(esp, 2 * kPointerSize));
-
// Set up the elements pointer in the allocated arguments object and
// initialize the header in the elements fixed array.
__ lea(edi, Operand(eax, Heap::kStrictArgumentsObjectSize));
__ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
__ mov(FieldOperand(edi, FixedArray::kMapOffset),
Immediate(isolate()->factory()->fixed_array_map()));
-
__ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
+
// Untag the length for the loop below.
__ SmiUntag(ecx);
@@ -831,42 +841,21 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
__ dec(ecx);
__ j(not_zero, &loop);
- // Return and remove the on-stack parameters.
+ // Return.
__ bind(&done);
- __ ret(3 * kPointerSize);
+ __ ret(0);
// Do the runtime call to allocate the arguments object.
__ bind(&runtime);
+ __ pop(eax); // Pop return address.
+ __ push(edi); // Push function.
+ __ push(edx); // Push parameters pointer.
+ __ push(ecx); // Push parameter count.
+ __ push(eax); // Push return address.
__ TailCallRuntime(Runtime::kNewStrictArguments, 3, 1);
}
-void RestParamAccessStub::GenerateNew(MacroAssembler* masm) {
- // esp[0] : return address
- // esp[4] : language mode
- // esp[8] : index of rest parameter
- // esp[12] : number of parameters
- // esp[16] : receiver displacement
-
- // Check if the calling frame is an arguments adaptor frame.
- Label runtime;
- __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
- __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
- __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
- __ j(not_equal, &runtime);
-
- // Patch the arguments.length and the parameters pointer.
- __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
- __ mov(Operand(esp, 3 * kPointerSize), ecx);
- __ lea(edx, Operand(edx, ecx, times_2,
- StandardFrameConstants::kCallerSPOffset));
- __ mov(Operand(esp, 4 * kPointerSize), edx);
-
- __ bind(&runtime);
- __ TailCallRuntime(Runtime::kNewRestParam, 4, 1);
-}
-
-
void RegExpExecStub::Generate(MacroAssembler* masm) {
// Just jump directly to runtime if native RegExp is not selected at compile
// time or if regexp entry in generated code is turned off runtime switch or
@@ -1599,25 +1588,21 @@ void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
__ push(eax);
// Figure out which native to call and setup the arguments.
- if (cc == equal && strict()) {
+ if (cc == equal) {
__ push(ecx);
- __ TailCallRuntime(Runtime::kStrictEquals, 2, 1);
+ __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals, 2,
+ 1);
} else {
- Builtins::JavaScript builtin;
- if (cc == equal) {
- builtin = Builtins::EQUALS;
- } else {
- builtin =
- is_strong(strength()) ? Builtins::COMPARE_STRONG : Builtins::COMPARE;
- __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
- }
+ __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
// Restore return address on the stack.
__ push(ecx);
// Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
// tagged as a small integer.
- __ InvokeBuiltin(builtin, JUMP_FUNCTION);
+ __ TailCallRuntime(
+ is_strong(strength()) ? Runtime::kCompare_Strong : Runtime::kCompare, 3,
+ 1);
}
__ bind(&miss);
@@ -1695,27 +1680,25 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
__ j(equal, &done, Label::kFar);
__ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
Heap::kWeakCellMapRootIndex);
- __ j(not_equal, FLAG_pretenuring_call_new ? &miss : &check_allocation_site);
+ __ j(not_equal, &check_allocation_site);
// If the weak cell is cleared, we have a new chance to become monomorphic.
__ JumpIfSmi(FieldOperand(ecx, WeakCell::kValueOffset), &initialize);
__ jmp(&megamorphic);
- if (!FLAG_pretenuring_call_new) {
- __ bind(&check_allocation_site);
- // If we came here, we need to see if we are the array function.
- // If we didn't have a matching function, and we didn't find the megamorph
- // sentinel, then we have in the slot either some other function or an
- // AllocationSite.
- __ CompareRoot(FieldOperand(ecx, 0), Heap::kAllocationSiteMapRootIndex);
- __ j(not_equal, &miss);
+ __ bind(&check_allocation_site);
+ // If we came here, we need to see if we are the array function.
+ // If we didn't have a matching function, and we didn't find the megamorph
+ // sentinel, then we have in the slot either some other function or an
+ // AllocationSite.
+ __ CompareRoot(FieldOperand(ecx, 0), Heap::kAllocationSiteMapRootIndex);
+ __ j(not_equal, &miss);
- // Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
- __ cmp(edi, ecx);
- __ j(not_equal, &megamorphic);
- __ jmp(&done, Label::kFar);
- }
+ // Make sure the function is the Array() function
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
+ __ cmp(edi, ecx);
+ __ j(not_equal, &megamorphic);
+ __ jmp(&done, Label::kFar);
__ bind(&miss);
@@ -1734,24 +1717,21 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
// An uninitialized cache is patched with the function or sentinel to
// indicate the ElementsKind if function is the Array constructor.
__ bind(&initialize);
- if (!FLAG_pretenuring_call_new) {
- // Make sure the function is the Array() function
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
- __ cmp(edi, ecx);
- __ j(not_equal, &not_array_function);
-
- // The target function is the Array constructor,
- // Create an AllocationSite if we don't already have it, store it in the
- // slot.
- CreateAllocationSiteStub create_stub(isolate);
- CallStubInRecordCallTarget(masm, &create_stub, is_super);
- __ jmp(&done);
-
- __ bind(&not_array_function);
- }
+ // Make sure the function is the Array() function
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
+ __ cmp(edi, ecx);
+ __ j(not_equal, &not_array_function);
- CreateWeakCellStub create_stub(isolate);
+ // The target function is the Array constructor,
+ // Create an AllocationSite if we don't already have it, store it in the
+ // slot.
+ CreateAllocationSiteStub create_stub(isolate);
CallStubInRecordCallTarget(masm, &create_stub, is_super);
+ __ jmp(&done);
+
+ __ bind(&not_array_function);
+ CreateWeakCellStub weak_cell_stub(isolate);
+ CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super);
__ bind(&done);
}
@@ -1770,33 +1750,9 @@ static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) {
}
-static void EmitSlowCase(Isolate* isolate,
- MacroAssembler* masm,
- int argc,
- Label* non_function) {
- // Check for function proxy.
- __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
- __ j(not_equal, non_function);
- __ pop(ecx);
- __ push(edi); // put proxy as additional argument under return address
- __ push(ecx);
- __ Move(eax, Immediate(argc + 1));
- __ Move(ebx, Immediate(0));
- __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
- {
- Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
- __ jmp(adaptor, RelocInfo::CODE_TARGET);
- }
-
- // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
- // of the original receiver from the call site).
- __ bind(non_function);
- __ mov(Operand(esp, (argc + 1) * kPointerSize), edi);
- __ Move(eax, Immediate(argc));
- __ Move(ebx, Immediate(0));
- __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
- Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
- __ jmp(adaptor, RelocInfo::CODE_TARGET);
+static void EmitSlowCase(Isolate* isolate, MacroAssembler* masm, int argc) {
+ __ Set(eax, argc);
+ __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
@@ -1817,11 +1773,11 @@ static void CallFunctionNoFeedback(MacroAssembler* masm,
int argc, bool needs_checks,
bool call_as_method) {
// edi : the function to call
- Label slow, non_function, wrap, cont;
+ Label slow, wrap, cont;
if (needs_checks) {
// Check that the function really is a JavaScript function.
- __ JumpIfSmi(edi, &non_function);
+ __ JumpIfSmi(edi, &slow);
// Goto slow case if we do not have a function.
__ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
@@ -1856,8 +1812,7 @@ static void CallFunctionNoFeedback(MacroAssembler* masm,
if (needs_checks) {
// Slow-case: Non-function called.
__ bind(&slow);
- // (non_function is bound in EmitSlowCase)
- EmitSlowCase(masm->isolate(), masm, argc, &non_function);
+ EmitSlowCase(masm->isolate(), masm, argc);
}
if (call_as_method) {
@@ -1878,39 +1833,31 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
// ecx : original constructor (for IsSuperConstructorCall)
// edx : slot in feedback vector (Smi, for RecordCallTarget)
// edi : constructor function
- Label slow, non_function_call;
if (IsSuperConstructorCall()) {
__ push(ecx);
}
+ Label non_function;
// Check that function is not a smi.
- __ JumpIfSmi(edi, &non_function_call);
+ __ JumpIfSmi(edi, &non_function);
// Check that function is a JSFunction.
__ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
- __ j(not_equal, &slow);
+ __ j(not_equal, &non_function);
if (RecordCallTarget()) {
GenerateRecordCallTarget(masm, IsSuperConstructorCall());
- if (FLAG_pretenuring_call_new) {
- // Put the AllocationSite from the feedback vector into ebx.
- // By adding kPointerSize we encode that we know the AllocationSite
- // entry is at the feedback vector slot given by edx + 1.
- __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
- FixedArray::kHeaderSize + kPointerSize));
- } else {
- Label feedback_register_initialized;
- // Put the AllocationSite from the feedback vector into ebx, or undefined.
- __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
- FixedArray::kHeaderSize));
- Handle<Map> allocation_site_map =
- isolate()->factory()->allocation_site_map();
- __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
- __ j(equal, &feedback_register_initialized);
- __ mov(ebx, isolate()->factory()->undefined_value());
- __ bind(&feedback_register_initialized);
- }
+ Label feedback_register_initialized;
+ // Put the AllocationSite from the feedback vector into ebx, or undefined.
+ __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
+ FixedArray::kHeaderSize));
+ Handle<Map> allocation_site_map =
+ isolate()->factory()->allocation_site_map();
+ __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
+ __ j(equal, &feedback_register_initialized);
+ __ mov(ebx, isolate()->factory()->undefined_value());
+ __ bind(&feedback_register_initialized);
__ AssertUndefinedOrAllocationSite(ebx);
}
@@ -1922,69 +1869,33 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
__ mov(edx, edi);
}
- // Jump to the function-specific construct stub.
- Register jmp_reg = ecx;
- __ mov(jmp_reg, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
- __ mov(jmp_reg, FieldOperand(jmp_reg,
- SharedFunctionInfo::kConstructStubOffset));
- __ lea(jmp_reg, FieldOperand(jmp_reg, Code::kHeaderSize));
- __ jmp(jmp_reg);
-
- // edi: called object
- // eax: number of arguments
- // ecx: object map
- // esp[0]: original receiver (for IsSuperConstructorCall)
- Label do_call;
- __ bind(&slow);
- __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
- __ j(not_equal, &non_function_call);
- __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
- __ jmp(&do_call);
-
- __ bind(&non_function_call);
- __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
- __ bind(&do_call);
- if (IsSuperConstructorCall()) {
- __ Drop(1);
- }
- // Set expected number of arguments to zero (not changing eax).
- __ Move(ebx, Immediate(0));
- Handle<Code> arguments_adaptor =
- isolate()->builtins()->ArgumentsAdaptorTrampoline();
- __ jmp(arguments_adaptor, RelocInfo::CODE_TARGET);
-}
-
+ // Tail call to the function-specific construct stub (still in the caller
+ // context at this point).
+ __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
+ __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset));
+ __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
+ __ jmp(ecx);
-static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
- __ mov(vector, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
- __ mov(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset));
- __ mov(vector, FieldOperand(vector,
- SharedFunctionInfo::kFeedbackVectorOffset));
+ __ bind(&non_function);
+ if (IsSuperConstructorCall()) __ Drop(1);
+ __ mov(edx, edi);
+ __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
-void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
+void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
// edi - function
// edx - slot id
// ebx - vector
- Label miss;
- int argc = arg_count();
- ParameterCount actual(argc);
-
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
__ cmp(edi, ecx);
- __ j(not_equal, &miss);
+ __ j(not_equal, miss);
__ mov(eax, arg_count());
+ // Reload ecx.
__ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize));
- // Verify that ecx contains an AllocationSite
- Factory* factory = masm->isolate()->factory();
- __ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
- factory->allocation_site_map());
- __ j(not_equal, &miss);
-
// Increment the call count for monomorphic function calls.
__ add(FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize + kPointerSize),
@@ -1995,17 +1906,7 @@ void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
ArrayConstructorStub stub(masm->isolate(), arg_count());
__ TailCallStub(&stub);
- __ bind(&miss);
- GenerateMiss(masm);
-
- // The slow case, we need this no matter what to complete a call after a miss.
- CallFunctionNoFeedback(masm,
- arg_count(),
- true,
- CallAsMethod());
-
// Unreachable.
- __ int3();
}
@@ -2019,7 +1920,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
const int generic_offset =
FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex);
Label extra_checks_or_miss, slow_start;
- Label slow, non_function, wrap, cont;
+ Label slow, wrap, cont;
Label have_js_function;
int argc = arg_count();
ParameterCount actual(argc);
@@ -2072,7 +1973,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper());
__ bind(&slow);
- EmitSlowCase(isolate, masm, argc, &non_function);
+ EmitSlowCase(isolate, masm, argc);
if (CallAsMethod()) {
__ bind(&wrap);
@@ -2080,11 +1981,21 @@ void CallICStub::Generate(MacroAssembler* masm) {
}
__ bind(&extra_checks_or_miss);
- Label uninitialized, miss;
+ Label uninitialized, miss, not_allocation_site;
__ cmp(ecx, Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
__ j(equal, &slow_start);
+ // Check if we have an allocation site.
+ __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
+ Heap::kAllocationSiteMapRootIndex);
+ __ j(not_equal, &not_allocation_site);
+
+ // We have an allocation site.
+ HandleArrayCase(masm, &miss);
+
+ __ bind(&not_allocation_site);
+
// The following cases attempt to handle MISS cases without going to the
// runtime.
if (FLAG_trace_ic) {
@@ -2153,7 +2064,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ bind(&slow_start);
// Check that the function really is a JavaScript function.
- __ JumpIfSmi(edi, &non_function);
+ __ JumpIfSmi(edi, &slow);
// Goto slow case if we do not have a function.
__ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
@@ -2168,16 +2079,13 @@ void CallICStub::Generate(MacroAssembler* masm) {
void CallICStub::GenerateMiss(MacroAssembler* masm) {
FrameScope scope(masm, StackFrame::INTERNAL);
- // Push the receiver and the function and feedback info.
+ // Push the function and feedback info.
__ push(edi);
__ push(ebx);
__ push(edx);
// Call the entry.
- Runtime::FunctionId id = GetICState() == DEFAULT
- ? Runtime::kCallIC_Miss
- : Runtime::kCallIC_Customization_Miss;
- __ CallRuntime(id, 3);
+ __ CallRuntime(Runtime::kCallIC_Miss, 3);
// Move result to edi and exit the internal frame.
__ mov(edi, eax);
@@ -2430,233 +2338,108 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
}
-// Generate stub code for instanceof.
-// This code can patch a call site inlined cache of the instance of check,
-// which looks like this.
-//
-// 81 ff XX XX XX XX cmp edi, <the hole, patched to a map>
-// 75 0a jne <some near label>
-// b8 XX XX XX XX mov eax, <the hole, patched to either true or false>
-//
-// If call site patching is requested the stack will have the delta from the
-// return address to the cmp instruction just below the return address. This
-// also means that call site patching can only take place with arguments in
-// registers. TOS looks like this when call site patching is requested
-//
-// esp[0] : return address
-// esp[4] : delta from return address to cmp instruction
-//
-void InstanceofStub::Generate(MacroAssembler* masm) {
- // Call site inlining and patching implies arguments in registers.
- DCHECK(HasArgsInRegisters() || !HasCallSiteInlineCheck());
-
- // Fixed register usage throughout the stub.
- Register object = eax; // Object (lhs).
- Register map = ebx; // Map of the object.
- Register function = edx; // Function (rhs).
- Register prototype = edi; // Prototype of the function.
- Register scratch = ecx;
-
- // Constants describing the call site code to patch.
- static const int kDeltaToCmpImmediate = 2;
- static const int kDeltaToMov = 8;
- static const int kDeltaToMovImmediate = 9;
- static const int8_t kCmpEdiOperandByte1 = bit_cast<int8_t, uint8_t>(0x3b);
- static const int8_t kCmpEdiOperandByte2 = bit_cast<int8_t, uint8_t>(0x3d);
- static const int8_t kMovEaxImmediateByte = bit_cast<int8_t, uint8_t>(0xb8);
-
- DCHECK_EQ(object.code(), InstanceofStub::left().code());
- DCHECK_EQ(function.code(), InstanceofStub::right().code());
-
- // Get the object and function - they are always both needed.
- Label slow, not_js_object;
- if (!HasArgsInRegisters()) {
- __ mov(object, Operand(esp, 2 * kPointerSize));
- __ mov(function, Operand(esp, 1 * kPointerSize));
- }
+void InstanceOfStub::Generate(MacroAssembler* masm) {
+ Register const object = edx; // Object (lhs).
+ Register const function = eax; // Function (rhs).
+ Register const object_map = ecx; // Map of {object}.
+ Register const function_map = ebx; // Map of {function}.
+ Register const function_prototype = function_map; // Prototype of {function}.
+ Register const scratch = edi;
+
+ DCHECK(object.is(InstanceOfDescriptor::LeftRegister()));
+ DCHECK(function.is(InstanceOfDescriptor::RightRegister()));
+
+ // Check if {object} is a smi.
+ Label object_is_smi;
+ __ JumpIfSmi(object, &object_is_smi, Label::kNear);
+
+ // Lookup the {function} and the {object} map in the global instanceof cache.
+ // Note: This is safe because we clear the global instanceof cache whenever
+ // we change the prototype of any object.
+ Label fast_case, slow_case;
+ __ mov(object_map, FieldOperand(object, HeapObject::kMapOffset));
+ __ CompareRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
+ __ j(not_equal, &fast_case, Label::kNear);
+ __ CompareRoot(object_map, scratch, Heap::kInstanceofCacheMapRootIndex);
+ __ j(not_equal, &fast_case, Label::kNear);
+ __ LoadRoot(eax, Heap::kInstanceofCacheAnswerRootIndex);
+ __ ret(0);
- // Check that the left hand is a JS object.
- __ JumpIfSmi(object, &not_js_object);
- __ IsObjectJSObjectType(object, map, scratch, &not_js_object);
-
- // If there is a call site cache don't look in the global cache, but do the
- // real lookup and update the call site cache.
- if (!HasCallSiteInlineCheck() && !ReturnTrueFalseObject()) {
- // Look up the function and the map in the instanceof cache.
- Label miss;
- __ CompareRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
- __ j(not_equal, &miss, Label::kNear);
- __ CompareRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex);
- __ j(not_equal, &miss, Label::kNear);
- __ LoadRoot(eax, Heap::kInstanceofCacheAnswerRootIndex);
- __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
- __ bind(&miss);
- }
+ // If {object} is a smi we can safely return false if {function} is a JS
+ // function, otherwise we have to miss to the runtime and throw an exception.
+ __ bind(&object_is_smi);
+ __ JumpIfSmi(function, &slow_case);
+ __ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
+ __ j(not_equal, &slow_case);
+ __ LoadRoot(eax, Heap::kFalseValueRootIndex);
+ __ ret(0);
- // Get the prototype of the function.
- __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);
+ // Fast-case: The {function} must be a valid JSFunction.
+ __ bind(&fast_case);
+ __ JumpIfSmi(function, &slow_case);
+ __ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
+ __ j(not_equal, &slow_case);
- // Check that the function prototype is a JS object.
- __ JumpIfSmi(prototype, &slow);
- __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
+ // Ensure that {function} has an instance prototype.
+ __ test_b(FieldOperand(function_map, Map::kBitFieldOffset),
+ static_cast<uint8_t>(1 << Map::kHasNonInstancePrototype));
+ __ j(not_zero, &slow_case);
- // Update the global instanceof or call site inlined cache with the current
- // map and function. The cached answer will be set when it is known below.
- if (!HasCallSiteInlineCheck()) {
- __ StoreRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex);
- __ StoreRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
- } else {
- // The constants for the code patching are based on no push instructions
- // at the call site.
- DCHECK(HasArgsInRegisters());
- // Get return address and delta to inlined map check.
- __ mov(scratch, Operand(esp, 0 * kPointerSize));
- __ sub(scratch, Operand(esp, 1 * kPointerSize));
- if (FLAG_debug_code) {
- __ cmpb(Operand(scratch, 0), kCmpEdiOperandByte1);
- __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp1);
- __ cmpb(Operand(scratch, 1), kCmpEdiOperandByte2);
- __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp2);
- }
- __ mov(scratch, Operand(scratch, kDeltaToCmpImmediate));
- __ mov(Operand(scratch, 0), map);
- __ push(map);
- // Scratch points at the cell payload. Calculate the start of the object.
- __ sub(scratch, Immediate(Cell::kValueOffset - 1));
- __ RecordWriteField(scratch, Cell::kValueOffset, map, function,
- kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
- __ pop(map);
- }
+ // Ensure that {function} is not bound.
+ Register const shared_info = scratch;
+ __ mov(shared_info,
+ FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
+ __ BooleanBitTest(shared_info, SharedFunctionInfo::kCompilerHintsOffset,
+ SharedFunctionInfo::kBoundFunction);
+ __ j(not_zero, &slow_case);
- // Loop through the prototype chain of the object looking for the function
- // prototype.
- __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset));
- Label loop, is_instance, is_not_instance;
+ // Get the "prototype" (or initial map) of the {function}.
+ __ mov(function_prototype,
+ FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
+ __ AssertNotSmi(function_prototype);
+
+ // Resolve the prototype if the {function} has an initial map. Afterwards the
+ // {function_prototype} will be either the JSReceiver prototype object or the
+ // hole value, which means that no instances of the {function} were created so
+ // far and hence we should return false.
+ Label function_prototype_valid;
+ Register const function_prototype_map = scratch;
+ __ CmpObjectType(function_prototype, MAP_TYPE, function_prototype_map);
+ __ j(not_equal, &function_prototype_valid, Label::kNear);
+ __ mov(function_prototype,
+ FieldOperand(function_prototype, Map::kPrototypeOffset));
+ __ bind(&function_prototype_valid);
+ __ AssertNotSmi(function_prototype);
+
+ // Update the global instanceof cache with the current {object} map and
+ // {function}. The cached answer will be set when it is known below.
+ __ StoreRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
+ __ StoreRoot(object_map, scratch, Heap::kInstanceofCacheMapRootIndex);
+
+ // Loop through the prototype chain looking for the {function} prototype.
+ // Assume true, and change to false if not found.
+ Register const object_prototype = object_map;
+ Label done, loop;
+ __ mov(eax, isolate()->factory()->true_value());
__ bind(&loop);
- __ cmp(scratch, prototype);
- __ j(equal, &is_instance, Label::kNear);
- Factory* factory = isolate()->factory();
- __ cmp(scratch, Immediate(factory->null_value()));
- __ j(equal, &is_not_instance, Label::kNear);
- __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
- __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset));
- __ jmp(&loop);
-
- __ bind(&is_instance);
- if (!HasCallSiteInlineCheck()) {
- __ mov(eax, Immediate(0));
- __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
- if (ReturnTrueFalseObject()) {
- __ mov(eax, factory->true_value());
- }
- } else {
- // Get return address and delta to inlined map check.
- __ mov(eax, factory->true_value());
- __ mov(scratch, Operand(esp, 0 * kPointerSize));
- __ sub(scratch, Operand(esp, 1 * kPointerSize));
- if (FLAG_debug_code) {
- __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
- __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
- }
- __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
- if (!ReturnTrueFalseObject()) {
- __ Move(eax, Immediate(0));
- }
- }
- __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
-
- __ bind(&is_not_instance);
- if (!HasCallSiteInlineCheck()) {
- __ mov(eax, Immediate(Smi::FromInt(1)));
- __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
- if (ReturnTrueFalseObject()) {
- __ mov(eax, factory->false_value());
- }
- } else {
- // Get return address and delta to inlined map check.
- __ mov(eax, factory->false_value());
- __ mov(scratch, Operand(esp, 0 * kPointerSize));
- __ sub(scratch, Operand(esp, 1 * kPointerSize));
- if (FLAG_debug_code) {
- __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
- __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
- }
- __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
- if (!ReturnTrueFalseObject()) {
- __ Move(eax, Immediate(Smi::FromInt(1)));
- }
- }
- __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
-
- Label object_not_null, object_not_null_or_smi;
- __ bind(&not_js_object);
- // Before null, smi and string value checks, check that the rhs is a function
- // as for a non-function rhs an exception needs to be thrown.
- __ JumpIfSmi(function, &slow, Label::kNear);
- __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch);
- __ j(not_equal, &slow, Label::kNear);
-
- // Null is not instance of anything.
- __ cmp(object, factory->null_value());
- __ j(not_equal, &object_not_null, Label::kNear);
- if (ReturnTrueFalseObject()) {
- __ mov(eax, factory->false_value());
- } else {
- __ Move(eax, Immediate(Smi::FromInt(1)));
- }
- __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
-
- __ bind(&object_not_null);
- // Smi values is not instance of anything.
- __ JumpIfNotSmi(object, &object_not_null_or_smi, Label::kNear);
- if (ReturnTrueFalseObject()) {
- __ mov(eax, factory->false_value());
- } else {
- __ Move(eax, Immediate(Smi::FromInt(1)));
- }
- __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
-
- __ bind(&object_not_null_or_smi);
- // String values is not instance of anything.
- Condition is_string = masm->IsObjectStringType(object, scratch, scratch);
- __ j(NegateCondition(is_string), &slow, Label::kNear);
- if (ReturnTrueFalseObject()) {
- __ mov(eax, factory->false_value());
- } else {
- __ Move(eax, Immediate(Smi::FromInt(1)));
- }
- __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
+ __ mov(object_prototype, FieldOperand(object_map, Map::kPrototypeOffset));
+ __ cmp(object_prototype, function_prototype);
+ __ j(equal, &done, Label::kNear);
+ __ cmp(object_prototype, isolate()->factory()->null_value());
+ __ mov(object_map, FieldOperand(object_prototype, HeapObject::kMapOffset));
+ __ j(not_equal, &loop);
+ __ mov(eax, isolate()->factory()->false_value());
+ __ bind(&done);
+ __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
+ __ ret(0);
- // Slow-case: Go through the JavaScript implementation.
- __ bind(&slow);
- if (!ReturnTrueFalseObject()) {
- // Tail call the builtin which returns 0 or 1.
- if (HasArgsInRegisters()) {
- // Push arguments below return address.
- __ pop(scratch);
- __ push(object);
- __ push(function);
- __ push(scratch);
- }
- __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
- } else {
- // Call the builtin and convert 0/1 to true/false.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ push(object);
- __ push(function);
- __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
- }
- Label true_value, done;
- __ test(eax, eax);
- __ j(zero, &true_value, Label::kNear);
- __ mov(eax, factory->false_value());
- __ jmp(&done, Label::kNear);
- __ bind(&true_value);
- __ mov(eax, factory->true_value());
- __ bind(&done);
- __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
- }
+ // Slow-case: Call the runtime function.
+ __ bind(&slow_case);
+ __ pop(scratch); // Pop return address.
+ __ push(object); // Push {object}.
+ __ push(function); // Push {function}.
+ __ push(scratch); // Push return address.
+ __ TailCallRuntime(Runtime::kInstanceOf, 2, 1);
}
@@ -3103,7 +2886,42 @@ void ToNumberStub::Generate(MacroAssembler* masm) {
__ pop(ecx); // Pop return address.
__ push(eax); // Push argument.
__ push(ecx); // Push return address.
- __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION);
+ __ TailCallRuntime(Runtime::kToNumber, 1, 1);
+}
+
+
+void ToStringStub::Generate(MacroAssembler* masm) {
+ // The ToString stub takes one argument in eax.
+ Label is_number;
+ __ JumpIfSmi(eax, &is_number, Label::kNear);
+
+ Label not_string;
+ __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edi);
+ // eax: receiver
+ // edi: receiver map
+ __ j(above_equal, &not_string, Label::kNear);
+ __ Ret();
+ __ bind(&not_string);
+
+ Label not_heap_number;
+ __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
+ __ j(not_equal, &not_heap_number, Label::kNear);
+ __ bind(&is_number);
+ NumberToStringStub stub(isolate());
+ __ TailCallStub(&stub);
+ __ bind(&not_heap_number);
+
+ Label not_oddball;
+ __ CmpInstanceType(edi, ODDBALL_TYPE);
+ __ j(not_equal, &not_oddball, Label::kNear);
+ __ mov(eax, FieldOperand(eax, Oddball::kToStringOffset));
+ __ Ret();
+ __ bind(&not_oddball);
+
+ __ pop(ecx); // Pop return address.
+ __ push(eax); // Push argument.
+ __ push(ecx); // Push return address.
+ __ TailCallRuntime(Runtime::kToString, 1, 1);
}
@@ -3233,41 +3051,39 @@ void StringHelper::GenerateOneByteCharsCompareLoop(
void StringCompareStub::Generate(MacroAssembler* masm) {
- Label runtime;
-
- // Stack frame on entry.
- // esp[0]: return address
- // esp[4]: right string
- // esp[8]: left string
-
- __ mov(edx, Operand(esp, 2 * kPointerSize)); // left
- __ mov(eax, Operand(esp, 1 * kPointerSize)); // right
+ // ----------- S t a t e -------------
+ // -- edx : left string
+ // -- eax : right string
+ // -- esp[0] : return address
+ // -----------------------------------
+ __ AssertString(edx);
+ __ AssertString(eax);
Label not_same;
__ cmp(edx, eax);
__ j(not_equal, &not_same, Label::kNear);
- STATIC_ASSERT(EQUAL == 0);
- STATIC_ASSERT(kSmiTag == 0);
__ Move(eax, Immediate(Smi::FromInt(EQUAL)));
__ IncrementCounter(isolate()->counters()->string_compare_native(), 1);
- __ ret(2 * kPointerSize);
+ __ Ret();
__ bind(&not_same);
// Check that both objects are sequential one-byte strings.
+ Label runtime;
__ JumpIfNotBothSequentialOneByteStrings(edx, eax, ecx, ebx, &runtime);
// Compare flat one-byte strings.
- // Drop arguments from the stack.
- __ pop(ecx);
- __ add(esp, Immediate(2 * kPointerSize));
- __ push(ecx);
+ __ IncrementCounter(isolate()->counters()->string_compare_native(), 1);
StringHelper::GenerateCompareFlatOneByteStrings(masm, edx, eax, ecx, ebx,
edi);
// Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
// tagged as a small integer.
__ bind(&runtime);
+ __ PopReturnAddressTo(ecx);
+ __ Push(edx);
+ __ Push(eax);
+ __ PushReturnAddressFrom(ecx);
__ TailCallRuntime(Runtime::kStringCompare, 2, 1);
}
@@ -3300,6 +3116,37 @@ void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
}
+void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
+ DCHECK_EQ(CompareICState::BOOLEAN, state());
+ Label miss;
+ Label::Distance const miss_distance =
+ masm->emit_debug_code() ? Label::kFar : Label::kNear;
+
+ __ JumpIfSmi(edx, &miss, miss_distance);
+ __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
+ __ JumpIfSmi(eax, &miss, miss_distance);
+ __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
+ __ JumpIfNotRoot(ecx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
+ __ JumpIfNotRoot(ebx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
+ if (op() != Token::EQ_STRICT && is_strong(strength())) {
+ __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0, 1);
+ } else {
+ if (!Token::IsEqualityOp(op())) {
+ __ mov(eax, FieldOperand(eax, Oddball::kToNumberOffset));
+ __ AssertSmi(eax);
+ __ mov(edx, FieldOperand(edx, Oddball::kToNumberOffset));
+ __ AssertSmi(edx);
+ __ xchg(eax, edx);
+ }
+ __ sub(eax, edx);
+ __ Ret();
+ }
+
+ __ bind(&miss);
+ GenerateMiss(masm);
+}
+
+
void CompareICStub::GenerateSmis(MacroAssembler* masm) {
DCHECK(state() == CompareICState::SMI);
Label miss;
@@ -3589,15 +3436,24 @@ void CompareICStub::GenerateKnownObjects(MacroAssembler* masm) {
__ JumpIfSmi(ecx, &miss, Label::kNear);
__ GetWeakValue(edi, cell);
- __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
- __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
- __ cmp(ecx, edi);
+ __ cmp(edi, FieldOperand(eax, HeapObject::kMapOffset));
__ j(not_equal, &miss, Label::kNear);
- __ cmp(ebx, edi);
+ __ cmp(edi, FieldOperand(edx, HeapObject::kMapOffset));
__ j(not_equal, &miss, Label::kNear);
- __ sub(eax, edx);
- __ ret(0);
+ if (Token::IsEqualityOp(op())) {
+ __ sub(eax, edx);
+ __ ret(0);
+ } else if (is_strong(strength())) {
+ __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion, 0, 1);
+ } else {
+ __ PopReturnAddressTo(ecx);
+ __ Push(edx);
+ __ Push(eax);
+ __ Push(Immediate(Smi::FromInt(NegativeComparisonResult(GetCondition()))));
+ __ PushReturnAddressFrom(ecx);
+ __ TailCallRuntime(Runtime::kCompare, 3, 1);
+ }
__ bind(&miss);
GenerateMiss(masm);
@@ -4127,14 +3983,14 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
- EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister());
+ __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
LoadICStub stub(isolate(), state());
stub.GenerateForTrampoline(masm);
}
void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
- EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister());
+ __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
KeyedLoadICStub stub(isolate(), state());
stub.GenerateForTrampoline(masm);
}
@@ -4358,14 +4214,14 @@ void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) {
- EmitLoadTypeFeedbackVector(masm, VectorStoreICDescriptor::VectorRegister());
+ __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
VectorStoreICStub stub(isolate(), state());
stub.GenerateForTrampoline(masm);
}
void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
- EmitLoadTypeFeedbackVector(masm, VectorStoreICDescriptor::VectorRegister());
+ __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
VectorKeyedStoreICStub stub(isolate(), state());
stub.GenerateForTrampoline(masm);
}
@@ -4381,11 +4237,180 @@ void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
}
+// value is on the stack already.
+static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register receiver,
+ Register key, Register vector,
+ Register slot, Register feedback,
+ Label* miss) {
+ // feedback initially contains the feedback array
+ Label next, next_loop, prepare_next;
+ Label load_smi_map, compare_map;
+ Label start_polymorphic;
+ ExternalReference virtual_register =
+ ExternalReference::vector_store_virtual_register(masm->isolate());
+
+ __ push(receiver);
+ __ push(vector);
+
+ Register receiver_map = receiver;
+ Register cached_map = vector;
+
+ // Receiver might not be a heap object.
+ __ JumpIfSmi(receiver, &load_smi_map);
+ __ mov(receiver_map, FieldOperand(receiver, 0));
+ __ bind(&compare_map);
+ __ mov(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0)));
+
+ // A named keyed store might have a 2 element array, all other cases can count
+ // on an array with at least 2 {map, handler} pairs, so they can go right
+ // into polymorphic array handling.
+ __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
+ __ j(not_equal, &start_polymorphic);
+
+ // found, now call handler.
+ Register handler = feedback;
+ DCHECK(handler.is(VectorStoreICDescriptor::ValueRegister()));
+ __ mov(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1)));
+ __ pop(vector);
+ __ pop(receiver);
+ __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
+ __ mov(Operand::StaticVariable(virtual_register), handler);
+ __ pop(handler); // Pop "value".
+ __ jmp(Operand::StaticVariable(virtual_register));
+
+ // Polymorphic, we have to loop from 2 to N
+
+ // TODO(mvstanton): I think there is a bug here, we are assuming the
+ // array has more than one map/handler pair, but we call this function in the
+ // keyed store with a string key case, where it might be just an array of two
+ // elements.
+
+ __ bind(&start_polymorphic);
+ __ push(key);
+ Register counter = key;
+ __ mov(counter, Immediate(Smi::FromInt(2)));
+ __ bind(&next_loop);
+ __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
+ FixedArray::kHeaderSize));
+ __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
+ __ j(not_equal, &prepare_next);
+ __ mov(handler, FieldOperand(feedback, counter, times_half_pointer_size,
+ FixedArray::kHeaderSize + kPointerSize));
+ __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
+ __ pop(key);
+ __ pop(vector);
+ __ pop(receiver);
+ __ mov(Operand::StaticVariable(virtual_register), handler);
+ __ pop(handler); // Pop "value".
+ __ jmp(Operand::StaticVariable(virtual_register));
+
+ __ bind(&prepare_next);
+ __ add(counter, Immediate(Smi::FromInt(2)));
+ __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
+ __ j(less, &next_loop);
+
+ // We exhausted our array of map handler pairs.
+ __ pop(key);
+ __ pop(vector);
+ __ pop(receiver);
+ __ jmp(miss);
+
+ __ bind(&load_smi_map);
+ __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
+ __ jmp(&compare_map);
+}
+
+
+static void HandleMonomorphicStoreCase(MacroAssembler* masm, Register receiver,
+ Register key, Register vector,
+ Register slot, Register weak_cell,
+ Label* miss) {
+ // The store ic value is on the stack.
+ DCHECK(weak_cell.is(VectorStoreICDescriptor::ValueRegister()));
+ ExternalReference virtual_register =
+ ExternalReference::vector_store_virtual_register(masm->isolate());
+
+ // feedback initially contains the feedback array
+ Label compare_smi_map;
+
+ // Move the weak map into the weak_cell register.
+ Register ic_map = weak_cell;
+ __ mov(ic_map, FieldOperand(weak_cell, WeakCell::kValueOffset));
+
+ // Receiver might not be a heap object.
+ __ JumpIfSmi(receiver, &compare_smi_map);
+ __ cmp(ic_map, FieldOperand(receiver, 0));
+ __ j(not_equal, miss);
+ __ mov(weak_cell, FieldOperand(vector, slot, times_half_pointer_size,
+ FixedArray::kHeaderSize + kPointerSize));
+ __ lea(weak_cell, FieldOperand(weak_cell, Code::kHeaderSize));
+ // Put the store ic value back in its register.
+ __ mov(Operand::StaticVariable(virtual_register), weak_cell);
+ __ pop(weak_cell); // Pop "value".
+ // jump to the handler.
+ __ jmp(Operand::StaticVariable(virtual_register));
+
+ // In microbenchmarks, it made sense to unroll this code so that the call to
+ // the handler is duplicated for a HeapObject receiver and a Smi receiver.
+ __ bind(&compare_smi_map);
+ __ CompareRoot(ic_map, Heap::kHeapNumberMapRootIndex);
+ __ j(not_equal, miss);
+ __ mov(weak_cell, FieldOperand(vector, slot, times_half_pointer_size,
+ FixedArray::kHeaderSize + kPointerSize));
+ __ lea(weak_cell, FieldOperand(weak_cell, Code::kHeaderSize));
+ __ mov(Operand::StaticVariable(virtual_register), weak_cell);
+ __ pop(weak_cell); // Pop "value".
+ // jump to the handler.
+ __ jmp(Operand::StaticVariable(virtual_register));
+}
+
+
void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
+ Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // edx
+ Register key = VectorStoreICDescriptor::NameRegister(); // ecx
+ Register value = VectorStoreICDescriptor::ValueRegister(); // eax
+ Register vector = VectorStoreICDescriptor::VectorRegister(); // ebx
+ Register slot = VectorStoreICDescriptor::SlotRegister(); // edi
Label miss;
- // TODO(mvstanton): Implement.
+ __ push(value);
+
+ Register scratch = value;
+ __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
+ FixedArray::kHeaderSize));
+
+ // Is it a weak cell?
+ Label try_array;
+ Label not_array, smi_key, key_okay;
+ __ CompareRoot(FieldOperand(scratch, 0), Heap::kWeakCellMapRootIndex);
+ __ j(not_equal, &try_array);
+ HandleMonomorphicStoreCase(masm, receiver, key, vector, slot, scratch, &miss);
+
+ // Is it a fixed array?
+ __ bind(&try_array);
+ __ CompareRoot(FieldOperand(scratch, 0), Heap::kFixedArrayMapRootIndex);
+ __ j(not_equal, &not_array);
+ HandlePolymorphicStoreCase(masm, receiver, key, vector, slot, scratch, &miss);
+
+ __ bind(&not_array);
+ __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex);
+ __ j(not_equal, &miss);
+
+ __ pop(value);
+ __ push(slot);
+ __ push(vector);
+ Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags(
+ Code::ComputeHandlerFlags(Code::STORE_IC));
+ masm->isolate()->stub_cache()->GenerateProbe(masm, Code::STORE_IC, code_flags,
+ receiver, key, slot, no_reg);
+ __ pop(vector);
+ __ pop(slot);
+ Label no_pop_miss;
+ __ jmp(&no_pop_miss);
+
__ bind(&miss);
+ __ pop(value);
+ __ bind(&no_pop_miss);
StoreIC::GenerateMiss(masm);
}
@@ -4400,29 +4425,161 @@ void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
}
+static void HandlePolymorphicKeyedStoreCase(MacroAssembler* masm,
+ Register receiver, Register key,
+ Register vector, Register slot,
+ Register feedback, Label* miss) {
+ // feedback initially contains the feedback array
+ Label next, next_loop, prepare_next;
+ Label load_smi_map, compare_map;
+ Label transition_call;
+ Label pop_and_miss;
+ ExternalReference virtual_register =
+ ExternalReference::vector_store_virtual_register(masm->isolate());
+
+ __ push(receiver);
+ __ push(vector);
+
+ Register receiver_map = receiver;
+ Register cached_map = vector;
+
+ // Receiver might not be a heap object.
+ __ JumpIfSmi(receiver, &load_smi_map);
+ __ mov(receiver_map, FieldOperand(receiver, 0));
+ __ bind(&compare_map);
+
+ // Polymorphic, we have to loop from 0 to N - 1
+ __ push(key);
+ // On the stack we have:
+ // key (esp)
+ // vector
+ // receiver
+ // value
+ Register counter = key;
+ __ mov(counter, Immediate(Smi::FromInt(0)));
+ __ bind(&next_loop);
+ __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
+ FixedArray::kHeaderSize));
+ __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
+ __ j(not_equal, &prepare_next);
+ __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
+ FixedArray::kHeaderSize + kPointerSize));
+ __ CompareRoot(cached_map, Heap::kUndefinedValueRootIndex);
+ __ j(not_equal, &transition_call);
+ __ mov(feedback, FieldOperand(feedback, counter, times_half_pointer_size,
+ FixedArray::kHeaderSize + 2 * kPointerSize));
+ __ pop(key);
+ __ pop(vector);
+ __ pop(receiver);
+ __ lea(feedback, FieldOperand(feedback, Code::kHeaderSize));
+ __ mov(Operand::StaticVariable(virtual_register), feedback);
+ __ pop(feedback); // Pop "value".
+ __ jmp(Operand::StaticVariable(virtual_register));
+
+ __ bind(&transition_call);
+ // Oh holy hell this will be tough.
+ // The map goes in vector register.
+ __ mov(receiver, FieldOperand(cached_map, WeakCell::kValueOffset));
+ // The weak cell may have been cleared.
+ __ JumpIfSmi(receiver, &pop_and_miss);
+ // slot goes on the stack, and holds return address.
+ __ xchg(slot, Operand(esp, 4 * kPointerSize));
+ // Get the handler in value.
+ __ mov(feedback, FieldOperand(feedback, counter, times_half_pointer_size,
+ FixedArray::kHeaderSize + 2 * kPointerSize));
+ __ lea(feedback, FieldOperand(feedback, Code::kHeaderSize));
+ // Pop key into place.
+ __ pop(key);
+ // Put the return address on top of stack, vector goes in slot.
+ __ xchg(slot, Operand(esp, 0));
+ // put the map on the stack, receiver holds receiver.
+ __ xchg(receiver, Operand(esp, 1 * kPointerSize));
+ // put the vector on the stack, slot holds value.
+ __ xchg(slot, Operand(esp, 2 * kPointerSize));
+ // feedback (value) = value, slot = handler.
+ __ xchg(feedback, slot);
+ __ jmp(slot);
+
+ __ bind(&prepare_next);
+ __ add(counter, Immediate(Smi::FromInt(3)));
+ __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
+ __ j(less, &next_loop);
+
+ // We exhausted our array of map handler pairs.
+ __ bind(&pop_and_miss);
+ __ pop(key);
+ __ pop(vector);
+ __ pop(receiver);
+ __ jmp(miss);
+
+ __ bind(&load_smi_map);
+ __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
+ __ jmp(&compare_map);
+}
+
+
void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
+ Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // edx
+ Register key = VectorStoreICDescriptor::NameRegister(); // ecx
+ Register value = VectorStoreICDescriptor::ValueRegister(); // eax
+ Register vector = VectorStoreICDescriptor::VectorRegister(); // ebx
+ Register slot = VectorStoreICDescriptor::SlotRegister(); // edi
Label miss;
- // TODO(mvstanton): Implement.
+ __ push(value);
+
+ Register scratch = value;
+ __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
+ FixedArray::kHeaderSize));
+
+ // Is it a weak cell?
+ Label try_array;
+ Label not_array, smi_key, key_okay;
+ __ CompareRoot(FieldOperand(scratch, 0), Heap::kWeakCellMapRootIndex);
+ __ j(not_equal, &try_array);
+ HandleMonomorphicStoreCase(masm, receiver, key, vector, slot, scratch, &miss);
+
+ // Is it a fixed array?
+ __ bind(&try_array);
+ __ CompareRoot(FieldOperand(scratch, 0), Heap::kFixedArrayMapRootIndex);
+ __ j(not_equal, &not_array);
+ HandlePolymorphicKeyedStoreCase(masm, receiver, key, vector, slot, scratch,
+ &miss);
+
+ __ bind(&not_array);
+ Label try_poly_name;
+ __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex);
+ __ j(not_equal, &try_poly_name);
+
+ __ pop(value);
+
+ Handle<Code> megamorphic_stub =
+ KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
+ __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);
+
+ __ bind(&try_poly_name);
+ // We might have a name in feedback, and a fixed array in the next slot.
+ __ cmp(key, scratch);
+ __ j(not_equal, &miss);
+ // If the name comparison succeeded, we know we have a fixed array with
+ // at least one map/handler pair.
+ __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
+ FixedArray::kHeaderSize + kPointerSize));
+ HandlePolymorphicStoreCase(masm, receiver, key, vector, slot, scratch, &miss);
+
__ bind(&miss);
+ __ pop(value);
KeyedStoreIC::GenerateMiss(masm);
}
void CallICTrampolineStub::Generate(MacroAssembler* masm) {
- EmitLoadTypeFeedbackVector(masm, ebx);
+ __ EmitLoadTypeFeedbackVector(ebx);
CallICStub stub(isolate(), state());
__ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
}
-void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) {
- EmitLoadTypeFeedbackVector(masm, ebx);
- CallIC_ArrayStub stub(isolate(), state());
- __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
-}
-
-
void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
if (masm->isolate()->function_entry_hook() != NULL) {
ProfileEntryHookStub stub(masm->isolate());