Diffstat (limited to 'deps/v8/src/x64/code-stubs-x64.cc')
 -rw-r--r--  deps/v8/src/x64/code-stubs-x64.cc | 428
 1 file changed, 5 insertions(+), 423 deletions(-)
diff --git a/deps/v8/src/x64/code-stubs-x64.cc b/deps/v8/src/x64/code-stubs-x64.cc
index d62aafe573..27c7475945 100644
--- a/deps/v8/src/x64/code-stubs-x64.cc
+++ b/deps/v8/src/x64/code-stubs-x64.cc
@@ -32,17 +32,6 @@ void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
__ TailCallRuntime(Runtime::kNewArray);
}
-void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
- Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
- descriptor->Initialize(rax, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
-}
-
-void FastFunctionBindStub::InitializeDescriptor(
- CodeStubDescriptor* descriptor) {
- Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry;
- descriptor->Initialize(rax, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
-}
-
void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
ExternalReference miss) {
// Update the static counter each time a new code stub is generated.
@@ -1117,9 +1106,11 @@ void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
if (cc == equal) {
{
FrameScope scope(masm, StackFrame::INTERNAL);
- __ Push(rdx);
- __ Push(rax);
- __ CallRuntime(strict() ? Runtime::kStrictEqual : Runtime::kEqual);
+ __ Push(rsi);
+ __ Call(strict() ? isolate()->builtins()->StrictEqual()
+ : isolate()->builtins()->Equal(),
+ RelocInfo::CODE_TARGET);
+ __ Pop(rsi);
}
// Turn true into 0 and false into some non-zero value.
STATIC_ASSERT(EQUAL == 0);
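All five of the commit's inserted lines land in this hunk: instead of pushing rdx/rax and calling the runtime, the stub now calls the StrictEqual or Equal builtin directly (the operands stay in registers), saving and restoring rsi, evidently to preserve the context register across the builtin call. The result encoding around it is unchanged; as a minimal sketch (not V8 code):

static int EncodeEqualityResult(bool is_equal) {
  // Callers test the result against zero, so EQUAL must encode as 0.
  return is_equal ? 0 : 1;  // true -> 0, false -> some non-zero value
}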
@@ -1976,37 +1967,6 @@ void StringCharFromCodeGenerator::GenerateSlow(
__ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
}
-
-void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
- Register dest,
- Register src,
- Register count,
- String::Encoding encoding) {
- // Nothing to do for zero characters.
- Label done;
- __ testl(count, count);
- __ j(zero, &done, Label::kNear);
-
- // Make count the number of bytes to copy.
- if (encoding == String::TWO_BYTE_ENCODING) {
- STATIC_ASSERT(2 == sizeof(uc16));
- __ addl(count, count);
- }
-
- // Copy remaining characters.
- Label loop;
- __ bind(&loop);
- __ movb(kScratchRegister, Operand(src, 0));
- __ movb(Operand(dest, 0), kScratchRegister);
- __ incp(src);
- __ incp(dest);
- __ decl(count);
- __ j(not_zero, &loop);
-
- __ bind(&done);
-}
-
-
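For reference, the deleted GenerateCopyCharacters emitted a simple byte-wise loop; a C++ equivalent (a sketch, not V8 API) looks like:

#include <cstdint>

static void CopyCharacters(uint8_t* dest, const uint8_t* src,
                           uint32_t count, bool two_byte_encoding) {
  if (count == 0) return;                 // nothing to do for zero characters
  if (two_byte_encoding) count += count;  // sizeof(uc16) == 2: count becomes bytes
  while (count-- != 0) *dest++ = *src++;  // copy one byte per iteration
}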
void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
Register left,
Register right,
@@ -2616,61 +2576,6 @@ void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
__ jmp(done);
}
-
-// Probe the name dictionary in the |elements| register. Jump to the
-// |done| label if a property with the given name is found leaving the
-// index into the dictionary in |r1|. Jump to the |miss| label
-// otherwise.
-void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
- Label* miss,
- Label* done,
- Register elements,
- Register name,
- Register r0,
- Register r1) {
- DCHECK(!elements.is(r0));
- DCHECK(!elements.is(r1));
- DCHECK(!name.is(r0));
- DCHECK(!name.is(r1));
-
- __ AssertName(name);
-
- __ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset));
- __ decl(r0);
-
- for (int i = 0; i < kInlinedProbes; i++) {
- // Compute the masked index: (hash + i + i * i) & mask.
- __ movl(r1, FieldOperand(name, Name::kHashFieldOffset));
- __ shrl(r1, Immediate(Name::kHashShift));
- if (i > 0) {
- __ addl(r1, Immediate(NameDictionary::GetProbeOffset(i)));
- }
- __ andp(r1, r0);
-
- // Scale the index by multiplying by the entry size.
- STATIC_ASSERT(NameDictionary::kEntrySize == 3);
- __ leap(r1, Operand(r1, r1, times_2, 0)); // r1 = r1 * 3
-
- // Check if the key is identical to the name.
- __ cmpp(name, Operand(elements, r1, times_pointer_size,
- kElementsStartOffset - kHeapObjectTag));
- __ j(equal, done);
- }
-
- NameDictionaryLookupStub stub(masm->isolate(), elements, r0, r1,
- POSITIVE_LOOKUP);
- __ Push(name);
- __ movl(r0, FieldOperand(name, Name::kHashFieldOffset));
- __ shrl(r0, Immediate(Name::kHashShift));
- __ Push(r0);
- __ CallStub(&stub);
-
- __ testp(r0, r0);
- __ j(zero, miss);
- __ jmp(done);
-}
-
-
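The deleted GeneratePositiveLookup inlined a fixed number of probes of the name dictionary before deferring to an out-of-line stub. A sketch of the probing scheme it implemented (illustrative types, not V8 API; the probe formula follows the deleted comment):

#include <cstdint>

struct Entry { const void* key; };  // models one 3-slot dictionary entry

static int PositiveLookup(const Entry* entries, uint32_t capacity,
                          uint32_t hash, const void* name,
                          int inlined_probes) {
  uint32_t mask = capacity - 1;  // capacity is a power of two
  for (int i = 0; i < inlined_probes; i++) {
    // Masked index: (hash + i + i*i) & mask; the assembly then scales the
    // index by NameDictionary::kEntrySize == 3 to address the array.
    uint32_t index = (hash + i + i * i) & mask;
    if (entries[index].key == name) return static_cast<int>(index);  // found
  }
  return -1;  // fall back to the called stub, then the miss label
}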
void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
// This stub overrides SometimesSetsUpAFrame() to return false. That means
// we cannot call anything that could cause a GC from this stub.
@@ -2949,203 +2854,6 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
__ jmp(rcx); // Return to IC Miss stub, continuation still on stack.
}
-static void HandleArrayCases(MacroAssembler* masm, Register feedback,
- Register receiver_map, Register scratch1,
- Register scratch2, Register scratch3,
- bool is_polymorphic, Label* miss) {
- // feedback initially contains the feedback array
- Label next_loop, prepare_next;
- Label start_polymorphic;
-
- Register counter = scratch1;
- Register length = scratch2;
- Register cached_map = scratch3;
-
- __ movp(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0)));
- __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
- __ j(not_equal, &start_polymorphic);
-
- // found, now call handler.
- Register handler = feedback;
- __ movp(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1)));
- __ leap(handler, FieldOperand(handler, Code::kHeaderSize));
- __ jmp(handler);
-
- // Polymorphic, we have to loop from 2 to N
- __ bind(&start_polymorphic);
- __ SmiToInteger32(length, FieldOperand(feedback, FixedArray::kLengthOffset));
- if (!is_polymorphic) {
- // If the IC could be monomorphic we have to make sure we don't go past the
- // end of the feedback array.
- __ cmpl(length, Immediate(2));
- __ j(equal, miss);
- }
- __ movl(counter, Immediate(2));
-
- __ bind(&next_loop);
- __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size,
- FixedArray::kHeaderSize));
- __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
- __ j(not_equal, &prepare_next);
- __ movp(handler, FieldOperand(feedback, counter, times_pointer_size,
- FixedArray::kHeaderSize + kPointerSize));
- __ leap(handler, FieldOperand(handler, Code::kHeaderSize));
- __ jmp(handler);
-
- __ bind(&prepare_next);
- __ addl(counter, Immediate(2));
- __ cmpl(counter, length);
- __ j(less, &next_loop);
-
- // We exhausted our array of map handler pairs.
- __ jmp(miss);
-}
-
-
-static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver,
- Register receiver_map, Register feedback,
- Register vector, Register integer_slot,
- Label* compare_map, Label* load_smi_map,
- Label* try_array) {
- __ JumpIfSmi(receiver, load_smi_map);
- __ movp(receiver_map, FieldOperand(receiver, 0));
-
- __ bind(compare_map);
- __ cmpp(receiver_map, FieldOperand(feedback, WeakCell::kValueOffset));
- __ j(not_equal, try_array);
- Register handler = feedback;
- __ movp(handler, FieldOperand(vector, integer_slot, times_pointer_size,
- FixedArray::kHeaderSize + kPointerSize));
- __ leap(handler, FieldOperand(handler, Code::kHeaderSize));
- __ jmp(handler);
-}
-
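Together, the two deleted helpers implemented IC dispatch over the feedback vector: HandleMonomorphicCase compared the receiver's map against a weak cell and jumped to the handler in the next slot (smi receivers use the heap-number map, loaded at the load_smi_map label), while HandleArrayCases scanned an array of (weak map, handler) pairs. A sketch of the array scan (illustrative types with weak cells already resolved, not V8 API):

#include <cstddef>
#include <vector>

using Slot = const void*;  // models one FixedArray element

static Slot FindHandler(const std::vector<Slot>& feedback, Slot receiver_map,
                        bool is_polymorphic) {
  if (feedback[0] == receiver_map) return feedback[1];  // found, call handler
  // If the IC could be monomorphic, don't read past a 2-element array.
  if (!is_polymorphic && feedback.size() == 2) return nullptr;  // miss
  for (size_t i = 2; i + 1 < feedback.size(); i += 2) {  // pairs from 2 to N
    if (feedback[i] == receiver_map) return feedback[i + 1];
  }
  return nullptr;  // exhausted the map/handler pairs: miss
}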
-void KeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
- __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister());
- KeyedStoreICStub stub(isolate(), state());
- stub.GenerateForTrampoline(masm);
-}
-
-void KeyedStoreICStub::Generate(MacroAssembler* masm) {
- GenerateImpl(masm, false);
-}
-
-void KeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
- GenerateImpl(masm, true);
-}
-
-
-static void HandlePolymorphicKeyedStoreCase(MacroAssembler* masm,
- Register receiver_map,
- Register feedback, Register scratch,
- Register scratch1,
- Register scratch2, Label* miss) {
- // feedback initially contains the feedback array
- Label next, next_loop, prepare_next;
- Label transition_call;
-
- Register cached_map = scratch;
- Register counter = scratch1;
- Register length = scratch2;
-
- // Polymorphic, we have to loop from 0 to N - 1
- __ movp(counter, Immediate(0));
- __ movp(length, FieldOperand(feedback, FixedArray::kLengthOffset));
- __ SmiToInteger32(length, length);
-
- __ bind(&next_loop);
- __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size,
- FixedArray::kHeaderSize));
- __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
- __ j(not_equal, &prepare_next);
- __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size,
- FixedArray::kHeaderSize + kPointerSize));
- __ CompareRoot(cached_map, Heap::kUndefinedValueRootIndex);
- __ j(not_equal, &transition_call);
- __ movp(feedback, FieldOperand(feedback, counter, times_pointer_size,
- FixedArray::kHeaderSize + 2 * kPointerSize));
- __ leap(feedback, FieldOperand(feedback, Code::kHeaderSize));
- __ jmp(feedback);
-
- __ bind(&transition_call);
- DCHECK(receiver_map.is(StoreTransitionDescriptor::MapRegister()));
- __ movp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
- // The weak cell may have been cleared.
- __ JumpIfSmi(receiver_map, miss);
- // Get the handler in value.
- __ movp(feedback, FieldOperand(feedback, counter, times_pointer_size,
- FixedArray::kHeaderSize + 2 * kPointerSize));
- __ leap(feedback, FieldOperand(feedback, Code::kHeaderSize));
- __ jmp(feedback);
-
- __ bind(&prepare_next);
- __ addl(counter, Immediate(3));
- __ cmpl(counter, length);
- __ j(less, &next_loop);
-
- // We exhausted our array of map handler pairs.
- __ jmp(miss);
-}
-
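The deleted HandlePolymorphicKeyedStoreCase walked a different layout: triples of (weak receiver map, transition map or undefined, handler), advancing the counter by 3 slots per entry. A sketch (illustrative types, not V8 API):

#include <cstddef>
#include <vector>

struct StoreEntry {
  const void* map;         // weak receiver map (never matches once cleared)
  const void* transition;  // transition target, or nullptr for "undefined"
  const void* handler;
};

static const void* FindStoreHandler(const std::vector<StoreEntry>& feedback,
                                    const void* receiver_map,
                                    const void** out_transition) {
  for (const StoreEntry& entry : feedback) {
    if (entry.map != receiver_map) continue;
    *out_transition = entry.transition;  // non-null means a transitioning store
    return entry.handler;
  }
  return nullptr;  // exhausted the triples: miss
}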
-void KeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
- Register receiver = StoreWithVectorDescriptor::ReceiverRegister(); // rdx
- Register key = StoreWithVectorDescriptor::NameRegister(); // rcx
- Register vector = StoreWithVectorDescriptor::VectorRegister(); // rbx
- Register slot = StoreWithVectorDescriptor::SlotRegister(); // rdi
- DCHECK(StoreWithVectorDescriptor::ValueRegister().is(rax)); // rax
- Register feedback = r8;
- Register integer_slot = r9;
- Register receiver_map = r11;
- DCHECK(!AreAliased(feedback, integer_slot, vector, slot, receiver_map));
-
- __ SmiToInteger32(integer_slot, slot);
- __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
- FixedArray::kHeaderSize));
-
- // Try to quickly handle the monomorphic case without knowing for sure
- // if we have a weak cell in feedback. We do know it's safe to look
- // at WeakCell::kValueOffset.
- Label try_array, load_smi_map, compare_map;
- Label not_array, miss;
- HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector,
- integer_slot, &compare_map, &load_smi_map, &try_array);
-
- // Is it a fixed array?
- __ bind(&try_array);
- __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
- __ j(not_equal, &not_array);
- HandlePolymorphicKeyedStoreCase(masm, receiver_map, feedback, integer_slot,
- r15, r14, &miss);
-
- __ bind(&not_array);
- Label try_poly_name;
- __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
- __ j(not_equal, &try_poly_name);
-
- Handle<Code> megamorphic_stub =
- KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
- __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);
-
- __ bind(&try_poly_name);
- // We might have a name in feedback, and a fixed array in the next slot.
- __ cmpp(key, feedback);
- __ j(not_equal, &miss);
- // If the name comparison succeeded, we know we have a fixed array with
- // at least one map/handler pair.
- __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
- FixedArray::kHeaderSize + kPointerSize));
- HandleArrayCases(masm, feedback, receiver_map, integer_slot, r14, r15, false,
- &miss);
-
- __ bind(&miss);
- KeyedStoreIC::GenerateMiss(masm);
-
- __ bind(&load_smi_map);
- __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
- __ jmp(&compare_map);
-}
-
-
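The deleted KeyedStoreICStub::GenerateImpl tried four strategies in order before giving up; a sketch of that classification (illustrative flags, not V8 API):

enum class StoreDispatch {
  kMonomorphic,      // slot holds a weak cell matching the receiver map
  kPolymorphic,      // slot holds a fixed array of (map, transition, handler) triples
  kMegamorphic,      // slot holds the megamorphic sentinel symbol
  kPolymorphicName,  // slot holds the key; the next slot holds map/handler pairs
  kMiss
};

static StoreDispatch Classify(bool map_matches_weak_cell, bool is_fixed_array,
                              bool is_megamorphic_symbol, bool equals_key) {
  if (map_matches_weak_cell) return StoreDispatch::kMonomorphic;
  if (is_fixed_array) return StoreDispatch::kPolymorphic;
  if (is_megamorphic_symbol) return StoreDispatch::kMegamorphic;
  if (equals_key) return StoreDispatch::kPolymorphicName;
  return StoreDispatch::kMiss;  // falls through to KeyedStoreIC::GenerateMiss
}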
void CallICTrampolineStub::Generate(MacroAssembler* masm) {
__ EmitLoadTypeFeedbackVector(rbx);
CallICStub stub(isolate(), state());
@@ -3507,132 +3215,6 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
GenerateCase(masm, FAST_ELEMENTS);
}
-
-void FastNewObjectStub::Generate(MacroAssembler* masm) {
- // ----------- S t a t e -------------
- // -- rdi : target
- // -- rdx : new target
- // -- rsi : context
- // -- rsp[0] : return address
- // -----------------------------------
- __ AssertFunction(rdi);
- __ AssertReceiver(rdx);
-
- // Verify that the new target is a JSFunction.
- Label new_object;
- __ CmpObjectType(rdx, JS_FUNCTION_TYPE, rbx);
- __ j(not_equal, &new_object);
-
- // Load the initial map and verify that it's in fact a map.
- __ movp(rcx, FieldOperand(rdx, JSFunction::kPrototypeOrInitialMapOffset));
- __ JumpIfSmi(rcx, &new_object);
- __ CmpObjectType(rcx, MAP_TYPE, rbx);
- __ j(not_equal, &new_object);
-
- // Fall back to runtime if the target differs from the new target's
- // initial map constructor.
- __ cmpp(rdi, FieldOperand(rcx, Map::kConstructorOrBackPointerOffset));
- __ j(not_equal, &new_object);
-
- // Allocate the JSObject on the heap.
- Label allocate, done_allocate;
- __ movzxbl(rbx, FieldOperand(rcx, Map::kInstanceSizeOffset));
- __ leal(rbx, Operand(rbx, times_pointer_size, 0));
- __ Allocate(rbx, rax, rdi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
- __ bind(&done_allocate);
-
- // Initialize the JSObject fields.
- __ movp(FieldOperand(rax, JSObject::kMapOffset), rcx);
- __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
- __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
- __ movp(FieldOperand(rax, JSObject::kElementsOffset), rbx);
- STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
- __ leap(rbx, FieldOperand(rax, JSObject::kHeaderSize));
-
- // ----------- S t a t e -------------
- // -- rax : result (tagged)
- // -- rbx : result fields (untagged)
- // -- rdi : result end (untagged)
- // -- rcx : initial map
- // -- rsi : context
- // -- rsp[0] : return address
- // -----------------------------------
-
- // Perform in-object slack tracking if requested.
- Label slack_tracking;
- STATIC_ASSERT(Map::kNoSlackTracking == 0);
- __ LoadRoot(r11, Heap::kUndefinedValueRootIndex);
- __ testl(FieldOperand(rcx, Map::kBitField3Offset),
- Immediate(Map::ConstructionCounter::kMask));
- __ j(not_zero, &slack_tracking, Label::kNear);
- {
- // Initialize all in-object fields with undefined.
- __ InitializeFieldsWithFiller(rbx, rdi, r11);
- __ Ret();
- }
- __ bind(&slack_tracking);
- {
- // Decrease generous allocation count.
- STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
- __ subl(FieldOperand(rcx, Map::kBitField3Offset),
- Immediate(1 << Map::ConstructionCounter::kShift));
-
- // Initialize the in-object fields with undefined.
- __ movzxbl(rdx, FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset));
- __ negp(rdx);
- __ leap(rdx, Operand(rdi, rdx, times_pointer_size, 0));
- __ InitializeFieldsWithFiller(rbx, rdx, r11);
-
- // Initialize the remaining (reserved) fields with one pointer filler map.
- __ LoadRoot(r11, Heap::kOnePointerFillerMapRootIndex);
- __ InitializeFieldsWithFiller(rdx, rdi, r11);
-
- // Check if we can finalize the instance size.
- Label finalize;
- STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
- __ testl(FieldOperand(rcx, Map::kBitField3Offset),
- Immediate(Map::ConstructionCounter::kMask));
- __ j(zero, &finalize, Label::kNear);
- __ Ret();
-
- // Finalize the instance size.
- __ bind(&finalize);
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ Push(rax);
- __ Push(rcx);
- __ CallRuntime(Runtime::kFinalizeInstanceSize);
- __ Pop(rax);
- }
- __ Ret();
- }
-
- // Fall back to %AllocateInNewSpace.
- __ bind(&allocate);
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ Integer32ToSmi(rbx, rbx);
- __ Push(rcx);
- __ Push(rbx);
- __ CallRuntime(Runtime::kAllocateInNewSpace);
- __ Pop(rcx);
- }
- __ movzxbl(rbx, FieldOperand(rcx, Map::kInstanceSizeOffset));
- __ leap(rdi, Operand(rax, rbx, times_pointer_size, 0));
- STATIC_ASSERT(kHeapObjectTag == 1);
- __ decp(rdi); // Remove the tag from the end address.
- __ jmp(&done_allocate);
-
- // Fall back to %NewObject.
- __ bind(&new_object);
- __ PopReturnAddressTo(rcx);
- __ Push(rdi);
- __ Push(rdx);
- __ PushReturnAddressFrom(rcx);
- __ TailCallRuntime(Runtime::kNewObject);
-}
-
-
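The deleted FastNewObjectStub allocated a JSObject from its initial map and, while in-object slack tracking was still running, filled potentially-removable trailing fields with the one-pointer filler map instead of undefined, so the instance could later be shrunk. A sketch of that field initialization (illustrative helpers, not V8 API):

static void InitializeObjectFields(void** fields, void** end, int unused_fields,
                                   bool slack_tracking, int* counter,
                                   void* undefined, void* filler) {
  if (!slack_tracking) {
    while (fields < end) *fields++ = undefined;  // every field gets undefined
    return;
  }
  --*counter;                             // decrease generous allocation count
  void** used_end = end - unused_fields;  // fields actually in use so far
  while (fields < used_end) *fields++ = undefined;
  while (fields < end) *fields++ = filler;  // reserved (slack) fields
  // Once the counter runs out, Runtime::kFinalizeInstanceSize shrinks the map.
}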
void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- rdi : function