Diffstat (limited to 'deps/v8/src/ppc/code-stubs-ppc.cc')
-rw-r--r--  deps/v8/src/ppc/code-stubs-ppc.cc  366
1 files changed, 33 insertions, 333 deletions
diff --git a/deps/v8/src/ppc/code-stubs-ppc.cc b/deps/v8/src/ppc/code-stubs-ppc.cc
index 6dd897b031..ce423ea53a 100644
--- a/deps/v8/src/ppc/code-stubs-ppc.cc
+++ b/deps/v8/src/ppc/code-stubs-ppc.cc
@@ -1719,7 +1719,6 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
// r5 : feedback vector
// r6 : slot in feedback vector (Smi)
Label initialize, done, miss, megamorphic, not_array_function;
- Label done_initialize_count, done_increment_count;
DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()),
masm->isolate()->heap()->megamorphic_symbol());
@@ -1742,7 +1741,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
Register weak_value = r10;
__ LoadP(weak_value, FieldMemOperand(r8, WeakCell::kValueOffset));
__ cmp(r4, weak_value);
- __ beq(&done_increment_count);
+ __ beq(&done);
__ CompareRoot(r8, Heap::kmegamorphic_symbolRootIndex);
__ beq(&done);
__ LoadP(feedback_map, FieldMemOperand(r8, HeapObject::kMapOffset));
@@ -1765,7 +1764,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
__ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r8);
__ cmp(r4, r8);
__ bne(&megamorphic);
- __ b(&done_increment_count);
+ __ b(&done);
__ bind(&miss);
@@ -1795,32 +1794,22 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
// slot.
CreateAllocationSiteStub create_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &create_stub);
- __ b(&done_initialize_count);
+ __ b(&done);
__ bind(&not_array_function);
CreateWeakCellStub weak_cell_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &weak_cell_stub);
- __ bind(&done_initialize_count);
- // Initialize the call counter.
- __ LoadSmiLiteral(r8, Smi::FromInt(1));
- __ SmiToPtrArrayOffset(r7, r6);
- __ add(r7, r5, r7);
- __ StoreP(r8, FieldMemOperand(r7, count_offset), r0);
- __ b(&done);
-
- __ bind(&done_increment_count);
+ __ bind(&done);
- // Increment the call count for monomorphic function calls.
+ // Increment the call count for all function calls.
__ SmiToPtrArrayOffset(r8, r6);
__ add(r8, r5, r8);
__ LoadP(r7, FieldMemOperand(r8, count_offset));
__ AddSmiLiteral(r7, r7, Smi::FromInt(1), r0);
__ StoreP(r7, FieldMemOperand(r8, count_offset), r0);
-
- __ bind(&done);
}
@@ -1872,6 +1861,16 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
__ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
+// Note: feedback_vector and slot are clobbered after the call.
+static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector,
+ Register slot, Register temp) {
+ const int count_offset = FixedArray::kHeaderSize + kPointerSize;
+ __ SmiToPtrArrayOffset(temp, slot);
+ __ add(feedback_vector, feedback_vector, temp);
+ __ LoadP(slot, FieldMemOperand(feedback_vector, count_offset));
+ __ AddSmiLiteral(slot, slot, Smi::FromInt(1), temp);
+ __ StoreP(slot, FieldMemOperand(feedback_vector, count_offset), temp);
+}
void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
// r4 - function
@@ -1885,12 +1884,7 @@ void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
__ mov(r3, Operand(arg_count()));
// Increment the call count for monomorphic function calls.
- const int count_offset = FixedArray::kHeaderSize + kPointerSize;
- __ SmiToPtrArrayOffset(r8, r6);
- __ add(r5, r5, r8);
- __ LoadP(r6, FieldMemOperand(r5, count_offset));
- __ AddSmiLiteral(r6, r6, Smi::FromInt(1), r0);
- __ StoreP(r6, FieldMemOperand(r5, count_offset), r0);
+ IncrementCallCount(masm, r5, r6, r0);
__ mr(r5, r7);
__ mr(r6, r4);
@@ -1903,7 +1897,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
// r4 - function
// r6 - slot id (Smi)
// r5 - vector
- Label extra_checks_or_miss, call, call_function;
+ Label extra_checks_or_miss, call, call_function, call_count_incremented;
int argc = arg_count();
ParameterCount actual(argc);
@@ -1934,13 +1928,11 @@ void CallICStub::Generate(MacroAssembler* masm) {
// convincing us that we have a monomorphic JSFunction.
__ JumpIfSmi(r4, &extra_checks_or_miss);
+ __ bind(&call_function);
+
// Increment the call count for monomorphic function calls.
- const int count_offset = FixedArray::kHeaderSize + kPointerSize;
- __ LoadP(r6, FieldMemOperand(r9, count_offset));
- __ AddSmiLiteral(r6, r6, Smi::FromInt(1), r0);
- __ StoreP(r6, FieldMemOperand(r9, count_offset), r0);
+ IncrementCallCount(masm, r5, r6, r0);
- __ bind(&call_function);
__ mov(r3, Operand(argc));
__ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
tail_call_mode()),
@@ -1980,6 +1972,11 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ StoreP(ip, FieldMemOperand(r9, FixedArray::kHeaderSize), r0);
__ bind(&call);
+
+ // Increment the call count for megamorphic function calls.
+ IncrementCallCount(masm, r5, r6, r0);
+
+ __ bind(&call_count_incremented);
__ mov(r3, Operand(argc));
__ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
RelocInfo::CODE_TARGET);
@@ -2006,10 +2003,6 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ cmp(r7, ip);
__ bne(&miss);
- // Initialize the call counter.
- __ LoadSmiLiteral(r8, Smi::FromInt(1));
- __ StoreP(r8, FieldMemOperand(r9, count_offset), r0);
-
// Store the function. Use a stub since we need a frame for allocation.
// r5 - vector
// r6 - slot
@@ -2017,9 +2010,13 @@ void CallICStub::Generate(MacroAssembler* masm) {
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
CreateWeakCellStub create_stub(masm->isolate());
+ __ Push(r5);
+ __ Push(r6);
__ Push(cp, r4);
__ CallStub(&create_stub);
__ Pop(cp, r4);
+ __ Pop(r6);
+ __ Pop(r5);
}
__ b(&call_function);
@@ -2029,7 +2026,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ bind(&miss);
GenerateMiss(masm);
- __ b(&call);
+ __ b(&call_count_incremented);
}
@@ -2211,290 +2208,6 @@ void StringHelper::GenerateCopyCharacters(MacroAssembler* masm, Register dest,
}
-void SubStringStub::Generate(MacroAssembler* masm) {
- Label runtime;
-
- // Stack frame on entry.
- // lr: return address
- // sp[0]: to
- // sp[4]: from
- // sp[8]: string
-
- // This stub is called from the native-call %_SubString(...), so
- // nothing can be assumed about the arguments. It is tested that:
- // "string" is a sequential string,
- // both "from" and "to" are smis, and
- // 0 <= from <= to <= string.length.
- // If any of these assumptions fail, we call the runtime system.
-
- const int kToOffset = 0 * kPointerSize;
- const int kFromOffset = 1 * kPointerSize;
- const int kStringOffset = 2 * kPointerSize;
-
- __ LoadP(r5, MemOperand(sp, kToOffset));
- __ LoadP(r6, MemOperand(sp, kFromOffset));
-
- // If either to or from had the smi tag bit set, then fail to generic runtime
- __ JumpIfNotSmi(r5, &runtime);
- __ JumpIfNotSmi(r6, &runtime);
- __ SmiUntag(r5);
- __ SmiUntag(r6, SetRC);
- // Both r5 and r6 are untagged integers.
-
- // We want to bailout to runtime here if From is negative.
- __ blt(&runtime, cr0); // From < 0.
-
- __ cmpl(r6, r5);
- __ bgt(&runtime); // Fail if from > to.
- __ sub(r5, r5, r6);
-
- // Make sure first argument is a string.
- __ LoadP(r3, MemOperand(sp, kStringOffset));
- __ JumpIfSmi(r3, &runtime);
- Condition is_string = masm->IsObjectStringType(r3, r4);
- __ b(NegateCondition(is_string), &runtime, cr0);
-
- Label single_char;
- __ cmpi(r5, Operand(1));
- __ b(eq, &single_char);
-
- // Short-cut for the case of trivial substring.
- Label return_r3;
- // r3: original string
- // r5: result string length
- __ LoadP(r7, FieldMemOperand(r3, String::kLengthOffset));
- __ SmiUntag(r0, r7);
- __ cmpl(r5, r0);
- // Return original string.
- __ beq(&return_r3);
- // Longer than original string's length or negative: unsafe arguments.
- __ bgt(&runtime);
- // Shorter than original string's length: an actual substring.
-
- // Deal with different string types: update the index if necessary
- // and put the underlying string into r8.
- // r3: original string
- // r4: instance type
- // r5: length
- // r6: from index (untagged)
- Label underlying_unpacked, sliced_string, seq_or_external_string;
- // If the string is not indirect, it can only be sequential or external.
- STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
- STATIC_ASSERT(kIsIndirectStringMask != 0);
- __ andi(r0, r4, Operand(kIsIndirectStringMask));
- __ beq(&seq_or_external_string, cr0);
-
- __ andi(r0, r4, Operand(kSlicedNotConsMask));
- __ bne(&sliced_string, cr0);
- // Cons string. Check whether it is flat, then fetch first part.
- __ LoadP(r8, FieldMemOperand(r3, ConsString::kSecondOffset));
- __ CompareRoot(r8, Heap::kempty_stringRootIndex);
- __ bne(&runtime);
- __ LoadP(r8, FieldMemOperand(r3, ConsString::kFirstOffset));
- // Update instance type.
- __ LoadP(r4, FieldMemOperand(r8, HeapObject::kMapOffset));
- __ lbz(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
- __ b(&underlying_unpacked);
-
- __ bind(&sliced_string);
- // Sliced string. Fetch parent and correct start index by offset.
- __ LoadP(r8, FieldMemOperand(r3, SlicedString::kParentOffset));
- __ LoadP(r7, FieldMemOperand(r3, SlicedString::kOffsetOffset));
- __ SmiUntag(r4, r7);
- __ add(r6, r6, r4); // Add offset to index.
- // Update instance type.
- __ LoadP(r4, FieldMemOperand(r8, HeapObject::kMapOffset));
- __ lbz(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
- __ b(&underlying_unpacked);
-
- __ bind(&seq_or_external_string);
- // Sequential or external string. Just move string to the expected register.
- __ mr(r8, r3);
-
- __ bind(&underlying_unpacked);
-
- if (FLAG_string_slices) {
- Label copy_routine;
- // r8: underlying subject string
- // r4: instance type of underlying subject string
- // r5: length
- // r6: adjusted start index (untagged)
- __ cmpi(r5, Operand(SlicedString::kMinLength));
- // Short slice. Copy instead of slicing.
- __ blt(&copy_routine);
- // Allocate new sliced string. At this point we do not reload the instance
- // type including the string encoding because we simply rely on the info
- // provided by the original string. It does not matter if the original
- // string's encoding is wrong because we always have to recheck encoding of
- // the newly created string's parent anyways due to externalized strings.
- Label two_byte_slice, set_slice_header;
- STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
- STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
- __ andi(r0, r4, Operand(kStringEncodingMask));
- __ beq(&two_byte_slice, cr0);
- __ AllocateOneByteSlicedString(r3, r5, r9, r10, &runtime);
- __ b(&set_slice_header);
- __ bind(&two_byte_slice);
- __ AllocateTwoByteSlicedString(r3, r5, r9, r10, &runtime);
- __ bind(&set_slice_header);
- __ SmiTag(r6);
- __ StoreP(r8, FieldMemOperand(r3, SlicedString::kParentOffset), r0);
- __ StoreP(r6, FieldMemOperand(r3, SlicedString::kOffsetOffset), r0);
- __ b(&return_r3);
-
- __ bind(&copy_routine);
- }
-
- // r8: underlying subject string
- // r4: instance type of underlying subject string
- // r5: length
- // r6: adjusted start index (untagged)
- Label two_byte_sequential, sequential_string, allocate_result;
- STATIC_ASSERT(kExternalStringTag != 0);
- STATIC_ASSERT(kSeqStringTag == 0);
- __ andi(r0, r4, Operand(kExternalStringTag));
- __ beq(&sequential_string, cr0);
-
- // Handle external string.
- // Rule out short external strings.
- STATIC_ASSERT(kShortExternalStringTag != 0);
- __ andi(r0, r4, Operand(kShortExternalStringTag));
- __ bne(&runtime, cr0);
- __ LoadP(r8, FieldMemOperand(r8, ExternalString::kResourceDataOffset));
- // r8 already points to the first character of underlying string.
- __ b(&allocate_result);
-
- __ bind(&sequential_string);
- // Locate first character of underlying subject string.
- STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
- __ addi(r8, r8, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
-
- __ bind(&allocate_result);
- // Sequential acii string. Allocate the result.
- STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
- __ andi(r0, r4, Operand(kStringEncodingMask));
- __ beq(&two_byte_sequential, cr0);
-
- // Allocate and copy the resulting one-byte string.
- __ AllocateOneByteString(r3, r5, r7, r9, r10, &runtime);
-
- // Locate first character of substring to copy.
- __ add(r8, r8, r6);
- // Locate first character of result.
- __ addi(r4, r3, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
-
- // r3: result string
- // r4: first character of result string
- // r5: result string length
- // r8: first character of substring to copy
- STATIC_ASSERT((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
- StringHelper::GenerateCopyCharacters(masm, r4, r8, r5, r6,
- String::ONE_BYTE_ENCODING);
- __ b(&return_r3);
-
- // Allocate and copy the resulting two-byte string.
- __ bind(&two_byte_sequential);
- __ AllocateTwoByteString(r3, r5, r7, r9, r10, &runtime);
-
- // Locate first character of substring to copy.
- __ ShiftLeftImm(r4, r6, Operand(1));
- __ add(r8, r8, r4);
- // Locate first character of result.
- __ addi(r4, r3, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
-
- // r3: result string.
- // r4: first character of result.
- // r5: result length.
- // r8: first character of substring to copy.
- STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
- StringHelper::GenerateCopyCharacters(masm, r4, r8, r5, r6,
- String::TWO_BYTE_ENCODING);
-
- __ bind(&return_r3);
- Counters* counters = isolate()->counters();
- __ IncrementCounter(counters->sub_string_native(), 1, r6, r7);
- __ Drop(3);
- __ Ret();
-
- // Just jump to runtime to create the sub string.
- __ bind(&runtime);
- __ TailCallRuntime(Runtime::kSubString);
-
- __ bind(&single_char);
- // r3: original string
- // r4: instance type
- // r5: length
- // r6: from index (untagged)
- __ SmiTag(r6, r6);
- StringCharAtGenerator generator(r3, r6, r5, r3, &runtime, &runtime, &runtime,
- RECEIVER_IS_STRING);
- generator.GenerateFast(masm);
- __ Drop(3);
- __ Ret();
- generator.SkipSlow(masm, &runtime);
-}
-
-void ToStringStub::Generate(MacroAssembler* masm) {
- // The ToString stub takes one argument in r3.
- Label is_number;
- __ JumpIfSmi(r3, &is_number);
-
- __ CompareObjectType(r3, r4, r4, FIRST_NONSTRING_TYPE);
- // r3: receiver
- // r4: receiver instance type
- __ Ret(lt);
-
- Label not_heap_number;
- __ cmpi(r4, Operand(HEAP_NUMBER_TYPE));
- __ bne(&not_heap_number);
- __ bind(&is_number);
- NumberToStringStub stub(isolate());
- __ TailCallStub(&stub);
- __ bind(&not_heap_number);
-
- Label not_oddball;
- __ cmpi(r4, Operand(ODDBALL_TYPE));
- __ bne(&not_oddball);
- __ LoadP(r3, FieldMemOperand(r3, Oddball::kToStringOffset));
- __ Ret();
- __ bind(&not_oddball);
-
- __ push(r3); // Push argument.
- __ TailCallRuntime(Runtime::kToString);
-}
-
-
-void ToNameStub::Generate(MacroAssembler* masm) {
- // The ToName stub takes one argument in r3.
- Label is_number;
- __ JumpIfSmi(r3, &is_number);
-
- STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
- __ CompareObjectType(r3, r4, r4, LAST_NAME_TYPE);
- // r3: receiver
- // r4: receiver instance type
- __ Ret(le);
-
- Label not_heap_number;
- __ cmpi(r4, Operand(HEAP_NUMBER_TYPE));
- __ bne(&not_heap_number);
- __ bind(&is_number);
- NumberToStringStub stub(isolate());
- __ TailCallStub(&stub);
- __ bind(&not_heap_number);
-
- Label not_oddball;
- __ cmpi(r4, Operand(ODDBALL_TYPE));
- __ bne(&not_oddball);
- __ LoadP(r3, FieldMemOperand(r3, Oddball::kToStringOffset));
- __ Ret();
- __ bind(&not_oddball);
-
- __ push(r3); // Push argument.
- __ TailCallRuntime(Runtime::kToName);
-}
-
-
void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
Register left,
Register right,
@@ -3407,19 +3120,6 @@ void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
Label need_incremental;
Label need_incremental_pop_scratch;
- DCHECK((~Page::kPageAlignmentMask & 0xffff) == 0);
- __ lis(r0, Operand((~Page::kPageAlignmentMask >> 16)));
- __ and_(regs_.scratch0(), regs_.object(), r0);
- __ LoadP(
- regs_.scratch1(),
- MemOperand(regs_.scratch0(), MemoryChunk::kWriteBarrierCounterOffset));
- __ subi(regs_.scratch1(), regs_.scratch1(), Operand(1));
- __ StoreP(
- regs_.scratch1(),
- MemOperand(regs_.scratch0(), MemoryChunk::kWriteBarrierCounterOffset));
- __ cmpi(regs_.scratch1(), Operand::Zero()); // PPC, we could do better here
- __ blt(&need_incremental);
-
// Let's look at the color of the object: If it is not black we don't have
// to inform the incremental marker.
__ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);
@@ -3854,7 +3554,7 @@ static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register feedback,
__ LoadP(receiver_map, MemOperand(pointer_reg, kPointerSize * 2));
// Load the map into the correct register.
- DCHECK(feedback.is(VectorStoreTransitionDescriptor::MapRegister()));
+ DCHECK(feedback.is(StoreTransitionDescriptor::MapRegister()));
__ mr(feedback, too_far);
__ addi(ip, receiver_map, Operand(Code::kHeaderSize - kHeapObjectTag));
@@ -4581,7 +4281,7 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
// Fall back to %AllocateInNewSpace (if not too big).
Label too_big_for_new_space;
__ bind(&allocate);
- __ Cmpi(r10, Operand(Page::kMaxRegularHeapObjectSize), r0);
+ __ Cmpi(r10, Operand(kMaxRegularHeapObjectSize), r0);
__ bgt(&too_big_for_new_space);
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
@@ -4972,7 +4672,7 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
// Fall back to %AllocateInNewSpace (if not too big).
Label too_big_for_new_space;
__ bind(&allocate);
- __ Cmpi(r10, Operand(Page::kMaxRegularHeapObjectSize), r0);
+ __ Cmpi(r10, Operand(kMaxRegularHeapObjectSize), r0);
__ bgt(&too_big_for_new_space);
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);