Diffstat (limited to 'deps/v8/src/mips/code-stubs-mips.cc')
-rw-r--r--  deps/v8/src/mips/code-stubs-mips.cc  350
1 file changed, 28 insertions(+), 322 deletions(-)
diff --git a/deps/v8/src/mips/code-stubs-mips.cc b/deps/v8/src/mips/code-stubs-mips.cc
index 844958ec47..43e67354f2 100644
--- a/deps/v8/src/mips/code-stubs-mips.cc
+++ b/deps/v8/src/mips/code-stubs-mips.cc
@@ -1782,7 +1782,6 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
// a2 : feedback vector
// a3 : slot in feedback vector (Smi)
Label initialize, done, miss, megamorphic, not_array_function;
- Label done_initialize_count, done_increment_count;
DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()),
masm->isolate()->heap()->megamorphic_symbol());
@@ -1801,7 +1800,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
Register feedback_map = t1;
Register weak_value = t4;
__ lw(weak_value, FieldMemOperand(t2, WeakCell::kValueOffset));
- __ Branch(&done_increment_count, eq, a1, Operand(weak_value));
+ __ Branch(&done, eq, a1, Operand(weak_value));
__ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
__ Branch(&done, eq, t2, Operand(at));
__ lw(feedback_map, FieldMemOperand(t2, HeapObject::kMapOffset));
@@ -1823,7 +1822,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
// Make sure the function is the Array() function
__ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, t2);
__ Branch(&megamorphic, ne, a1, Operand(t2));
- __ jmp(&done_increment_count);
+ __ jmp(&done);
__ bind(&miss);
@@ -1850,28 +1849,19 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
// slot.
CreateAllocationSiteStub create_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &create_stub);
- __ Branch(&done_initialize_count);
+ __ Branch(&done);
__ bind(&not_array_function);
CreateWeakCellStub weak_cell_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &weak_cell_stub);
- __ bind(&done_initialize_count);
- // Initialize the call counter.
- __ Lsa(at, a2, a3, kPointerSizeLog2 - kSmiTagSize);
- __ li(t0, Operand(Smi::FromInt(1)));
- __ Branch(USE_DELAY_SLOT, &done);
- __ sw(t0, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
-
- __ bind(&done_increment_count);
+ __ bind(&done);
- // Increment the call count for monomorphic function calls.
+ // Increment the call count for all function calls.
__ Lsa(at, a2, a3, kPointerSizeLog2 - kSmiTagSize);
__ lw(t0, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
__ Addu(t0, t0, Operand(Smi::FromInt(1)));
__ sw(t0, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
-
- __ bind(&done);
}
@@ -1917,6 +1907,14 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
__ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
+// Note: feedback_vector and slot are clobbered after the call.
+static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector,
+ Register slot) {
+ __ Lsa(at, feedback_vector, slot, kPointerSizeLog2 - kSmiTagSize);
+ __ lw(slot, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
+ __ Addu(slot, slot, Operand(Smi::FromInt(1)));
+ __ sw(slot, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
+}
void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
// a1 - function
@@ -1929,10 +1927,7 @@ void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
__ li(a0, Operand(arg_count()));
// Increment the call count for monomorphic function calls.
- __ Lsa(at, a2, a3, kPointerSizeLog2 - kSmiTagSize);
- __ lw(a3, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
- __ Addu(a3, a3, Operand(Smi::FromInt(1)));
- __ sw(a3, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
+ IncrementCallCount(masm, a2, a3);
__ mov(a2, t0);
__ mov(a3, a1);
@@ -1945,7 +1940,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
// a1 - function
// a3 - slot id (Smi)
// a2 - vector
- Label extra_checks_or_miss, call, call_function;
+ Label extra_checks_or_miss, call, call_function, call_count_incremented;
int argc = arg_count();
ParameterCount actual(argc);
@@ -1974,13 +1969,11 @@ void CallICStub::Generate(MacroAssembler* masm) {
// convincing us that we have a monomorphic JSFunction.
__ JumpIfSmi(a1, &extra_checks_or_miss);
+ __ bind(&call_function);
+
// Increment the call count for monomorphic function calls.
- __ Lsa(at, a2, a3, kPointerSizeLog2 - kSmiTagSize);
- __ lw(a3, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
- __ Addu(a3, a3, Operand(Smi::FromInt(1)));
- __ sw(a3, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
+ IncrementCallCount(masm, a2, a3);
- __ bind(&call_function);
__ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
tail_call_mode()),
RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg),
@@ -2021,6 +2014,10 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize));
__ bind(&call);
+ IncrementCallCount(masm, a2, a3);
+
+ __ bind(&call_count_incremented);
+
__ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg),
USE_DELAY_SLOT);
@@ -2046,11 +2043,6 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ lw(t1, NativeContextMemOperand());
__ Branch(&miss, ne, t0, Operand(t1));
- // Initialize the call counter.
- __ Lsa(at, a2, a3, kPointerSizeLog2 - kSmiTagSize);
- __ li(t0, Operand(Smi::FromInt(1)));
- __ sw(t0, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
-
// Store the function. Use a stub since we need a frame for allocation.
// a2 - vector
// a3 - slot
@@ -2058,9 +2050,11 @@ void CallICStub::Generate(MacroAssembler* masm) {
{
FrameScope scope(masm, StackFrame::INTERNAL);
CreateWeakCellStub create_stub(masm->isolate());
+ __ Push(a2, a3);
__ Push(cp, a1);
__ CallStub(&create_stub);
__ Pop(cp, a1);
+ __ Pop(a2, a3);
}
__ Branch(&call_function);
@@ -2070,7 +2064,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ bind(&miss);
GenerateMiss(masm);
- __ Branch(&call);
+ __ Branch(&call_count_incremented);
}
@@ -2275,293 +2269,6 @@ void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
}
-void SubStringStub::Generate(MacroAssembler* masm) {
- Label runtime;
- // Stack frame on entry.
- // ra: return address
- // sp[0]: to
- // sp[4]: from
- // sp[8]: string
-
- // This stub is called from the native-call %_SubString(...), so
- // nothing can be assumed about the arguments. It is tested that:
- // "string" is a sequential string,
- // both "from" and "to" are smis, and
- // 0 <= from <= to <= string.length.
- // If any of these assumptions fail, we call the runtime system.
-
- const int kToOffset = 0 * kPointerSize;
- const int kFromOffset = 1 * kPointerSize;
- const int kStringOffset = 2 * kPointerSize;
-
- __ lw(a2, MemOperand(sp, kToOffset));
- __ lw(a3, MemOperand(sp, kFromOffset));
- STATIC_ASSERT(kFromOffset == kToOffset + 4);
- STATIC_ASSERT(kSmiTag == 0);
- STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
-
- // Utilize delay slots. SmiUntag doesn't emit a jump, everything else is
- // safe in this case.
- __ UntagAndJumpIfNotSmi(a2, a2, &runtime);
- __ UntagAndJumpIfNotSmi(a3, a3, &runtime);
- // Both a2 and a3 are untagged integers.
-
- __ Branch(&runtime, lt, a3, Operand(zero_reg)); // From < 0.
-
- __ Branch(&runtime, gt, a3, Operand(a2)); // Fail if from > to.
- __ Subu(a2, a2, a3);
-
- // Make sure first argument is a string.
- __ lw(v0, MemOperand(sp, kStringOffset));
- __ JumpIfSmi(v0, &runtime);
- __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
- __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
- __ And(t0, a1, Operand(kIsNotStringMask));
-
- __ Branch(&runtime, ne, t0, Operand(zero_reg));
-
- Label single_char;
- __ Branch(&single_char, eq, a2, Operand(1));
-
- // Short-cut for the case of trivial substring.
- Label return_v0;
- // v0: original string
- // a2: result string length
- __ lw(t0, FieldMemOperand(v0, String::kLengthOffset));
- __ sra(t0, t0, 1);
- // Return original string.
- __ Branch(&return_v0, eq, a2, Operand(t0));
- // Longer than original string's length or negative: unsafe arguments.
- __ Branch(&runtime, hi, a2, Operand(t0));
- // Shorter than original string's length: an actual substring.
-
- // Deal with different string types: update the index if necessary
- // and put the underlying string into t1.
- // v0: original string
- // a1: instance type
- // a2: length
- // a3: from index (untagged)
- Label underlying_unpacked, sliced_string, seq_or_external_string;
- // If the string is not indirect, it can only be sequential or external.
- STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
- STATIC_ASSERT(kIsIndirectStringMask != 0);
- __ And(t0, a1, Operand(kIsIndirectStringMask));
- __ Branch(USE_DELAY_SLOT, &seq_or_external_string, eq, t0, Operand(zero_reg));
- // t0 is used as a scratch register and can be overwritten in either case.
- __ And(t0, a1, Operand(kSlicedNotConsMask));
- __ Branch(&sliced_string, ne, t0, Operand(zero_reg));
- // Cons string. Check whether it is flat, then fetch first part.
- __ lw(t1, FieldMemOperand(v0, ConsString::kSecondOffset));
- __ LoadRoot(t0, Heap::kempty_stringRootIndex);
- __ Branch(&runtime, ne, t1, Operand(t0));
- __ lw(t1, FieldMemOperand(v0, ConsString::kFirstOffset));
- // Update instance type.
- __ lw(a1, FieldMemOperand(t1, HeapObject::kMapOffset));
- __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
- __ jmp(&underlying_unpacked);
-
- __ bind(&sliced_string);
- // Sliced string. Fetch parent and correct start index by offset.
- __ lw(t1, FieldMemOperand(v0, SlicedString::kParentOffset));
- __ lw(t0, FieldMemOperand(v0, SlicedString::kOffsetOffset));
- __ sra(t0, t0, 1); // Add offset to index.
- __ Addu(a3, a3, t0);
- // Update instance type.
- __ lw(a1, FieldMemOperand(t1, HeapObject::kMapOffset));
- __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
- __ jmp(&underlying_unpacked);
-
- __ bind(&seq_or_external_string);
- // Sequential or external string. Just move string to the expected register.
- __ mov(t1, v0);
-
- __ bind(&underlying_unpacked);
-
- if (FLAG_string_slices) {
- Label copy_routine;
- // t1: underlying subject string
- // a1: instance type of underlying subject string
- // a2: length
- // a3: adjusted start index (untagged)
- // Short slice. Copy instead of slicing.
- __ Branch(&copy_routine, lt, a2, Operand(SlicedString::kMinLength));
- // Allocate new sliced string. At this point we do not reload the instance
- // type including the string encoding because we simply rely on the info
- // provided by the original string. It does not matter if the original
- // string's encoding is wrong because we always have to recheck encoding of
- // the newly created string's parent anyways due to externalized strings.
- Label two_byte_slice, set_slice_header;
- STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
- STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
- __ And(t0, a1, Operand(kStringEncodingMask));
- __ Branch(&two_byte_slice, eq, t0, Operand(zero_reg));
- __ AllocateOneByteSlicedString(v0, a2, t2, t3, &runtime);
- __ jmp(&set_slice_header);
- __ bind(&two_byte_slice);
- __ AllocateTwoByteSlicedString(v0, a2, t2, t3, &runtime);
- __ bind(&set_slice_header);
- __ sll(a3, a3, 1);
- __ sw(t1, FieldMemOperand(v0, SlicedString::kParentOffset));
- __ sw(a3, FieldMemOperand(v0, SlicedString::kOffsetOffset));
- __ jmp(&return_v0);
-
- __ bind(&copy_routine);
- }
-
- // t1: underlying subject string
- // a1: instance type of underlying subject string
- // a2: length
- // a3: adjusted start index (untagged)
- Label two_byte_sequential, sequential_string, allocate_result;
- STATIC_ASSERT(kExternalStringTag != 0);
- STATIC_ASSERT(kSeqStringTag == 0);
- __ And(t0, a1, Operand(kExternalStringTag));
- __ Branch(&sequential_string, eq, t0, Operand(zero_reg));
-
- // Handle external string.
- // Rule out short external strings.
- STATIC_ASSERT(kShortExternalStringTag != 0);
- __ And(t0, a1, Operand(kShortExternalStringTag));
- __ Branch(&runtime, ne, t0, Operand(zero_reg));
- __ lw(t1, FieldMemOperand(t1, ExternalString::kResourceDataOffset));
- // t1 already points to the first character of underlying string.
- __ jmp(&allocate_result);
-
- __ bind(&sequential_string);
- // Locate first character of underlying subject string.
- STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
- __ Addu(t1, t1, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
-
- __ bind(&allocate_result);
- // Sequential acii string. Allocate the result.
- STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
- __ And(t0, a1, Operand(kStringEncodingMask));
- __ Branch(&two_byte_sequential, eq, t0, Operand(zero_reg));
-
- // Allocate and copy the resulting ASCII string.
- __ AllocateOneByteString(v0, a2, t0, t2, t3, &runtime);
-
- // Locate first character of substring to copy.
- __ Addu(t1, t1, a3);
-
- // Locate first character of result.
- __ Addu(a1, v0, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
-
- // v0: result string
- // a1: first character of result string
- // a2: result string length
- // t1: first character of substring to copy
- STATIC_ASSERT((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
- StringHelper::GenerateCopyCharacters(
- masm, a1, t1, a2, a3, String::ONE_BYTE_ENCODING);
- __ jmp(&return_v0);
-
- // Allocate and copy the resulting two-byte string.
- __ bind(&two_byte_sequential);
- __ AllocateTwoByteString(v0, a2, t0, t2, t3, &runtime);
-
- // Locate first character of substring to copy.
- STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
- __ Lsa(t1, t1, a3, 1);
- // Locate first character of result.
- __ Addu(a1, v0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
-
- // v0: result string.
- // a1: first character of result.
- // a2: result length.
- // t1: first character of substring to copy.
- STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
- StringHelper::GenerateCopyCharacters(
- masm, a1, t1, a2, a3, String::TWO_BYTE_ENCODING);
-
- __ bind(&return_v0);
- Counters* counters = isolate()->counters();
- __ IncrementCounter(counters->sub_string_native(), 1, a3, t0);
- __ DropAndRet(3);
-
- // Just jump to runtime to create the sub string.
- __ bind(&runtime);
- __ TailCallRuntime(Runtime::kSubString);
-
- __ bind(&single_char);
- // v0: original string
- // a1: instance type
- // a2: length
- // a3: from index (untagged)
- __ SmiTag(a3, a3);
- StringCharAtGenerator generator(v0, a3, a2, v0, &runtime, &runtime, &runtime,
- RECEIVER_IS_STRING);
- generator.GenerateFast(masm);
- __ DropAndRet(3);
- generator.SkipSlow(masm, &runtime);
-}
-
-
-void ToStringStub::Generate(MacroAssembler* masm) {
- // The ToString stub takes on argument in a0.
- Label is_number;
- __ JumpIfSmi(a0, &is_number);
-
- Label not_string;
- __ GetObjectType(a0, a1, a1);
- // a0: receiver
- // a1: receiver instance type
- __ Branch(&not_string, ge, a1, Operand(FIRST_NONSTRING_TYPE));
- __ Ret(USE_DELAY_SLOT);
- __ mov(v0, a0);
- __ bind(&not_string);
-
- Label not_heap_number;
- __ Branch(&not_heap_number, ne, a1, Operand(HEAP_NUMBER_TYPE));
- __ bind(&is_number);
- NumberToStringStub stub(isolate());
- __ TailCallStub(&stub);
- __ bind(&not_heap_number);
-
- Label not_oddball;
- __ Branch(&not_oddball, ne, a1, Operand(ODDBALL_TYPE));
- __ Ret(USE_DELAY_SLOT);
- __ lw(v0, FieldMemOperand(a0, Oddball::kToStringOffset));
- __ bind(&not_oddball);
-
- __ push(a0); // Push argument.
- __ TailCallRuntime(Runtime::kToString);
-}
-
-
-void ToNameStub::Generate(MacroAssembler* masm) {
- // The ToName stub takes on argument in a0.
- Label is_number;
- __ JumpIfSmi(a0, &is_number);
-
- Label not_name;
- STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
- __ GetObjectType(a0, a1, a1);
- // a0: receiver
- // a1: receiver instance type
- __ Branch(&not_name, gt, a1, Operand(LAST_NAME_TYPE));
- __ Ret(USE_DELAY_SLOT);
- __ mov(v0, a0);
- __ bind(&not_name);
-
- Label not_heap_number;
- __ Branch(&not_heap_number, ne, a1, Operand(HEAP_NUMBER_TYPE));
- __ bind(&is_number);
- NumberToStringStub stub(isolate());
- __ TailCallStub(&stub);
- __ bind(&not_heap_number);
-
- Label not_oddball;
- __ Branch(&not_oddball, ne, a1, Operand(ODDBALL_TYPE));
- __ Ret(USE_DELAY_SLOT);
- __ lw(v0, FieldMemOperand(a0, Oddball::kToStringOffset));
- __ bind(&not_oddball);
-
- __ push(a0); // Push argument.
- __ TailCallRuntime(Runtime::kToName);
-}
-
-
void StringHelper::GenerateFlatOneByteStringEquals(
MacroAssembler* masm, Register left, Register right, Register scratch1,
Register scratch2, Register scratch3) {
@@ -3915,7 +3622,7 @@ static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register feedback,
__ lw(receiver_map, MemOperand(pointer_reg, kPointerSize * 2));
// Load the map into the correct register.
- DCHECK(feedback.is(VectorStoreTransitionDescriptor::MapRegister()));
+ DCHECK(feedback.is(StoreTransitionDescriptor::MapRegister()));
__ mov(feedback, too_far);
__ Addu(t9, receiver_map, Operand(Code::kHeaderSize - kHeapObjectTag));
@@ -4624,7 +4331,7 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
Label too_big_for_new_space;
__ bind(&allocate);
__ Branch(&too_big_for_new_space, gt, t0,
- Operand(Page::kMaxRegularHeapObjectSize));
+ Operand(kMaxRegularHeapObjectSize));
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(t0);
@@ -4968,8 +4675,7 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
// Fall back to %AllocateInNewSpace (if not too big).
Label too_big_for_new_space;
__ bind(&allocate);
- __ Branch(&too_big_for_new_space, gt, t0,
- Operand(Page::kMaxRegularHeapObjectSize));
+ __ Branch(&too_big_for_new_space, gt, t0, Operand(kMaxRegularHeapObjectSize));
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(t0);