Diffstat (limited to 'deps/v8/src/ia32/macro-assembler-ia32.cc')
-rw-r--r--  deps/v8/src/ia32/macro-assembler-ia32.cc | 274
1 file changed, 229 insertions(+), 45 deletions(-)
diff --git a/deps/v8/src/ia32/macro-assembler-ia32.cc b/deps/v8/src/ia32/macro-assembler-ia32.cc
index 12daec8285..f9fd8d6a40 100644
--- a/deps/v8/src/ia32/macro-assembler-ia32.cc
+++ b/deps/v8/src/ia32/macro-assembler-ia32.cc
@@ -204,15 +204,15 @@ void MacroAssembler::RememberedSetHelper(
mov(Operand::StaticVariable(store_buffer), scratch);
// Call stub on end of buffer.
// Check for end of buffer.
- test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
+ test(scratch, Immediate(StoreBuffer::kStoreBufferMask));
if (and_then == kReturnAtEnd) {
Label buffer_overflowed;
- j(not_equal, &buffer_overflowed, Label::kNear);
+ j(equal, &buffer_overflowed, Label::kNear);
ret(0);
bind(&buffer_overflowed);
} else {
DCHECK(and_then == kFallThroughAtEnd);
- j(equal, &done, Label::kNear);
+ j(not_equal, &done, Label::kNear);
}
StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
CallStub(&store_buffer_overflow);
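The flipped branch senses follow from the new predicate: rather than testing a dedicated overflow bit, the code masks the bumped store-buffer top and treats an all-zero offset as "buffer full". A hedged stand-alone sketch of that predicate, assuming the buffer is power-of-two sized and aligned to its size; the size and start address are made-up example values:

  #include <cstdint>
  #include <cstdio>

  int main() {
    const uintptr_t kStoreBufferSize = 1u << 14;        // assumed example size
    const uintptr_t kStoreBufferMask = kStoreBufferSize - 1;
    const uintptr_t buffer_start = 0x40000000;          // assumed, aligned to the size

    uintptr_t top = buffer_start + kStoreBufferSize;    // bumped just past the last slot
    // Mirrors test(scratch, Immediate(kStoreBufferMask)); j(equal, &buffer_overflowed).
    std::printf("full: %s\n", (top & kStoreBufferMask) == 0 ? "yes" : "no");  // yes
  }
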
@@ -454,7 +454,7 @@ void MacroAssembler::RecordWriteField(
lea(dst, FieldOperand(object, offset));
if (emit_debug_code()) {
Label ok;
- test_b(dst, (1 << kPointerSizeLog2) - 1);
+ test_b(dst, Immediate((1 << kPointerSizeLog2) - 1));
j(zero, &ok, Label::kNear);
int3();
bind(&ok);
@@ -487,7 +487,7 @@ void MacroAssembler::RecordWriteForMap(
if (emit_debug_code()) {
Label ok;
lea(address, FieldOperand(object, HeapObject::kMapOffset));
- test_b(address, (1 << kPointerSizeLog2) - 1);
+ test_b(address, Immediate((1 << kPointerSizeLog2) - 1));
j(zero, &ok, Label::kNear);
int3();
bind(&ok);
@@ -682,7 +682,6 @@ void MacroAssembler::DebugBreak() {
call(ces.GetCode(), RelocInfo::DEBUGGER_STATEMENT);
}
-
void MacroAssembler::Cvtsi2sd(XMMRegister dst, const Operand& src) {
xorps(dst, dst);
cvtsi2sd(dst, src);
@@ -707,6 +706,71 @@ void MacroAssembler::Cvtui2ss(XMMRegister dst, Register src, Register tmp) {
bind(&jmp_return);
}
+void MacroAssembler::ShlPair(Register high, Register low, uint8_t shift) {
+ if (shift >= 32) {
+ mov(high, low);
+ shl(high, shift - 32);
+ xor_(low, low);
+ } else {
+ shld(high, low, shift);
+ shl(low, shift);
+ }
+}
+
+void MacroAssembler::ShlPair_cl(Register high, Register low) {
+ shld_cl(high, low);
+ shl_cl(low);
+ Label done;
+ test(ecx, Immediate(0x20));
+ j(equal, &done, Label::kNear);
+ mov(high, low);
+ xor_(low, low);
+ bind(&done);
+}
+
+void MacroAssembler::ShrPair(Register high, Register low, uint8_t shift) {
+ if (shift >= 32) {
+ mov(low, high);
+ shr(low, shift - 32);
+ xor_(high, high);
+ } else {
+ shrd(high, low, shift);
+ shr(high, shift);
+ }
+}
+
+void MacroAssembler::ShrPair_cl(Register high, Register low) {
+ shrd_cl(low, high);
+ shr_cl(high);
+ Label done;
+ test(ecx, Immediate(0x20));
+ j(equal, &done, Label::kNear);
+ mov(low, high);
+ xor_(high, high);
+ bind(&done);
+}
+
+void MacroAssembler::SarPair(Register high, Register low, uint8_t shift) {
+ if (shift >= 32) {
+ mov(low, high);
+ sar(low, shift - 32);
+ sar(high, 31);
+ } else {
+ shrd(high, low, shift);
+ sar(high, shift);
+ }
+}
+
+void MacroAssembler::SarPair_cl(Register high, Register low) {
+ shrd_cl(low, high);
+ sar_cl(high);
+ Label done;
+ test(ecx, Immediate(0x20));
+ j(equal, &done, Label::kNear);
+ mov(low, high);
+ sar(high, 31);
+ bind(&done);
+}
bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
static const int kMaxImmediateBits = 17;
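For reference, a minimal stand-alone C++ model (hypothetical helper, not V8 code) of the 64-bit semantics the new pair-shift helpers give a high:low register pair; it assumes 0 <= shift < 64. The _cl variants do the same job for a runtime count in ecx: because 32-bit shifts truncate the count to 5 bits, they test bit 0x20 of ecx and move/clear (or sign-fill) the halves when the count is 32 or more.

  #include <cstdint>
  #include <cstdio>

  static void ShlPairModel(uint32_t& high, uint32_t& low, uint8_t shift) {
    if (shift >= 32) {
      high = low << (shift - 32);    // mov(high, low); shl(high, shift - 32)
      low = 0;                       // xor_(low, low)
    } else if (shift > 0) {
      // shld(high, low, shift): shift high left, filling from low's top bits.
      high = (high << shift) | (low >> (32 - shift));
      low = low << shift;            // shl(low, shift)
    }
  }

  int main() {
    uint32_t high = 0x00000001, low = 0x80000000;  // the 64-bit value 0x1'80000000
    ShlPairModel(high, low, 4);
    std::printf("%08x:%08x\n", high, low);         // prints 00000018:00000000
  }
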
@@ -744,8 +808,7 @@ void MacroAssembler::CmpObjectType(Register heap_object,
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
- cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
- static_cast<int8_t>(type));
+ cmpb(FieldOperand(map, Map::kInstanceTypeOffset), Immediate(type));
}
@@ -757,7 +820,7 @@ void MacroAssembler::CheckFastElements(Register map,
STATIC_ASSERT(FAST_ELEMENTS == 2);
STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
cmpb(FieldOperand(map, Map::kBitField2Offset),
- Map::kMaximumBitField2FastHoleyElementValue);
+ Immediate(Map::kMaximumBitField2FastHoleyElementValue));
j(above, fail, distance);
}
@@ -770,10 +833,10 @@ void MacroAssembler::CheckFastObjectElements(Register map,
STATIC_ASSERT(FAST_ELEMENTS == 2);
STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
cmpb(FieldOperand(map, Map::kBitField2Offset),
- Map::kMaximumBitField2FastHoleySmiElementValue);
+ Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
j(below_equal, fail, distance);
cmpb(FieldOperand(map, Map::kBitField2Offset),
- Map::kMaximumBitField2FastHoleyElementValue);
+ Immediate(Map::kMaximumBitField2FastHoleyElementValue));
j(above, fail, distance);
}
@@ -784,7 +847,7 @@ void MacroAssembler::CheckFastSmiElements(Register map,
STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
cmpb(FieldOperand(map, Map::kBitField2Offset),
- Map::kMaximumBitField2FastHoleySmiElementValue);
+ Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
j(above, fail, distance);
}
@@ -873,7 +936,7 @@ Condition MacroAssembler::IsObjectNameType(Register heap_object,
Register instance_type) {
mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
- cmpb(instance_type, static_cast<uint8_t>(LAST_NAME_TYPE));
+ cmpb(instance_type, Immediate(LAST_NAME_TYPE));
return below_equal;
}
@@ -895,6 +958,15 @@ void MacroAssembler::AssertNumber(Register object) {
}
}
+void MacroAssembler::AssertNotNumber(Register object) {
+ if (emit_debug_code()) {
+ test(object, Immediate(kSmiTagMask));
+ Check(not_equal, kOperandIsANumber);
+ cmp(FieldOperand(object, HeapObject::kMapOffset),
+ isolate()->factory()->heap_number_map());
+ Check(not_equal, kOperandIsANumber);
+ }
+}
void MacroAssembler::AssertSmi(Register object) {
if (emit_debug_code()) {
@@ -988,15 +1060,12 @@ void MacroAssembler::AssertNotSmi(Register object) {
}
}
-
-void MacroAssembler::StubPrologue() {
+void MacroAssembler::StubPrologue(StackFrame::Type type) {
push(ebp); // Caller's frame pointer.
mov(ebp, esp);
- push(esi); // Callee's context.
- push(Immediate(Smi::FromInt(StackFrame::STUB)));
+ push(Immediate(Smi::FromInt(type)));
}
-
void MacroAssembler::Prologue(bool code_pre_aging) {
PredictableCodeSizeScope predictible_code_size_scope(this,
kNoCodeAgeSequenceLength);
@@ -1031,9 +1100,10 @@ void MacroAssembler::EnterFrame(StackFrame::Type type,
void MacroAssembler::EnterFrame(StackFrame::Type type) {
push(ebp);
mov(ebp, esp);
- push(esi);
push(Immediate(Smi::FromInt(type)));
- push(Immediate(CodeObject()));
+ if (type == StackFrame::INTERNAL) {
+ push(Immediate(CodeObject()));
+ }
if (emit_debug_code()) {
cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
Check(not_equal, kCodeObjectNotProperlyPatched);
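The prologue changes above push a Smi-encoded frame-type marker into the slot that used to hold the context. A small sketch of why that works, assuming the usual 32-bit Smi encoding (value shifted left by one, tag bit clear); the frame-type number is a placeholder, not the real StackFrame enum value:

  #include <cstdint>
  #include <cstdio>

  static int32_t SmiFromInt(int32_t value) { return value << 1; }  // low bit stays 0

  int main() {
    const int kStubFrameType = 4;  // hypothetical enumerator value
    std::printf("marker = 0x%x\n", SmiFromInt(kStubFrameType));  // 0x8 (even)
    // A real context is a tagged HeapObject pointer with the low bit set, so a
    // frame walker can tell a type marker and a context apart in the same slot.
  }
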
@@ -1043,7 +1113,7 @@ void MacroAssembler::EnterFrame(StackFrame::Type type) {
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
if (emit_debug_code()) {
- cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
+ cmp(Operand(ebp, CommonFrameConstants::kContextOrFrameTypeOffset),
Immediate(Smi::FromInt(type)));
Check(equal, kStackFrameTypesMustMatch);
}
@@ -1053,15 +1123,17 @@ void MacroAssembler::LeaveFrame(StackFrame::Type type) {
void MacroAssembler::EnterExitFramePrologue() {
// Set up the frame structure on the stack.
- DCHECK(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
- DCHECK(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
- DCHECK(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
+ DCHECK_EQ(+2 * kPointerSize, ExitFrameConstants::kCallerSPDisplacement);
+ DCHECK_EQ(+1 * kPointerSize, ExitFrameConstants::kCallerPCOffset);
+ DCHECK_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset);
push(ebp);
mov(ebp, esp);
// Reserve room for entry stack pointer and push the code object.
- DCHECK(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
+ push(Immediate(Smi::FromInt(StackFrame::EXIT)));
+ DCHECK_EQ(-2 * kPointerSize, ExitFrameConstants::kSPOffset);
push(Immediate(0)); // Saved entry sp, patched before call.
+ DCHECK_EQ(-3 * kPointerSize, ExitFrameConstants::kCodeOffset);
push(Immediate(CodeObject())); // Accessed from ExitFrame::code_slot.
// Save the frame pointer and the context in top.
@@ -1080,7 +1152,7 @@ void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
int space = XMMRegister::kMaxNumRegisters * kDoubleSize +
argc * kPointerSize;
sub(esp, Immediate(space));
- const int offset = -2 * kPointerSize;
+ const int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
XMMRegister reg = XMMRegister::from_code(i);
movsd(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
@@ -1123,7 +1195,7 @@ void MacroAssembler::EnterApiExitFrame(int argc) {
void MacroAssembler::LeaveExitFrame(bool save_doubles, bool pop_arguments) {
// Optionally restore all XMM registers.
if (save_doubles) {
- const int offset = -2 * kPointerSize;
+ const int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
XMMRegister reg = XMMRegister::from_code(i);
movsd(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
@@ -1206,8 +1278,18 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
DCHECK(!holder_reg.is(scratch2));
DCHECK(!scratch1.is(scratch2));
- // Load current lexical context from the stack frame.
- mov(scratch1, Operand(ebp, StandardFrameConstants::kContextOffset));
+ // Load current lexical context from the active StandardFrame, which
+ // may require crawling past STUB frames.
+ Label load_context;
+ Label has_context;
+ mov(scratch2, ebp);
+ bind(&load_context);
+ mov(scratch1,
+ MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
+ JumpIfNotSmi(scratch1, &has_context);
+ mov(scratch2, MemOperand(scratch2, CommonFrameConstants::kCallerFPOffset));
+ jmp(&load_context);
+ bind(&has_context);
// When generating debug code, make sure the lexical context is set.
if (emit_debug_code()) {
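With that encoding, the new lookup above no longer reads the context straight off ebp: it follows caller frame pointers until the kContextOrFrameTypeOffset slot holds a tagged pointer (a context) rather than a Smi frame marker, skipping STUB frames. A minimal stand-alone model, with hypothetical types and values rather than real V8 ones:

  #include <cstdint>
  #include <cstdio>

  struct Frame {
    Frame* caller_fp;                 // the kCallerFPOffset slot
    uintptr_t context_or_frame_type;  // the kContextOrFrameTypeOffset slot
  };

  static bool IsSmi(uintptr_t value) { return (value & 1) == 0; }  // ia32 Smi tag

  static uintptr_t FindContext(Frame* fp) {
    // Crawl past frames whose marker slot holds a Smi frame type (e.g. STUB
    // frames) until we reach a frame that stored a real context.
    while (IsSmi(fp->context_or_frame_type)) fp = fp->caller_fp;
    return fp->context_or_frame_type;
  }

  int main() {
    Frame standard{nullptr, 0x1235};  // odd => tagged pointer, i.e. a context
    Frame stub{&standard, 4 << 1};    // even => Smi frame-type marker (made-up value)
    std::printf("context = 0x%lx\n", (unsigned long)FindContext(&stub));  // 0x1235
  }
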
@@ -1920,7 +2002,7 @@ void MacroAssembler::BooleanBitTest(Register object,
int byte_index = bit_index / kBitsPerByte;
int byte_bit_index = bit_index & (kBitsPerByte - 1);
test_b(FieldOperand(object, field_offset + byte_index),
- static_cast<byte>(1 << byte_bit_index));
+ Immediate(1 << byte_bit_index));
}
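For reference, the byte/bit split BooleanBitTest relies on, assuming kBitsPerByte == 8 and an arbitrary example bit index:

  #include <cstdio>

  int main() {
    const int kBitsPerByte = 8;
    int bit_index = 11;                                    // hypothetical bit
    int byte_index = bit_index / kBitsPerByte;             // 1
    int byte_bit_index = bit_index & (kBitsPerByte - 1);   // 3
    std::printf("byte %d, mask 0x%02x\n", byte_index, 1 << byte_bit_index);  // byte 1, mask 0x08
  }
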
@@ -2086,6 +2168,87 @@ void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
+void MacroAssembler::PrepareForTailCall(
+ const ParameterCount& callee_args_count, Register caller_args_count_reg,
+ Register scratch0, Register scratch1, ReturnAddressState ra_state,
+ int number_of_temp_values_after_return_address) {
+#if DEBUG
+ if (callee_args_count.is_reg()) {
+ DCHECK(!AreAliased(callee_args_count.reg(), caller_args_count_reg, scratch0,
+ scratch1));
+ } else {
+ DCHECK(!AreAliased(caller_args_count_reg, scratch0, scratch1));
+ }
+ DCHECK(ra_state != ReturnAddressState::kNotOnStack ||
+ number_of_temp_values_after_return_address == 0);
+#endif
+
+ // Calculate the destination address where we will put the return address
+ // after we drop current frame.
+ Register new_sp_reg = scratch0;
+ if (callee_args_count.is_reg()) {
+ sub(caller_args_count_reg, callee_args_count.reg());
+ lea(new_sp_reg,
+ Operand(ebp, caller_args_count_reg, times_pointer_size,
+ StandardFrameConstants::kCallerPCOffset -
+ number_of_temp_values_after_return_address * kPointerSize));
+ } else {
+ lea(new_sp_reg, Operand(ebp, caller_args_count_reg, times_pointer_size,
+ StandardFrameConstants::kCallerPCOffset -
+ (callee_args_count.immediate() +
+ number_of_temp_values_after_return_address) *
+ kPointerSize));
+ }
+
+ if (FLAG_debug_code) {
+ cmp(esp, new_sp_reg);
+ Check(below, kStackAccessBelowStackPointer);
+ }
+
+ // Copy return address from caller's frame to current frame's return address
+ // to avoid its trashing and let the following loop copy it to the right
+ // place.
+ Register tmp_reg = scratch1;
+ if (ra_state == ReturnAddressState::kOnStack) {
+ mov(tmp_reg, Operand(ebp, StandardFrameConstants::kCallerPCOffset));
+ mov(Operand(esp, number_of_temp_values_after_return_address * kPointerSize),
+ tmp_reg);
+ } else {
+ DCHECK(ReturnAddressState::kNotOnStack == ra_state);
+ DCHECK_EQ(0, number_of_temp_values_after_return_address);
+ Push(Operand(ebp, StandardFrameConstants::kCallerPCOffset));
+ }
+
+ // Restore caller's frame pointer now as it could be overwritten by
+ // the copying loop.
+ mov(ebp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
+
+ // +2 here is to copy both receiver and return address.
+ Register count_reg = caller_args_count_reg;
+ if (callee_args_count.is_reg()) {
+ lea(count_reg, Operand(callee_args_count.reg(),
+ 2 + number_of_temp_values_after_return_address));
+ } else {
+ mov(count_reg, Immediate(callee_args_count.immediate() + 2 +
+ number_of_temp_values_after_return_address));
+ // TODO(ishell): Unroll copying loop for small immediate values.
+ }
+
+ // Now copy callee arguments to the caller frame going backwards to avoid
+ // callee arguments corruption (source and destination areas could overlap).
+ Label loop, entry;
+ jmp(&entry, Label::kNear);
+ bind(&loop);
+ dec(count_reg);
+ mov(tmp_reg, Operand(esp, count_reg, times_pointer_size, 0));
+ mov(Operand(new_sp_reg, count_reg, times_pointer_size, 0), tmp_reg);
+ bind(&entry);
+ cmp(count_reg, Immediate(0));
+ j(not_equal, &loop, Label::kNear);
+
+ // Leave current frame.
+ mov(esp, new_sp_reg);
+}
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
const ParameterCount& actual,
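The copy loop at the end of PrepareForTailCall walks backwards because the destination window (at new_sp_reg, above esp) overlaps the source: copying the highest word first means no slot is overwritten before it has been read, the same idea as memmove. A minimal stand-alone sketch with a hypothetical helper:

  #include <cstdint>
  #include <cstdio>

  static void CopyBackwards(uint32_t* dst, const uint32_t* src, int count_reg) {
    while (count_reg != 0) {          // cmp(count_reg, 0); j(not_equal, &loop)
      --count_reg;                    // dec(count_reg)
      dst[count_reg] = src[count_reg];
    }
  }

  int main() {
    // "Stack" slots; copy 3 words from index 0 to index 2 (overlapping ranges).
    uint32_t slots[6] = {10, 11, 12, 0, 0, 0};
    CopyBackwards(slots + 2, slots, 3);
    for (uint32_t s : slots) std::printf("%u ", s);  // 10 11 10 11 12 0
  }
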
@@ -2160,7 +2323,7 @@ void MacroAssembler::FloodFunctionIfStepping(Register fun, Register new_target,
Label skip_flooding;
ExternalReference step_in_enabled =
ExternalReference::debug_step_in_enabled_address(isolate());
- cmpb(Operand::StaticVariable(step_in_enabled), 0);
+ cmpb(Operand::StaticVariable(step_in_enabled), Immediate(0));
j(equal, &skip_flooding);
{
FrameScope frame(this,
@@ -2850,7 +3013,7 @@ void MacroAssembler::JumpIfNotUniqueNameInstanceType(Operand operand,
Label succeed;
test(operand, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
j(zero, &succeed);
- cmpb(operand, static_cast<uint8_t>(SYMBOL_TYPE));
+ cmpb(operand, Immediate(SYMBOL_TYPE));
j(not_equal, not_unique_name, distance);
bind(&succeed);
@@ -2998,8 +3161,7 @@ void MacroAssembler::CheckPageFlag(
and_(scratch, object);
}
if (mask < (1 << kBitsPerByte)) {
- test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
- static_cast<uint8_t>(mask));
+ test_b(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
} else {
test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
}
@@ -3022,7 +3184,7 @@ void MacroAssembler::CheckPageFlagForMap(
DCHECK(!isolate()->heap()->mark_compact_collector()->
IsOnEvacuationCandidate(*map));
if (mask < (1 << kBitsPerByte)) {
- test_b(Operand::StaticVariable(reference), static_cast<uint8_t>(mask));
+ test_b(Operand::StaticVariable(reference), Immediate(mask));
} else {
test(Operand::StaticVariable(reference), Immediate(mask));
}
@@ -3062,7 +3224,8 @@ void MacroAssembler::HasColor(Register object,
jmp(&other_color, Label::kNear);
bind(&word_boundary);
- test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize), 1);
+ test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize),
+ Immediate(1));
j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
bind(&other_color);
@@ -3164,19 +3327,40 @@ void MacroAssembler::TestJSArrayForAllocationMemento(
Register receiver_reg,
Register scratch_reg,
Label* no_memento_found) {
- ExternalReference new_space_start =
- ExternalReference::new_space_start(isolate());
+ Label map_check;
+ Label top_check;
ExternalReference new_space_allocation_top =
ExternalReference::new_space_allocation_top_address(isolate());
-
- lea(scratch_reg, Operand(receiver_reg,
- JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
- cmp(scratch_reg, Immediate(new_space_start));
- j(less, no_memento_found);
+ const int kMementoMapOffset = JSArray::kSize - kHeapObjectTag;
+ const int kMementoEndOffset = kMementoMapOffset + AllocationMemento::kSize;
+
+ // Bail out if the object is not in new space.
+ JumpIfNotInNewSpace(receiver_reg, scratch_reg, no_memento_found);
+ // If the object is in new space, we need to check whether it is on the same
+ // page as the current top.
+ lea(scratch_reg, Operand(receiver_reg, kMementoEndOffset));
+ xor_(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
+ test(scratch_reg, Immediate(~Page::kPageAlignmentMask));
+ j(zero, &top_check);
+ // The object is on a different page than allocation top. Bail out if the
+ // object sits on the page boundary as no memento can follow and we cannot
+ // touch the memory following it.
+ lea(scratch_reg, Operand(receiver_reg, kMementoEndOffset));
+ xor_(scratch_reg, receiver_reg);
+ test(scratch_reg, Immediate(~Page::kPageAlignmentMask));
+ j(not_zero, no_memento_found);
+ // Continue with the actual map check.
+ jmp(&map_check);
+ // If top is on the same page as the current object, we need to check whether
+ // we are below top.
+ bind(&top_check);
+ lea(scratch_reg, Operand(receiver_reg, kMementoEndOffset));
cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
j(greater, no_memento_found);
- cmp(MemOperand(scratch_reg, -AllocationMemento::kSize),
- Immediate(isolate()->factory()->allocation_memento_map()));
+ // Memento map check.
+ bind(&map_check);
+ mov(scratch_reg, Operand(receiver_reg, kMementoMapOffset));
+ cmp(scratch_reg, Immediate(isolate()->factory()->allocation_memento_map()));
}
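
The rewritten memento check above replaces the comparison against new_space_start with a "same page?" test: two addresses lie on the same power-of-two sized page exactly when their bits above the page offset agree, which is what the xor_ plus test(~Page::kPageAlignmentMask) sequence computes. A stand-alone sketch with an assumed page size and made-up addresses:

  #include <cstdint>
  #include <cstdio>

  int main() {
    const uintptr_t kPageSize = 1u << 19;              // assumed page size
    const uintptr_t kPageAlignmentMask = kPageSize - 1;

    uintptr_t memento_end = 0x80010;                   // hypothetical object end
    uintptr_t allocation_top = 0x8ff00;                // hypothetical new-space top

    bool same_page = ((memento_end ^ allocation_top) & ~kPageAlignmentMask) == 0;
    std::printf("same page: %s\n", same_page ? "yes" : "no");  // yes
  }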