Diffstat (limited to 'deps/v8/src/ia32/lithium-codegen-ia32.cc')
-rw-r--r--  deps/v8/src/ia32/lithium-codegen-ia32.cc  295
1 file changed, 83 insertions(+), 212 deletions(-)
diff --git a/deps/v8/src/ia32/lithium-codegen-ia32.cc b/deps/v8/src/ia32/lithium-codegen-ia32.cc
index 71ad8387a0..850c182144 100644
--- a/deps/v8/src/ia32/lithium-codegen-ia32.cc
+++ b/deps/v8/src/ia32/lithium-codegen-ia32.cc
@@ -8,13 +8,13 @@
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
-#include "src/cpu-profiler.h"
#include "src/deoptimizer.h"
#include "src/hydrogen-osr.h"
#include "src/ia32/frames-ia32.h"
#include "src/ia32/lithium-codegen-ia32.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
+#include "src/profiler/cpu-profiler.h"
namespace v8 {
namespace internal {
@@ -76,7 +76,7 @@ void LCodeGen::FinishCode(Handle<Code> code) {
code->set_stack_slots(GetStackSlotCount());
code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
PopulateDeoptimizationData(code);
- if (!info()->IsStub()) {
+ if (info()->ShouldEnsureSpaceForLazyDeopt()) {
Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
}
}
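
Note: this is one of three places in this file (see also GenerateSafepointTable and EnsureSpaceForLazyDeopt near the end of the diff) where the old `!info()->IsStub()` test is replaced by a single CompilationInfo predicate. A minimal sketch of what that predicate presumably amounts to -- illustrative, not the verbatim V8 definition:

    // Hypothetical sketch; the actual CompilationInfo method may differ.
    // Stubs are never lazily deoptimized, so only non-stub (optimized JS)
    // code needs reserved space for patching in a lazy-deopt call.
    bool CompilationInfo::ShouldEnsureSpaceForLazyDeopt() const {
      return !IsStub();
    }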
@@ -140,8 +140,7 @@ bool LCodeGen::GeneratePrologue() {
// Sloppy mode functions and builtins need to replace the receiver with the
// global proxy when called as functions (without an explicit receiver
// object).
- if (is_sloppy(info()->language_mode()) && info()->MayUseThis() &&
- !info()->is_native() && info()->scope()->has_this_declaration()) {
+ if (info()->MustReplaceUndefinedReceiverWithGlobalProxy()) {
Label ok;
// +1 for return address.
int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize;
@@ -249,16 +248,27 @@ bool LCodeGen::GeneratePrologue() {
if (info()->saves_caller_doubles()) SaveCallerDoubles();
}
+ return !is_aborted();
+}
+
+
+void LCodeGen::DoPrologue(LPrologue* instr) {
+ Comment(";;; Prologue begin");
// Possibly allocate a local context.
- int heap_slots = info_->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
- if (heap_slots > 0) {
+ if (info_->num_heap_slots() > 0) {
Comment(";;; Allocate local context");
bool need_write_barrier = true;
// Argument to NewContext is the function, which is still in edi.
- DCHECK(!info()->scope()->is_script_scope());
- if (heap_slots <= FastNewContextStub::kMaximumSlots) {
- FastNewContextStub stub(isolate(), heap_slots);
+ int slots = info_->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
+ Safepoint::DeoptMode deopt_mode = Safepoint::kNoLazyDeopt;
+ if (info()->scope()->is_script_scope()) {
+ __ push(edi);
+ __ Push(info()->scope()->GetScopeInfo(info()->isolate()));
+ __ CallRuntime(Runtime::kNewScriptContext, 2);
+ deopt_mode = Safepoint::kLazyDeopt;
+ } else if (slots <= FastNewContextStub::kMaximumSlots) {
+ FastNewContextStub stub(isolate(), slots);
__ CallStub(&stub);
// Result of FastNewContextStub is always in new space.
need_write_barrier = false;
@@ -266,7 +276,8 @@ bool LCodeGen::GeneratePrologue() {
__ push(edi);
__ CallRuntime(Runtime::kNewFunctionContext, 1);
}
- RecordSafepoint(Safepoint::kNoLazyDeopt);
+ RecordSafepoint(deopt_mode);
+
// Context is returned in eax. It replaces the context passed to us.
// It's saved in the stack and kept live in esi.
__ mov(esi, eax);
@@ -303,13 +314,7 @@ bool LCodeGen::GeneratePrologue() {
Comment(";;; End allocate local context");
}
- // Trace the call.
- if (FLAG_trace && info()->IsOptimizing()) {
- // We have not executed any compiled code yet, so esi still holds the
- // incoming context.
- __ CallRuntime(Runtime::kTraceEnter, 0);
- }
- return !is_aborted();
+ Comment(";;; Prologue end");
}
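
Note: context allocation moves out of GeneratePrologue into the new LPrologue instruction and gains an inline script-scope path. The dispatch the hunks above emit, summarized as a sketch (illustrative; the real work is the assembly above):

    // Illustrative decision tree for allocating the local context.
    if (scope->is_script_scope()) {
      // Runtime::kNewScriptContext; can lazily deopt => kLazyDeopt safepoint.
    } else if (slots <= FastNewContextStub::kMaximumSlots) {
      // FastNewContextStub; its result is always in new space, so the
      // write barrier can be skipped (need_write_barrier = false).
    } else {
      // Runtime::kNewFunctionContext as the generic fallback.
    }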
@@ -496,7 +501,7 @@ bool LCodeGen::GenerateDeferredCode() {
bool LCodeGen::GenerateSafepointTable() {
DCHECK(is_done());
- if (!info()->IsStub()) {
+ if (info()->ShouldEnsureSpaceForLazyDeopt()) {
// For lazy deoptimization we need space to patch a call after every call.
// Ensure there is always space for such patching, even if the code ends
// in a call.
@@ -825,7 +830,6 @@ void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr,
RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
DCHECK(environment->HasBeenRegistered());
int id = environment->deoptimization_index();
- DCHECK(info()->IsOptimizing() || info()->IsStub());
Address entry =
Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
if (entry == NULL) {
@@ -1067,11 +1071,6 @@ void LCodeGen::DoCallStub(LCallStub* instr) {
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
break;
}
- case CodeStub::StringCompare: {
- StringCompareStub stub(isolate());
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
- break;
- }
default:
UNREACHABLE();
}
@@ -2055,6 +2054,17 @@ void LCodeGen::EmitBranch(InstrType instr, Condition cc) {
}
+template <class InstrType>
+void LCodeGen::EmitTrueBranch(InstrType instr, Condition cc) {
+ int true_block = instr->TrueDestination(chunk_);
+ if (cc == no_condition) {
+ __ jmp(chunk_->GetAssemblyLabel(true_block));
+ } else {
+ __ j(cc, chunk_->GetAssemblyLabel(true_block));
+ }
+}
+
+
template<class InstrType>
void LCodeGen::EmitFalseBranch(InstrType instr, Condition cc) {
int false_block = instr->FalseDestination(chunk_);
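
Note: EmitTrueBranch is the mirror image of EmitFalseBranch directly below, introduced for DoHasInPrototypeChainAndBranch later in this diff, where both directions are needed inside one loop. Usage pattern, taken from that hunk:

    __ cmp(object_prototype, prototype);
    EmitTrueBranch(instr, equal);   // match: jump to the true block
    __ cmp(object_prototype, factory()->null_value());
    EmitFalseBranch(instr, equal);  // end of chain: jump to the false block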
@@ -2353,40 +2363,6 @@ void LCodeGen::DoCompareMinusZeroAndBranch(LCompareMinusZeroAndBranch* instr) {
}
-Condition LCodeGen::EmitIsObject(Register input,
- Register temp1,
- Label* is_not_object,
- Label* is_object) {
- __ JumpIfSmi(input, is_not_object);
-
- __ cmp(input, isolate()->factory()->null_value());
- __ j(equal, is_object);
-
- __ mov(temp1, FieldOperand(input, HeapObject::kMapOffset));
- // Undetectable objects behave like undefined.
- __ test_b(FieldOperand(temp1, Map::kBitFieldOffset),
- 1 << Map::kIsUndetectable);
- __ j(not_zero, is_not_object);
-
- __ movzx_b(temp1, FieldOperand(temp1, Map::kInstanceTypeOffset));
- __ cmp(temp1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
- __ j(below, is_not_object);
- __ cmp(temp1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
- return below_equal;
-}
-
-
-void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
- Register reg = ToRegister(instr->value());
- Register temp = ToRegister(instr->temp());
-
- Condition true_cond = EmitIsObject(
- reg, temp, instr->FalseLabel(chunk_), instr->TrueLabel(chunk_));
-
- EmitBranch(instr, true_cond);
-}
-
-
Condition LCodeGen::EmitIsString(Register input,
Register temp1,
Label* is_not_string,
@@ -2460,16 +2436,15 @@ static Condition ComputeCompareCondition(Token::Value op) {
void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
- Token::Value op = instr->op();
-
- Handle<Code> ic =
- CodeFactory::CompareIC(isolate(), op, Strength::WEAK).code();
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
+ DCHECK(ToRegister(instr->context()).is(esi));
+ DCHECK(ToRegister(instr->left()).is(edx));
+ DCHECK(ToRegister(instr->right()).is(eax));
- Condition condition = ComputeCompareCondition(op);
- __ test(eax, Operand(eax));
+ Handle<Code> code = CodeFactory::StringCompare(isolate()).code();
+ CallCode(code, RelocInfo::CODE_TARGET, instr);
+ __ test(eax, eax);
- EmitBranch(instr, condition);
+ EmitBranch(instr, ComputeCompareCondition(instr->op()));
}
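
Note: string comparison no longer bounces through the generic CompareIC; it calls the dedicated StringCompare stub with a fixed convention (context in esi, left operand in edx, right in eax) and branches on the smi result left in eax. `test(eax, eax)` sets the flags that ComputeCompareCondition maps from the token, roughly:

    // Presumed mapping in ComputeCompareCondition (see its definition
    // earlier in this file; listed here for orientation only):
    //   Token::EQ  -> equal         Token::LT  -> less
    //   Token::GT  -> greater       Token::LTE -> less_equal
    //   Token::GTE -> greater_equal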
@@ -2614,120 +2589,41 @@ void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
- // Object and function are in fixed registers defined by the stub.
DCHECK(ToRegister(instr->context()).is(esi));
- InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters);
+ DCHECK(ToRegister(instr->left()).is(InstanceOfDescriptor::LeftRegister()));
+ DCHECK(ToRegister(instr->right()).is(InstanceOfDescriptor::RightRegister()));
+ DCHECK(ToRegister(instr->result()).is(eax));
+ InstanceOfStub stub(isolate());
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
-
- Label true_value, done;
- __ test(eax, Operand(eax));
- __ j(zero, &true_value, Label::kNear);
- __ mov(ToRegister(instr->result()), factory()->false_value());
- __ jmp(&done, Label::kNear);
- __ bind(&true_value);
- __ mov(ToRegister(instr->result()), factory()->true_value());
- __ bind(&done);
-}
-
-
-void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
- class DeferredInstanceOfKnownGlobal final : public LDeferredCode {
- public:
- DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
- LInstanceOfKnownGlobal* instr)
- : LDeferredCode(codegen), instr_(instr) { }
- void Generate() override {
- codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
- }
- LInstruction* instr() override { return instr_; }
- Label* map_check() { return &map_check_; }
- private:
- LInstanceOfKnownGlobal* instr_;
- Label map_check_;
- };
-
- DeferredInstanceOfKnownGlobal* deferred;
- deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
-
- Label done, false_result;
- Register object = ToRegister(instr->value());
- Register temp = ToRegister(instr->temp());
-
- // A Smi is not an instance of anything.
- __ JumpIfSmi(object, &false_result, Label::kNear);
-
- // This is the inlined call site instanceof cache. The two occurences of the
- // hole value will be patched to the last map/result pair generated by the
- // instanceof stub.
- Label cache_miss;
- Register map = ToRegister(instr->temp());
- __ mov(map, FieldOperand(object, HeapObject::kMapOffset));
- __ bind(deferred->map_check()); // Label for calculating code patching.
- Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value());
- __ cmp(map, Operand::ForCell(cache_cell)); // Patched to cached map.
- __ j(not_equal, &cache_miss, Label::kNear);
- __ mov(eax, factory()->the_hole_value()); // Patched to either true or false.
- __ jmp(&done, Label::kNear);
-
- // The inlined call site cache did not match. Check for null and string
- // before calling the deferred code.
- __ bind(&cache_miss);
- // Null is not an instance of anything.
- __ cmp(object, factory()->null_value());
- __ j(equal, &false_result, Label::kNear);
-
- // String values are not instances of anything.
- Condition is_string = masm_->IsObjectStringType(object, temp, temp);
- __ j(is_string, &false_result, Label::kNear);
-
- // Go to the deferred code.
- __ jmp(deferred->entry());
-
- __ bind(&false_result);
- __ mov(ToRegister(instr->result()), factory()->false_value());
-
- // Here result has either true or false. Deferred code also produces true or
- // false object.
- __ bind(deferred->exit());
- __ bind(&done);
}
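
Note: the rewritten InstanceOfStub produces the boolean result directly in eax, so the manual true/false materialization goes away, as does the entire DoInstanceOfKnownGlobal path below with its patched inline map-check cache. The register convention is implied by the DCHECKs above:

    // Implied calling convention (names from InstanceOfDescriptor):
    //   InstanceOfDescriptor::LeftRegister()  -- object under test
    //   InstanceOfDescriptor::RightRegister() -- constructor function
    //   eax                                   -- true_value/false_value result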
-void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
- Label* map_check) {
- PushSafepointRegistersScope scope(this);
+void LCodeGen::DoHasInPrototypeChainAndBranch(
+ LHasInPrototypeChainAndBranch* instr) {
+ Register const object = ToRegister(instr->object());
+ Register const object_map = ToRegister(instr->scratch());
+ Register const object_prototype = object_map;
+ Register const prototype = ToRegister(instr->prototype());
- InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
- flags = static_cast<InstanceofStub::Flags>(
- flags | InstanceofStub::kArgsInRegisters);
- flags = static_cast<InstanceofStub::Flags>(
- flags | InstanceofStub::kCallSiteInlineCheck);
- flags = static_cast<InstanceofStub::Flags>(
- flags | InstanceofStub::kReturnTrueFalseObject);
- InstanceofStub stub(isolate(), flags);
-
- // Get the temp register reserved by the instruction. This needs to be a
- // register which is pushed last by PushSafepointRegisters as top of the
- // stack is used to pass the offset to the location of the map check to
- // the stub.
- Register temp = ToRegister(instr->temp());
- DCHECK(MacroAssembler::SafepointRegisterStackIndex(temp) == 0);
- __ LoadHeapObject(InstanceofStub::right(), instr->function());
- static const int kAdditionalDelta = 13;
- int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
- __ mov(temp, Immediate(delta));
- __ StoreToSafepointRegisterSlot(temp, temp);
- CallCodeGeneric(stub.GetCode(),
- RelocInfo::CODE_TARGET,
- instr,
- RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
- // Get the deoptimization index of the LLazyBailout-environment that
- // corresponds to this instruction.
- LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
- safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
+ // The {object} must be a spec object. It's sufficient to know that {object}
+ // is not a smi, since all other non-spec objects have {null} prototypes and
+ // will be ruled out below.
+ if (instr->hydrogen()->ObjectNeedsSmiCheck()) {
+ __ test(object, Immediate(kSmiTagMask));
+ EmitFalseBranch(instr, zero);
+ }
- // Put the result value into the eax slot and restore all registers.
- __ StoreToSafepointRegisterSlot(eax, eax);
+ // Loop through the {object}'s prototype chain looking for the {prototype}.
+ __ mov(object_map, FieldOperand(object, HeapObject::kMapOffset));
+ Label loop;
+ __ bind(&loop);
+ __ mov(object_prototype, FieldOperand(object_map, Map::kPrototypeOffset));
+ __ cmp(object_prototype, prototype);
+ EmitTrueBranch(instr, equal);
+ __ cmp(object_prototype, factory()->null_value());
+ EmitFalseBranch(instr, equal);
+ __ mov(object_map, FieldOperand(object_prototype, HeapObject::kMapOffset));
+ __ jmp(&loop);
}
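
Note: DoHasInPrototypeChainAndBranch replaces the deferred InstanceofStub machinery with a straight-line walk of the prototype chain. What the assembly above computes, in C-like pseudocode (illustrative only):

    Map* map = object->map();
    for (;;) {
      Object* proto = map->prototype();
      if (proto == prototype)  return true;   // EmitTrueBranch(equal)
      if (proto == null_value) return false;  // EmitFalseBranch(equal)
      map = HeapObject::cast(proto)->map();   // climb one link and loop
    }

Also worth noting: object_prototype deliberately aliases object_map. Once the prototype has been loaded, the map is dead, so the whole loop runs in a single scratch register.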
@@ -3438,11 +3334,8 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
// Change context.
__ mov(esi, FieldOperand(function_reg, JSFunction::kContextOffset));
- // Set eax to arguments count if adaption is not needed. Assumes that eax
- // is available to write to at this point.
- if (dont_adapt_arguments) {
- __ mov(eax, arity);
- }
+ // Always initialize eax to the number of actual arguments.
+ __ mov(eax, arity);
// Invoke function directly.
if (function.is_identical_to(info()->closure())) {
@@ -3504,9 +3397,7 @@ void LCodeGen::DoCallJSFunction(LCallJSFunction* instr) {
DCHECK(ToRegister(instr->function()).is(edi));
DCHECK(ToRegister(instr->result()).is(eax));
- if (instr->hydrogen()->pass_argument_count()) {
- __ mov(eax, instr->arity());
- }
+ __ mov(eax, instr->arity());
// Change context.
__ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
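
Note: both this hunk and the CallKnownFunction hunk above drop the conditional (`dont_adapt_arguments` / `pass_argument_count()`) and unconditionally load the actual arity, so the callee can always rely on eax holding the argument count:

    // Set unconditionally at every call site now:
    __ mov(eax, arity);  // actual argument count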
@@ -5390,7 +5281,7 @@ void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
// eax = regexp literal clone.
// esi = context.
int literal_offset =
- FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index());
+ LiteralsArray::OffsetOfLiteralAt(instr->hydrogen()->literal_index());
__ LoadHeapObject(ecx, instr->hydrogen()->literals());
__ mov(ebx, FieldOperand(ecx, literal_offset));
__ cmp(ebx, factory()->undefined_value());
@@ -5433,26 +5324,6 @@ void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
}
-void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
- DCHECK(ToRegister(instr->context()).is(esi));
- // Use the fast case closure allocation code that allocates in new
- // space for nested functions that don't need literals cloning.
- bool pretenure = instr->hydrogen()->pretenure();
- if (!pretenure && instr->hydrogen()->has_no_literals()) {
- FastNewClosureStub stub(isolate(), instr->hydrogen()->language_mode(),
- instr->hydrogen()->kind());
- __ mov(ebx, Immediate(instr->hydrogen()->shared_info()));
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
- } else {
- __ push(esi);
- __ push(Immediate(instr->hydrogen()->shared_info()));
- __ push(Immediate(pretenure ? factory()->true_value()
- : factory()->false_value()));
- CallRuntime(Runtime::kNewClosure, 3, instr);
- }
-}
-
-
void LCodeGen::DoTypeof(LTypeof* instr) {
DCHECK(ToRegister(instr->context()).is(esi));
DCHECK(ToRegister(instr->value()).is(ebx));
@@ -5523,24 +5394,24 @@ Condition LCodeGen::EmitTypeofIs(LTypeofIsAndBranch* instr, Register input) {
final_branch_condition = not_zero;
} else if (String::Equals(type_name, factory()->function_string())) {
- STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
__ JumpIfSmi(input, false_label, false_distance);
- __ CmpObjectType(input, JS_FUNCTION_TYPE, input);
- __ j(equal, true_label, true_distance);
- __ CmpInstanceType(input, JS_FUNCTION_PROXY_TYPE);
+ // Check for callable and not undetectable objects => true.
+ __ mov(input, FieldOperand(input, HeapObject::kMapOffset));
+ __ movzx_b(input, FieldOperand(input, Map::kBitFieldOffset));
+ __ and_(input, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
+ __ cmp(input, 1 << Map::kIsCallable);
final_branch_condition = equal;
} else if (String::Equals(type_name, factory()->object_string())) {
__ JumpIfSmi(input, false_label, false_distance);
__ cmp(input, factory()->null_value());
__ j(equal, true_label, true_distance);
- __ CmpObjectType(input, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, input);
+ STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
+ __ CmpObjectType(input, FIRST_SPEC_OBJECT_TYPE, input);
__ j(below, false_label, false_distance);
- __ CmpInstanceType(input, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
- __ j(above, false_label, false_distance);
- // Check for undetectable objects => false.
+ // Check for callable or undetectable objects => false.
__ test_b(FieldOperand(input, Map::kBitFieldOffset),
- 1 << Map::kIsUndetectable);
+ (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
final_branch_condition = zero;
// clang-format off
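
Note: the typeof checks are now driven by the map's bit field instead of instance-type ranges: "function" means callable and not undetectable, while "object" additionally requires a spec-object instance type and that neither bit be set (null also answers "object"). The two masked tests above, as C-like pseudocode (illustrative only):

    int bits = map->bit_field();
    int mask = (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable);
    bool typeof_is_function = (bits & mask) == (1 << Map::kIsCallable);
    bool typeof_is_object   = (bits & mask) == 0;  // test_b: ZF set => zero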
@@ -5588,7 +5459,7 @@ void LCodeGen::EmitIsConstructCall(Register temp) {
void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {
- if (!info()->IsStub()) {
+ if (info()->ShouldEnsureSpaceForLazyDeopt()) {
// Ensure that we have enough space after the previous lazy-bailout
// instruction for patching the code here.
int current_pc = masm()->pc_offset();