Diffstat (limited to 'deps/v8/src/mips/lithium-codegen-mips.cc')
-rw-r--r--  deps/v8/src/mips/lithium-codegen-mips.cc  147
1 file changed, 92 insertions, 55 deletions
diff --git a/deps/v8/src/mips/lithium-codegen-mips.cc b/deps/v8/src/mips/lithium-codegen-mips.cc
index 5c26001ef0..768531fce3 100644
--- a/deps/v8/src/mips/lithium-codegen-mips.cc
+++ b/deps/v8/src/mips/lithium-codegen-mips.cc
@@ -25,8 +25,6 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#include "src/v8.h"
-
#include "src/base/bits.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
@@ -130,7 +128,7 @@ bool LCodeGen::GeneratePrologue() {
#ifdef DEBUG
if (strlen(FLAG_stop_at) > 0 &&
- info_->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
+ info_->literal()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
__ stop("stop_at");
}
#endif
@@ -424,6 +422,7 @@ Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) {
Handle<Object> literal = constant->handle(isolate());
Representation r = chunk_->LookupLiteralRepresentation(const_op);
if (r.IsInteger32()) {
+ AllowDeferredHandleDereference get_number;
DCHECK(literal->IsNumber());
__ li(scratch, Operand(static_cast<int32_t>(literal->Number())));
} else if (r.IsSmi()) {
@@ -646,15 +645,23 @@ void LCodeGen::AddToTranslation(LEnvironment* environment,
}
if (op->IsStackSlot()) {
+ int index = op->index();
+ if (index >= 0) {
+ index += StandardFrameConstants::kFixedFrameSize / kPointerSize;
+ }
if (is_tagged) {
- translation->StoreStackSlot(op->index());
+ translation->StoreStackSlot(index);
} else if (is_uint32) {
- translation->StoreUint32StackSlot(op->index());
+ translation->StoreUint32StackSlot(index);
} else {
- translation->StoreInt32StackSlot(op->index());
+ translation->StoreInt32StackSlot(index);
}
} else if (op->IsDoubleStackSlot()) {
- translation->StoreDoubleStackSlot(op->index());
+ int index = op->index();
+ if (index >= 0) {
+ index += StandardFrameConstants::kFixedFrameSize / kPointerSize;
+ }
+ translation->StoreDoubleStackSlot(index);
} else if (op->IsRegister()) {
Register reg = ToRegister(op);
if (is_tagged) {
@@ -2180,6 +2187,14 @@ void LCodeGen::DoBranch(LBranch* instr) {
__ Branch(instr->TrueLabel(chunk_), eq, scratch, Operand(SYMBOL_TYPE));
}
+ if (expected.Contains(ToBooleanStub::SIMD_VALUE)) {
+ // SIMD value -> true.
+ const Register scratch = scratch1();
+ __ lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset));
+ __ Branch(instr->TrueLabel(chunk_), eq, scratch,
+ Operand(SIMD128_VALUE_TYPE));
+ }
+
if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) {
// heap number -> false iff +0, -0, or NaN.
DoubleRegister dbl_scratch = double_scratch0();
@@ -2879,13 +2894,31 @@ void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
__ li(LoadDescriptor::NameRegister(), Operand(instr->name()));
EmitVectorLoadICRegisters<LLoadGlobalGeneric>(instr);
- ContextualMode mode = instr->for_typeof() ? NOT_CONTEXTUAL : CONTEXTUAL;
- Handle<Code> ic = CodeFactory::LoadICInOptimizedCode(isolate(), mode, SLOPPY,
- PREMONOMORPHIC).code();
+ Handle<Code> ic =
+ CodeFactory::LoadICInOptimizedCode(isolate(), instr->typeof_mode(),
+ SLOPPY, PREMONOMORPHIC).code();
CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
+void LCodeGen::DoLoadGlobalViaContext(LLoadGlobalViaContext* instr) {
+ DCHECK(ToRegister(instr->context()).is(cp));
+ DCHECK(ToRegister(instr->result()).is(v0));
+
+ int const slot = instr->slot_index();
+ int const depth = instr->depth();
+ if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
+ __ li(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
+ Handle<Code> stub =
+ CodeFactory::LoadGlobalViaContext(isolate(), depth).code();
+ CallCode(stub, RelocInfo::CODE_TARGET, instr);
+ } else {
+ __ Push(Smi::FromInt(slot));
+ __ CallRuntime(Runtime::kLoadGlobalViaContext, 1);
+ }
+}
+
+
void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
Register context = ToRegister(instr->context());
Register result = ToRegister(instr->result());
@@ -2982,7 +3015,7 @@ void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
EmitVectorLoadICRegisters<LLoadNamedGeneric>(instr);
Handle<Code> ic =
CodeFactory::LoadICInOptimizedCode(
- isolate(), NOT_CONTEXTUAL, instr->hydrogen()->language_mode(),
+ isolate(), NOT_INSIDE_TYPEOF, instr->hydrogen()->language_mode(),
instr->hydrogen()->initialization_state()).code();
CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
@@ -3084,10 +3117,7 @@ void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) {
? (element_size_shift - kSmiTagSize) : element_size_shift;
int base_offset = instr->base_offset();
- if (elements_kind == EXTERNAL_FLOAT32_ELEMENTS ||
- elements_kind == FLOAT32_ELEMENTS ||
- elements_kind == EXTERNAL_FLOAT64_ELEMENTS ||
- elements_kind == FLOAT64_ELEMENTS) {
+ if (elements_kind == FLOAT32_ELEMENTS || elements_kind == FLOAT64_ELEMENTS) {
FPURegister result = ToDoubleRegister(instr->result());
if (key_is_constant) {
__ Addu(scratch0(), external_pointer, constant_key << element_size_shift);
@@ -3095,8 +3125,7 @@ void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) {
__ sll(scratch0(), key, shift_size);
__ Addu(scratch0(), scratch0(), external_pointer);
}
- if (elements_kind == EXTERNAL_FLOAT32_ELEMENTS ||
- elements_kind == FLOAT32_ELEMENTS) {
+ if (elements_kind == FLOAT32_ELEMENTS) {
__ lwc1(result, MemOperand(scratch0(), base_offset));
__ cvt_d_s(result, result);
} else { // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS
@@ -3108,29 +3137,22 @@ void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) {
key, external_pointer, key_is_constant, constant_key,
element_size_shift, shift_size, base_offset);
switch (elements_kind) {
- case EXTERNAL_INT8_ELEMENTS:
case INT8_ELEMENTS:
__ lb(result, mem_operand);
break;
- case EXTERNAL_UINT8_CLAMPED_ELEMENTS:
- case EXTERNAL_UINT8_ELEMENTS:
case UINT8_ELEMENTS:
case UINT8_CLAMPED_ELEMENTS:
__ lbu(result, mem_operand);
break;
- case EXTERNAL_INT16_ELEMENTS:
case INT16_ELEMENTS:
__ lh(result, mem_operand);
break;
- case EXTERNAL_UINT16_ELEMENTS:
case UINT16_ELEMENTS:
__ lhu(result, mem_operand);
break;
- case EXTERNAL_INT32_ELEMENTS:
case INT32_ELEMENTS:
__ lw(result, mem_operand);
break;
- case EXTERNAL_UINT32_ELEMENTS:
case UINT32_ELEMENTS:
__ lw(result, mem_operand);
if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) {
@@ -3140,8 +3162,6 @@ void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) {
break;
case FLOAT32_ELEMENTS:
case FLOAT64_ELEMENTS:
- case EXTERNAL_FLOAT32_ELEMENTS:
- case EXTERNAL_FLOAT64_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
case FAST_ELEMENTS:
case FAST_SMI_ELEMENTS:
@@ -3253,7 +3273,7 @@ void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) {
void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) {
- if (instr->is_typed_elements()) {
+ if (instr->is_fixed_typed_array()) {
DoLoadKeyedExternalArray(instr);
} else if (instr->hydrogen()->representation().IsDouble()) {
DoLoadKeyedFixedDoubleArray(instr);
@@ -3507,9 +3527,8 @@ void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
DCHECK(ToRegister(instr->context()).is(cp));
__ li(scratch0(), instr->hydrogen()->pairs());
__ li(scratch1(), Operand(Smi::FromInt(instr->hydrogen()->flags())));
- // The context is the first argument.
- __ Push(cp, scratch0(), scratch1());
- CallRuntime(Runtime::kDeclareGlobals, 3, instr);
+ __ Push(scratch0(), scratch1());
+ CallRuntime(Runtime::kDeclareGlobals, 2, instr);
}
@@ -4179,6 +4198,30 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
}
+void LCodeGen::DoStoreGlobalViaContext(LStoreGlobalViaContext* instr) {
+ DCHECK(ToRegister(instr->context()).is(cp));
+ DCHECK(ToRegister(instr->value())
+ .is(StoreGlobalViaContextDescriptor::ValueRegister()));
+
+ int const slot = instr->slot_index();
+ int const depth = instr->depth();
+ if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
+ __ li(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
+ Handle<Code> stub = CodeFactory::StoreGlobalViaContext(
+ isolate(), depth, instr->language_mode())
+ .code();
+ CallCode(stub, RelocInfo::CODE_TARGET, instr);
+ } else {
+ __ Push(Smi::FromInt(slot));
+ __ Push(StoreGlobalViaContextDescriptor::ValueRegister());
+ __ CallRuntime(is_strict(language_mode())
+ ? Runtime::kStoreGlobalViaContext_Strict
+ : Runtime::kStoreGlobalViaContext_Sloppy,
+ 2);
+ }
+}
+
+
void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
Condition cc = instr->hydrogen()->allow_equality() ? hi : hs;
Operand operand(0);
@@ -4221,10 +4264,7 @@ void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) {
? (element_size_shift - kSmiTagSize) : element_size_shift;
int base_offset = instr->base_offset();
- if (elements_kind == EXTERNAL_FLOAT32_ELEMENTS ||
- elements_kind == FLOAT32_ELEMENTS ||
- elements_kind == EXTERNAL_FLOAT64_ELEMENTS ||
- elements_kind == FLOAT64_ELEMENTS) {
+ if (elements_kind == FLOAT32_ELEMENTS || elements_kind == FLOAT64_ELEMENTS) {
Register address = scratch0();
FPURegister value(ToDoubleRegister(instr->value()));
if (key_is_constant) {
@@ -4239,8 +4279,7 @@ void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) {
__ Addu(address, external_pointer, address);
}
- if (elements_kind == EXTERNAL_FLOAT32_ELEMENTS ||
- elements_kind == FLOAT32_ELEMENTS) {
+ if (elements_kind == FLOAT32_ELEMENTS) {
__ cvt_s_d(double_scratch0(), value);
__ swc1(double_scratch0(), MemOperand(address, base_offset));
} else { // Storing doubles, not floats.
@@ -4253,30 +4292,21 @@ void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) {
element_size_shift, shift_size,
base_offset);
switch (elements_kind) {
- case EXTERNAL_UINT8_CLAMPED_ELEMENTS:
- case EXTERNAL_INT8_ELEMENTS:
- case EXTERNAL_UINT8_ELEMENTS:
case UINT8_ELEMENTS:
case UINT8_CLAMPED_ELEMENTS:
case INT8_ELEMENTS:
__ sb(value, mem_operand);
break;
- case EXTERNAL_INT16_ELEMENTS:
- case EXTERNAL_UINT16_ELEMENTS:
case INT16_ELEMENTS:
case UINT16_ELEMENTS:
__ sh(value, mem_operand);
break;
- case EXTERNAL_INT32_ELEMENTS:
- case EXTERNAL_UINT32_ELEMENTS:
case INT32_ELEMENTS:
case UINT32_ELEMENTS:
__ sw(value, mem_operand);
break;
case FLOAT32_ELEMENTS:
case FLOAT64_ELEMENTS:
- case EXTERNAL_FLOAT32_ELEMENTS:
- case EXTERNAL_FLOAT64_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
case FAST_ELEMENTS:
case FAST_SMI_ELEMENTS:
@@ -4390,7 +4420,7 @@ void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) {
void LCodeGen::DoStoreKeyed(LStoreKeyed* instr) {
// By cases: external, fast double
- if (instr->is_typed_elements()) {
+ if (instr->is_fixed_typed_array()) {
DoStoreKeyedExternalArray(instr);
} else if (instr->hydrogen()->value()->representation().IsDouble()) {
DoStoreKeyedFixedDoubleArray(instr);
@@ -5651,15 +5681,9 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
} else if (String::Equals(type_name, factory->string_string())) {
__ JumpIfSmi(input, false_label);
__ GetObjectType(input, input, scratch);
- __ Branch(USE_DELAY_SLOT, false_label,
- ge, scratch, Operand(FIRST_NONSTRING_TYPE));
- // input is an object so we can load the BitFieldOffset even if we take the
- // other branch.
- __ lbu(at, FieldMemOperand(input, Map::kBitFieldOffset));
- __ And(at, at, 1 << Map::kIsUndetectable);
- *cmp1 = at;
- *cmp2 = Operand(zero_reg);
- final_branch_condition = eq;
+ *cmp1 = scratch;
+ *cmp2 = Operand(FIRST_NONSTRING_TYPE);
+ final_branch_condition = lt;
} else if (String::Equals(type_name, factory->symbol_string())) {
__ JumpIfSmi(input, false_label);
@@ -5717,6 +5741,19 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
*cmp2 = Operand(zero_reg);
final_branch_condition = eq;
+// clang-format off
+#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
+ } else if (String::Equals(type_name, factory->type##_string())) { \
+ __ JumpIfSmi(input, false_label); \
+ __ lw(input, FieldMemOperand(input, HeapObject::kMapOffset)); \
+ __ LoadRoot(at, Heap::k##Type##MapRootIndex); \
+ *cmp1 = input; \
+ *cmp2 = Operand(at); \
+ final_branch_condition = eq;
+ SIMD128_TYPES(SIMD128_TYPE)
+#undef SIMD128_TYPE
+ // clang-format on
+
} else {
*cmp1 = at;
*cmp2 = Operand(zero_reg); // Set to valid regs, to avoid caller assertion.