diff options
Diffstat (limited to 'deps/v8/src/full-codegen/mips/full-codegen-mips.cc')
-rw-r--r-- | deps/v8/src/full-codegen/mips/full-codegen-mips.cc | 378 |
1 file changed, 112 insertions, 266 deletions
diff --git a/deps/v8/src/full-codegen/mips/full-codegen-mips.cc b/deps/v8/src/full-codegen/mips/full-codegen-mips.cc index 3b34cb3a98..f329a23d00 100644 --- a/deps/v8/src/full-codegen/mips/full-codegen-mips.cc +++ b/deps/v8/src/full-codegen/mips/full-codegen-mips.cc @@ -305,38 +305,35 @@ void FullCodeGenerator::Generate() { // Visit the declarations and body unless there is an illegal // redeclaration. - if (scope()->HasIllegalRedeclaration()) { + PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS); + { Comment cmnt(masm_, "[ Declarations"); - VisitForEffect(scope()->GetIllegalRedeclaration()); - - } else { - PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS); - { Comment cmnt(masm_, "[ Declarations"); - VisitDeclarations(scope()->declarations()); - } + VisitDeclarations(scope()->declarations()); + } - // Assert that the declarations do not use ICs. Otherwise the debugger - // won't be able to redirect a PC at an IC to the correct IC in newly - // recompiled code. - DCHECK_EQ(0, ic_total_count_); - - { Comment cmnt(masm_, "[ Stack check"); - PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); - Label ok; - __ LoadRoot(at, Heap::kStackLimitRootIndex); - __ Branch(&ok, hs, sp, Operand(at)); - Handle<Code> stack_check = isolate()->builtins()->StackCheck(); - PredictableCodeSizeScope predictable(masm_, - masm_->CallSize(stack_check, RelocInfo::CODE_TARGET)); - __ Call(stack_check, RelocInfo::CODE_TARGET); - __ bind(&ok); - } + // Assert that the declarations do not use ICs. Otherwise the debugger + // won't be able to redirect a PC at an IC to the correct IC in newly + // recompiled code. 
+ DCHECK_EQ(0, ic_total_count_); + + { + Comment cmnt(masm_, "[ Stack check"); + PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); + Label ok; + __ LoadRoot(at, Heap::kStackLimitRootIndex); + __ Branch(&ok, hs, sp, Operand(at)); + Handle<Code> stack_check = isolate()->builtins()->StackCheck(); + PredictableCodeSizeScope predictable( + masm_, masm_->CallSize(stack_check, RelocInfo::CODE_TARGET)); + __ Call(stack_check, RelocInfo::CODE_TARGET); + __ bind(&ok); + } - { Comment cmnt(masm_, "[ Body"); - DCHECK(loop_depth() == 0); - VisitStatements(literal()->body()); - DCHECK(loop_depth() == 0); - } + { + Comment cmnt(masm_, "[ Body"); + DCHECK(loop_depth() == 0); + VisitStatements(literal()->body()); + DCHECK(loop_depth() == 0); } // Always emit a 'return undefined' in case control fell off the end of @@ -527,7 +524,7 @@ void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const { true, true_label_, false_label_); - DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectableObject()); + DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectable()); if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) { if (false_label_ != fall_through_) __ Branch(false_label_); } else if (lit->IsTrue() || lit->IsJSObject()) { @@ -637,7 +634,7 @@ void FullCodeGenerator::DoTest(Expression* condition, Label* if_false, Label* fall_through) { __ mov(a0, result_register()); - Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate()); + Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate()); CallIC(ic, condition->test_id()); __ LoadRoot(at, Heap::kTrueValueRootIndex); Split(eq, result_register(), Operand(at), if_true, if_false, fall_through); @@ -990,15 +987,15 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { FeedbackVectorSlot slot = stmt->ForInFeedbackSlot(); - Label loop, exit; - ForIn loop_statement(this, stmt); - increment_loop_depth(); - // Get the object to enumerate over. 
SetExpressionAsStatementPosition(stmt->enumerable()); VisitForAccumulatorValue(stmt->enumerable()); __ mov(a0, result_register()); - OperandStackDepthIncrement(ForIn::kElementCount); + OperandStackDepthIncrement(5); + + Label loop, exit; + Iteration loop_statement(this, stmt); + increment_loop_depth(); // If the object is null or undefined, skip over the loop, otherwise convert // it to a JS receiver. See ECMA-262 version 5, section 12.6.4. @@ -1072,11 +1069,6 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { // We got a fixed array in register v0. Iterate through that. __ bind(&fixed_array); - int const vector_index = SmiFromSlot(slot)->value(); - __ EmitLoadTypeFeedbackVector(a1); - __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate()))); - __ sw(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(vector_index))); - __ li(a1, Operand(Smi::FromInt(1))); // Smi(1) indicates slow check __ Push(a1, v0); // Smi and array __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset)); @@ -1111,12 +1103,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset)); __ Branch(&update_each, eq, t0, Operand(a2)); - // We might get here from TurboFan or Crankshaft when something in the - // for-in loop body deopts and only now notice in fullcodegen, that we - // can now longer use the enum cache, i.e. left fast mode. So better record - // this information here, in case we later OSR back into this loop or - // reoptimize the whole function w/o rerunning the loop with the slow - // mode object in fullcodegen (which would result in a deopt loop). + // We need to filter the key, record slow-path here. 
+ int const vector_index = SmiFromSlot(slot)->value(); __ EmitLoadTypeFeedbackVector(a0); __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate()))); __ sw(a2, FieldMemOperand(a0, FixedArray::OffsetOfElementAt(vector_index))); @@ -1167,31 +1155,6 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { } -void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info, - bool pretenure) { - // Use the fast case closure allocation code that allocates in new - // space for nested functions that don't need literals cloning. If - // we're running with the --always-opt or the --prepare-always-opt - // flag, we need to use the runtime function so that the new function - // we are creating here gets a chance to have its code optimized and - // doesn't just get a copy of the existing unoptimized code. - if (!FLAG_always_opt && - !FLAG_prepare_always_opt && - !pretenure && - scope()->is_function_scope() && - info->num_literals() == 0) { - FastNewClosureStub stub(isolate(), info->language_mode(), info->kind()); - __ li(a2, Operand(info)); - __ CallStub(&stub); - } else { - __ Push(info); - __ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured - : Runtime::kNewClosure); - } - context()->Plug(v0); -} - - void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset, FeedbackVectorSlot slot) { DCHECK(NeedsHomeObject(initializer)); @@ -1628,13 +1591,6 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { } } - if (expr->has_function()) { - DCHECK(result_saved); - __ lw(a0, MemOperand(sp)); - __ push(a0); - __ CallRuntime(Runtime::kToFastProperties); - } - if (result_saved) { context()->PlugTOS(); } else { @@ -1882,62 +1838,41 @@ void FullCodeGenerator::VisitYield(Yield* expr) { // this. It stays on the stack while we update the iterator. VisitForStackValue(expr->expression()); - switch (expr->yield_kind()) { - case Yield::kSuspend: - // Pop value from top-of-stack slot; box result into result register. 
- EmitCreateIteratorResult(false); - __ push(result_register()); - // Fall through. - case Yield::kInitial: { - Label suspend, continuation, post_runtime, resume; - - __ jmp(&suspend); - __ bind(&continuation); - // When we arrive here, the stack top is the resume mode and - // result_register() holds the input value (the argument given to the - // respective resume operation). - __ RecordGeneratorContinuation(); - __ pop(a1); - __ Branch(&resume, ne, a1, - Operand(Smi::FromInt(JSGeneratorObject::RETURN))); - __ push(result_register()); - EmitCreateIteratorResult(true); - EmitUnwindAndReturn(); - - __ bind(&suspend); - VisitForAccumulatorValue(expr->generator_object()); - DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos())); - __ li(a1, Operand(Smi::FromInt(continuation.pos()))); - __ sw(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset)); - __ sw(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset)); - __ mov(a1, cp); - __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2, - kRAHasBeenSaved, kDontSaveFPRegs); - __ Addu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset)); - __ Branch(&post_runtime, eq, sp, Operand(a1)); - __ push(v0); // generator object - __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); - __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); - __ bind(&post_runtime); - PopOperand(result_register()); - EmitReturnSequence(); - - __ bind(&resume); - context()->Plug(result_register()); - break; - } - - case Yield::kFinal: { - // Pop value from top-of-stack slot, box result into result register. - OperandStackDepthDecrement(1); - EmitCreateIteratorResult(true); - EmitUnwindAndReturn(); - break; - } + Label suspend, continuation, post_runtime, resume; + + __ jmp(&suspend); + __ bind(&continuation); + // When we arrive here, the stack top is the resume mode and + // result_register() holds the input value (the argument given to the + // respective resume operation). 
+ __ RecordGeneratorContinuation(); + __ pop(a1); + __ Branch(&resume, ne, a1, Operand(Smi::FromInt(JSGeneratorObject::RETURN))); + __ push(result_register()); + EmitCreateIteratorResult(true); + EmitUnwindAndReturn(); + + __ bind(&suspend); + OperandStackDepthIncrement(1); // Not popped on this path. + VisitForAccumulatorValue(expr->generator_object()); + DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos())); + __ li(a1, Operand(Smi::FromInt(continuation.pos()))); + __ sw(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset)); + __ sw(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset)); + __ mov(a1, cp); + __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2, + kRAHasBeenSaved, kDontSaveFPRegs); + __ Addu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset)); + __ Branch(&post_runtime, eq, sp, Operand(a1)); + __ push(v0); // generator object + __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); + __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); + __ bind(&post_runtime); + PopOperand(result_register()); + EmitReturnSequence(); - case Yield::kDelegating: - UNREACHABLE(); - } + __ bind(&resume); + context()->Plug(result_register()); } @@ -1967,7 +1902,10 @@ void FullCodeGenerator::EmitGeneratorResume(Expression *generator, __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset)); __ push(a2); - // Push holes for the rest of the arguments to the generator function. + // Push holes for arguments to generator function. Since the parser forced + // context allocation for any variables in generators, the actual argument + // values have already been copied into the context and these dummy values + // will never be used. __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset)); __ lw(a3, FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset)); @@ -1990,9 +1928,7 @@ void FullCodeGenerator::EmitGeneratorResume(Expression *generator, // fp = caller's frame pointer. 
// cp = callee's context, // t0 = callee's JS function. - __ Push(ra, fp, cp, t0); - // Adjust FP to point to saved FP. - __ Addu(fp, sp, 2 * kPointerSize); + __ PushStandardFrame(t0); // Load the operand stack size. __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset)); @@ -2079,7 +2015,7 @@ void FullCodeGenerator::EmitCreateIteratorResult(bool done) { __ bind(&done_allocate); __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1); - __ pop(a2); + PopOperand(a2); __ LoadRoot(a3, done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex); __ LoadRoot(t0, Heap::kEmptyFixedArrayRootIndex); @@ -2092,18 +2028,6 @@ void FullCodeGenerator::EmitCreateIteratorResult(bool done) { } -void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) { - SetExpressionPosition(prop); - Literal* key = prop->key()->AsLiteral(); - DCHECK(!prop->IsSuperAccess()); - - __ li(LoadDescriptor::NameRegister(), Operand(key->value())); - __ li(LoadDescriptor::SlotRegister(), - Operand(SmiFromSlot(prop->PropertyFeedbackSlot()))); - CallLoadIC(NOT_INSIDE_TYPEOF); -} - - void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, Token::Value op, Expression* left_expr, @@ -2711,7 +2635,7 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) { PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); // Record source position of the IC call. - SetCallPosition(expr); + SetCallPosition(expr, expr->tail_call_mode()); if (expr->tail_call_mode() == TailCallMode::kAllow) { if (FLAG_trace) { __ CallRuntime(Runtime::kTraceTailCall); @@ -3204,23 +3128,6 @@ void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) { } -void FullCodeGenerator::EmitToInteger(CallRuntime* expr) { - ZoneList<Expression*>* args = expr->arguments(); - DCHECK_EQ(1, args->length()); - - // Load the argument into v0 and convert it. - VisitForAccumulatorValue(args->at(0)); - - // Convert the object to an integer. 
- Label done_convert; - __ JumpIfSmi(v0, &done_convert); - __ Push(v0); - __ CallRuntime(Runtime::kToInteger); - __ bind(&done_convert); - context()->Plug(v0); -} - - void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) { ZoneList<Expression*>* args = expr->arguments(); DCHECK(args->length() == 1); @@ -3403,6 +3310,11 @@ void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) { context()->Plug(v0); } +void FullCodeGenerator::EmitGetOrdinaryHasInstance(CallRuntime* expr) { + DCHECK_EQ(0, expr->arguments()->length()); + __ LoadNativeContextSlot(Context::ORDINARY_HAS_INSTANCE_INDEX, v0); + context()->Plug(v0); +} void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) { DCHECK(expr->arguments()->length() == 0); @@ -3444,11 +3356,13 @@ void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) { void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) { + // Push function. + __ LoadNativeContextSlot(expr->context_index(), v0); + PushOperand(v0); + // Push undefined as the receiver. __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); PushOperand(v0); - - __ LoadNativeContextSlot(expr->context_index(), v0); } @@ -3462,60 +3376,9 @@ void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) { __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined), RelocInfo::CODE_TARGET); OperandStackDepthDecrement(arg_count + 1); -} - - -void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { - ZoneList<Expression*>* args = expr->arguments(); - int arg_count = args->length(); - - if (expr->is_jsruntime()) { - Comment cmnt(masm_, "[ CallRuntime"); - EmitLoadJSRuntimeFunction(expr); - - // Push the target function under the receiver. - __ lw(at, MemOperand(sp, 0)); - PushOperand(at); - __ sw(v0, MemOperand(sp, kPointerSize)); - - // Push the arguments ("left-to-right"). 
- for (int i = 0; i < arg_count; i++) { - VisitForStackValue(args->at(i)); - } - - PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); - EmitCallJSRuntimeFunction(expr); - - // Restore context register. - __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); - - context()->DropAndPlug(1, v0); - - } else { - const Runtime::Function* function = expr->function(); - switch (function->function_id) { -#define CALL_INTRINSIC_GENERATOR(Name) \ - case Runtime::kInline##Name: { \ - Comment cmnt(masm_, "[ Inline" #Name); \ - return Emit##Name(expr); \ - } - FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR) -#undef CALL_INTRINSIC_GENERATOR - default: { - Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic"); - // Push the arguments ("left-to-right"). - for (int i = 0; i < arg_count; i++) { - VisitForStackValue(args->at(i)); - } - // Call the C runtime function. - PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); - __ CallRuntime(expr->function(), arg_count); - OperandStackDepthDecrement(arg_count); - context()->Plug(v0); - } - } - } + // Restore context register. + __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); } @@ -3753,11 +3616,11 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { __ jmp(&stub_call); __ bind(&slow); } - if (!is_strong(language_mode())) { - ToNumberStub convert_stub(isolate()); - __ CallStub(&convert_stub); - PrepareForBailoutForId(expr->ToNumberId(), TOS_REG); - } + + // Convert old value into a number. + ToNumberStub convert_stub(isolate()); + __ CallStub(&convert_stub); + PrepareForBailoutForId(expr->ToNumberId(), TOS_REG); // Save result for postfix expressions. if (expr->is_postfix()) { @@ -3796,9 +3659,6 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { patch_site.EmitPatchInfo(); __ bind(&done); - if (is_strong(language_mode())) { - PrepareForBailoutForId(expr->ToNumberId(), TOS_REG); - } // Store the value returned in v0. 
switch (assign_type) { case VARIABLE: @@ -4045,29 +3905,23 @@ void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr, VisitForAccumulatorValue(sub_expr); PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); - __ mov(a0, result_register()); if (expr->op() == Token::EQ_STRICT) { Heap::RootListIndex nil_value = nil == kNullValue ? Heap::kNullValueRootIndex : Heap::kUndefinedValueRootIndex; __ LoadRoot(a1, nil_value); - Split(eq, a0, Operand(a1), if_true, if_false, fall_through); - } else { - Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil); - CallIC(ic, expr->CompareOperationFeedbackId()); - __ LoadRoot(a1, Heap::kTrueValueRootIndex); Split(eq, v0, Operand(a1), if_true, if_false, fall_through); + } else { + __ JumpIfSmi(v0, if_false); + __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); + __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset)); + __ And(a1, a1, Operand(1 << Map::kIsUndetectable)); + Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through); } context()->Plug(if_true, if_false); } -void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) { - __ lw(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); - context()->Plug(v0); -} - - Register FullCodeGenerator::result_register() { return v0; } @@ -4077,6 +3931,10 @@ Register FullCodeGenerator::context_register() { return cp; } +void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) { + DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset); + __ lw(value, MemOperand(fp, frame_offset)); +} void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset); @@ -4147,12 +4005,6 @@ void FullCodeGenerator::ClearPendingMessage() { } -void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorSlot slot) { - DCHECK(!slot.IsInvalid()); - __ li(VectorStoreICTrampolineDescriptor::SlotRegister(), - Operand(SmiFromSlot(slot))); -} - void 
FullCodeGenerator::DeferredCommands::EmitCommands() { DCHECK(!result_register().is(a1)); __ Pop(result_register()); // Restore the accumulator. @@ -4188,7 +4040,9 @@ void BackEdgeTable::PatchAt(Code* unoptimized_code, BackEdgeState target_state, Code* replacement_code) { static const int kInstrSize = Assembler::kInstrSize; - Address branch_address = pc - 6 * kInstrSize; + Address pc_immediate_load_address = + Assembler::target_address_from_return_address(pc); + Address branch_address = pc_immediate_load_address - 2 * kInstrSize; Isolate* isolate = unoptimized_code->GetIsolate(); CodePatcher patcher(isolate, branch_address, 1); @@ -4204,7 +4058,6 @@ void BackEdgeTable::PatchAt(Code* unoptimized_code, patcher.masm()->slt(at, a3, zero_reg); break; case ON_STACK_REPLACEMENT: - case OSR_AFTER_STACK_CHECK: // addiu at, zero_reg, 1 // beq at, zero_reg, ok ;; Not changed // lui t9, <on-stack replacement address> upper @@ -4215,7 +4068,6 @@ void BackEdgeTable::PatchAt(Code* unoptimized_code, patcher.masm()->addiu(at, zero_reg, 1); break; } - Address pc_immediate_load_address = pc - 4 * kInstrSize; // Replace the stack check address in the load-immediate (lui/ori pair) // with the entry address of the replacement code. 
Assembler::set_target_address_at(isolate, pc_immediate_load_address, @@ -4231,10 +4083,11 @@ BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState( Code* unoptimized_code, Address pc) { static const int kInstrSize = Assembler::kInstrSize; - Address branch_address = pc - 6 * kInstrSize; - Address pc_immediate_load_address = pc - 4 * kInstrSize; + Address pc_immediate_load_address = + Assembler::target_address_from_return_address(pc); + Address branch_address = pc_immediate_load_address - 2 * kInstrSize; - DCHECK(Assembler::IsBeq(Assembler::instr_at(pc - 5 * kInstrSize))); + DCHECK(Assembler::IsBeq(Assembler::instr_at(branch_address + kInstrSize))); if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) { DCHECK(reinterpret_cast<uint32_t>( Assembler::target_address_at(pc_immediate_load_address)) == @@ -4245,18 +4098,11 @@ BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState( DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address))); - if (reinterpret_cast<uint32_t>( - Assembler::target_address_at(pc_immediate_load_address)) == - reinterpret_cast<uint32_t>( - isolate->builtins()->OnStackReplacement()->entry())) { - return ON_STACK_REPLACEMENT; - } - DCHECK(reinterpret_cast<uint32_t>( - Assembler::target_address_at(pc_immediate_load_address)) == + Assembler::target_address_at(pc_immediate_load_address)) == reinterpret_cast<uint32_t>( - isolate->builtins()->OsrAfterStackCheck()->entry())); - return OSR_AFTER_STACK_CHECK; + isolate->builtins()->OnStackReplacement()->entry())); + return ON_STACK_REPLACEMENT; } |