// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/frames.h"

#include <memory>
#include <sstream>

#include "src/base/bits.h"
#include "src/deoptimizer.h"
#include "src/frames-inl.h"
#include "src/ic/ic-stats.h"
#include "src/register-configuration.h"
#include "src/safepoint-table.h"
#include "src/string-stream.h"
#include "src/visitors.h"
#include "src/vm-state-inl.h"
#include "src/wasm/wasm-code-manager.h"
#include "src/wasm/wasm-engine.h"
#include "src/wasm/wasm-objects-inl.h"
#include "src/zone/zone-containers.h"

namespace v8 {
namespace internal {

ReturnAddressLocationResolver StackFrame::return_address_location_resolver_ =
    nullptr;

// Iterator that supports traversing the stack handlers of a particular
// frame. Needs to know the top of the handler chain.
class StackHandlerIterator {
 public:
  StackHandlerIterator(const StackFrame* frame, StackHandler* handler)
      : limit_(frame->fp()), handler_(handler) {
    // Make sure the handler has already been unwound to this frame.
    DCHECK(frame->sp() <= handler->address());
  }

  StackHandler* handler() const { return handler_; }

  bool done() { return handler_ == nullptr || handler_->address() > limit_; }
  void Advance() {
    DCHECK(!done());
    handler_ = handler_->next();
  }

 private:
  const Address limit_;
  StackHandler* handler_;
};

// -------------------------------------------------------------------------

#define INITIALIZE_SINGLETON(type, field) field##_(this),
StackFrameIteratorBase::StackFrameIteratorBase(Isolate* isolate,
                                               bool can_access_heap_objects)
    : isolate_(isolate),
      STACK_FRAME_TYPE_LIST(INITIALIZE_SINGLETON) frame_(nullptr),
      handler_(nullptr),
      can_access_heap_objects_(can_access_heap_objects) {}
#undef INITIALIZE_SINGLETON

StackFrameIterator::StackFrameIterator(Isolate* isolate)
    : StackFrameIterator(isolate, isolate->thread_local_top()) {}

StackFrameIterator::StackFrameIterator(Isolate* isolate, ThreadLocalTop* t)
    : StackFrameIteratorBase(isolate, true) {
  Reset(t);
}

void StackFrameIterator::Advance() {
  DCHECK(!done());
  // Compute the state of the calling frame before restoring
  // callee-saved registers and unwinding handlers. This allows the
  // frame code that computes the caller state to access the top
  // handler and the value of any callee-saved register if needed.
  StackFrame::State state;
  StackFrame::Type type = frame_->GetCallerState(&state);

  // Unwind handlers corresponding to the current frame.
  StackHandlerIterator it(frame_, handler_);
  while (!it.done()) it.Advance();
  handler_ = it.handler();

  // Advance to the calling frame.
  frame_ = SingletonFor(type, &state);

  // When we're done iterating over the stack frames, the handler
  // chain must have been completely unwound.
  DCHECK(!done() || handler_ == nullptr);
}

void StackFrameIterator::Reset(ThreadLocalTop* top) {
  StackFrame::State state;
  StackFrame::Type type =
      ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state);
  handler_ = StackHandler::FromAddress(Isolate::handler(top));
  frame_ = SingletonFor(type, &state);
}

StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type,
                                                 StackFrame::State* state) {
  StackFrame* result = SingletonFor(type);
  DCHECK((!result) == (type == StackFrame::NONE));
  if (result) result->state_ = *state;
  return result;
}

StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type) {
#define FRAME_TYPE_CASE(type, field) \
  case StackFrame::type:             \
    return &field##_;

  switch (type) {
    case StackFrame::NONE:
      return nullptr;
      STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
    default:
      break;
  }
  return nullptr;

#undef FRAME_TYPE_CASE
}

// -------------------------------------------------------------------------

void JavaScriptFrameIterator::Advance() {
  do {
    iterator_.Advance();
  } while (!iterator_.done() && !iterator_.frame()->is_java_script());
}

// -------------------------------------------------------------------------

StackTraceFrameIterator::StackTraceFrameIterator(Isolate* isolate)
    : iterator_(isolate) {
  if (!done() && !IsValidFrame(iterator_.frame())) Advance();
}

StackTraceFrameIterator::StackTraceFrameIterator(Isolate* isolate,
                                                 StackFrame::Id id)
    : StackTraceFrameIterator(isolate) {
  while (!done() && frame()->id() != id) Advance();
}

void StackTraceFrameIterator::Advance() {
  do {
    iterator_.Advance();
  } while (!done() && !IsValidFrame(iterator_.frame()));
}

bool StackTraceFrameIterator::IsValidFrame(StackFrame* frame) const {
  if (frame->is_java_script()) {
    JavaScriptFrame* jsFrame = static_cast<JavaScriptFrame*>(frame);
    if (!jsFrame->function()->IsJSFunction()) return false;
    return jsFrame->function()->shared()->IsSubjectToDebugging();
  }
  // Apart from JavaScript frames, only wasm frames are valid.
  return frame->is_wasm();
}

// -------------------------------------------------------------------------

namespace {

bool IsInterpreterFramePc(Isolate* isolate, Address pc,
                          StackFrame::State* state) {
  Code* interpreter_entry_trampoline =
      isolate->builtins()->builtin(Builtins::kInterpreterEntryTrampoline);
  Code* interpreter_bytecode_advance =
      isolate->builtins()->builtin(Builtins::kInterpreterEnterBytecodeAdvance);
  Code* interpreter_bytecode_dispatch =
      isolate->builtins()->builtin(Builtins::kInterpreterEnterBytecodeDispatch);

  if (interpreter_entry_trampoline->contains(pc) ||
      interpreter_bytecode_advance->contains(pc) ||
      interpreter_bytecode_dispatch->contains(pc)) {
    return true;
  } else if (FLAG_interpreted_frames_native_stack) {
    intptr_t marker = Memory<intptr_t>(
        state->fp + CommonFrameConstants::kContextOrFrameTypeOffset);
    MSAN_MEMORY_IS_INITIALIZED(
        state->fp + StandardFrameConstants::kFunctionOffset, kPointerSize);
    Object* maybe_function =
        Memory<Object*>(state->fp + StandardFrameConstants::kFunctionOffset);
    // There's no need to run a full ContainsSlow if we know the frame can't
    // be an InterpretedFrame, so we do these fast checks first.
    if (StackFrame::IsTypeMarker(marker) || maybe_function->IsSmi()) {
      return false;
    } else if (!isolate->heap()->code_space()->ContainsSlow(pc)) {
      return false;
    }
    interpreter_entry_trampoline =
        isolate->heap()->GcSafeFindCodeForInnerPointer(pc);
    return interpreter_entry_trampoline->is_interpreter_trampoline_builtin();
  } else {
    return false;
  }
}

DISABLE_ASAN Address ReadMemoryAt(Address address) {
  return Memory<Address>(address);
}

}  // namespace

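// Note: the "safe" iterator below runs on behalf of the sampling profiler,
// which may interrupt the VM at an arbitrary instruction. It therefore never
// dereferences heap objects and validates every stack address against the
// bounds captured at construction (low_bound_/high_bound_) before reading
// through it.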
SafeStackFrameIterator::SafeStackFrameIterator(Isolate* isolate, Address fp,
                                               Address sp, Address js_entry_sp)
    : StackFrameIteratorBase(isolate, false),
      low_bound_(sp),
      high_bound_(js_entry_sp),
      top_frame_type_(StackFrame::NONE),
      external_callback_scope_(isolate->external_callback_scope()) {
  StackFrame::State state;
  StackFrame::Type type;
  ThreadLocalTop* top = isolate->thread_local_top();
  bool advance_frame = true;
  if (IsValidTop(top)) {
    type =
        ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state);
    top_frame_type_ = type;
  } else if (IsValidStackAddress(fp)) {
    DCHECK_NE(fp, kNullAddress);
    state.fp = fp;
    state.sp = sp;
    state.pc_address = StackFrame::ResolveReturnAddressLocation(
        reinterpret_cast<Address*>(StandardFrame::ComputePCAddress(fp)));

    // If the top of stack is a return address to the interpreter trampoline,
    // then we are likely in a bytecode handler with elided frame. In that
    // case, set the PC properly and make sure we do not drop the frame.
    if (IsValidStackAddress(sp)) {
      MSAN_MEMORY_IS_INITIALIZED(sp, kPointerSize);
      Address tos = ReadMemoryAt(sp);
      if (IsInterpreterFramePc(isolate, tos, &state)) {
        state.pc_address = reinterpret_cast<Address*>(sp);
        advance_frame = false;
      }
    }

    // StackFrame::ComputeType will read both kContextOffset and
    // kFunctionOffset; we check only that kFunctionOffset is within the stack
    // bounds and do a compile-time check that the kContextOffset slot is
    // pushed on the stack before the kFunctionOffset slot.
    STATIC_ASSERT(StandardFrameConstants::kFunctionOffset <
                  StandardFrameConstants::kContextOffset);
    Address frame_marker = fp + StandardFrameConstants::kFunctionOffset;
    if (IsValidStackAddress(frame_marker)) {
      type = StackFrame::ComputeType(this, &state);
      top_frame_type_ = type;
      // We only keep the top frame if we believe it to be an interpreted
      // frame.
      if (type != StackFrame::INTERPRETED) {
        advance_frame = true;
      }
    } else {
      // Mark the frame as OPTIMIZED if we cannot determine its type.
      // We chose OPTIMIZED rather than INTERPRETED because it's closer to
      // the original value of StackFrame::JAVA_SCRIPT here, in that
      // JAVA_SCRIPT referred to full-codegen frames (now removed from the
      // tree), and OPTIMIZED refers to turbofan frames, both of which are
      // generated code. INTERPRETED frames refer to bytecode.
      // The frame will be skipped anyway.
      type = StackFrame::OPTIMIZED;
      // Top frame is incomplete so we cannot reliably determine its type.
      top_frame_type_ = StackFrame::NONE;
    }
  } else {
    return;
  }
  frame_ = SingletonFor(type, &state);
  if (advance_frame && frame_) Advance();
}

bool SafeStackFrameIterator::IsValidTop(ThreadLocalTop* top) const {
  Address c_entry_fp = Isolate::c_entry_fp(top);
  if (!IsValidExitFrame(c_entry_fp)) return false;
  // There should be at least one JS_ENTRY stack handler.
  Address handler = Isolate::handler(top);
  if (handler == kNullAddress) return false;
  // Check that there are no js frames on top of the native frames.
  return c_entry_fp < handler;
}

void SafeStackFrameIterator::AdvanceOneFrame() {
  DCHECK(!done());
  StackFrame* last_frame = frame_;
  Address last_sp = last_frame->sp(), last_fp = last_frame->fp();
  // Before advancing to the next stack frame, perform pointer validity tests.
  if (!IsValidFrame(last_frame) || !IsValidCaller(last_frame)) {
    frame_ = nullptr;
    return;
  }

  // Advance to the previous frame.
  StackFrame::State state;
  StackFrame::Type type = frame_->GetCallerState(&state);
  frame_ = SingletonFor(type, &state);
  if (!frame_) return;

  // Check that we have actually moved to the previous frame in the stack.
  if (frame_->sp() <= last_sp || frame_->fp() <= last_fp) {
    frame_ = nullptr;
  }
}

bool SafeStackFrameIterator::IsValidFrame(StackFrame* frame) const {
  return IsValidStackAddress(frame->sp()) && IsValidStackAddress(frame->fp());
}

bool SafeStackFrameIterator::IsValidCaller(StackFrame* frame) {
  StackFrame::State state;
  if (frame->is_entry() || frame->is_construct_entry()) {
    // See EntryFrame::GetCallerState. It computes the caller FP address
    // and calls ExitFrame::GetStateForFramePointer on it. We need to be
    // sure that caller FP address is valid.
    Address caller_fp =
        Memory<Address>(frame->fp() + EntryFrameConstants::kCallerFPOffset);
    if (!IsValidExitFrame(caller_fp)) return false;
  } else if (frame->is_arguments_adaptor()) {
    // See ArgumentsAdaptorFrame::GetCallerStackPointer. It assumes that
    // the number of arguments is stored on the stack as a Smi. We need to
    // check that it really is a Smi.
    Object* number_of_args =
        reinterpret_cast<ArgumentsAdaptorFrame*>(frame)->GetExpression(0);
    if (!number_of_args->IsSmi()) {
      return false;
    }
  }
  frame->ComputeCallerState(&state);
  return IsValidStackAddress(state.sp) && IsValidStackAddress(state.fp) &&
         SingletonFor(frame->GetCallerState(&state)) != nullptr;
}

bool SafeStackFrameIterator::IsValidExitFrame(Address fp) const {
  if (!IsValidStackAddress(fp)) return false;
  Address sp = ExitFrame::ComputeStackPointer(fp);
  if (!IsValidStackAddress(sp)) return false;
  StackFrame::State state;
  ExitFrame::FillState(fp, sp, &state);
  MSAN_MEMORY_IS_INITIALIZED(state.pc_address, sizeof(state.pc_address));
  return *state.pc_address != kNullAddress;
}

void SafeStackFrameIterator::Advance() {
  while (true) {
    AdvanceOneFrame();
    if (done()) break;
    ExternalCallbackScope* last_callback_scope = nullptr;
    while (external_callback_scope_ != nullptr &&
           external_callback_scope_->scope_address() < frame_->fp()) {
      // As long as the setup of a frame is not atomic, we may happen to be
      // in an interval where an ExternalCallbackScope is already created,
      // but the frame is not yet entered. So we are actually observing
      // the previous frame.
      // Skip all the ExternalCallbackScopes that are below the current fp.
      last_callback_scope = external_callback_scope_;
      external_callback_scope_ = external_callback_scope_->previous();
    }
    if (frame_->is_java_script() || frame_->is_wasm()) break;
    if (frame_->is_exit() || frame_->is_builtin_exit()) {
      // Some of the EXIT frames may have an ExternalCallbackScope allocated
      // on top of them. In that case the scope corresponds to the first EXIT
      // frame beneath it. There may be other EXIT frames on top of the
      // ExternalCallbackScope; just skip them, as we cannot collect any
      // useful information about them.
      if (last_callback_scope) {
        frame_->state_.pc_address =
            last_callback_scope->callback_entrypoint_address();
      }
      break;
    }
  }
}

// -------------------------------------------------------------------------

namespace {

Code* GetContainingCode(Isolate* isolate, Address pc) {
  return isolate->inner_pointer_to_code_cache()->GetCacheEntry(pc)->code;
}

}  // namespace

Code* StackFrame::LookupCode() const {
  Code* result = GetContainingCode(isolate(), pc());
  DCHECK_GE(pc(), result->InstructionStart());
  DCHECK_LT(pc(), result->InstructionEnd());
  return result;
}

void StackFrame::IteratePc(RootVisitor* v, Address* pc_address,
                           Address* constant_pool_address, Code* holder) {
  Address pc = *pc_address;
  DCHECK(holder->GetHeap()->GcSafeCodeContains(holder, pc));
  unsigned pc_offset = static_cast<unsigned>(pc - holder->InstructionStart());
  Object* code = holder;
  v->VisitRootPointer(Root::kTop, nullptr, &code);
  if (code == holder) return;
  holder = reinterpret_cast<Code*>(code);
  pc = holder->InstructionStart() + pc_offset;
  *pc_address = pc;
  if (FLAG_enable_embedded_constant_pool && constant_pool_address) {
    *constant_pool_address = holder->constant_pool();
  }
}

void StackFrame::SetReturnAddressLocationResolver(
    ReturnAddressLocationResolver resolver) {
  DCHECK_NULL(return_address_location_resolver_);
  return_address_location_resolver_ = resolver;
}

StackFrame::Type StackFrame::ComputeType(
    const StackFrameIteratorBase* iterator, State* state) {
  DCHECK_NE(state->fp, kNullAddress);

  MSAN_MEMORY_IS_INITIALIZED(
      state->fp + CommonFrameConstants::kContextOrFrameTypeOffset,
      kPointerSize);
  intptr_t marker = Memory<intptr_t>(
      state->fp + CommonFrameConstants::kContextOrFrameTypeOffset);
  if (!iterator->can_access_heap_objects_) {
    // TODO(titzer): "can_access_heap_objects" is kind of bogus. It really
    // means that we are being called from the profiler, which can interrupt
    // the VM with a signal at any arbitrary instruction, with essentially
    // anything on the stack. So basically none of these checks are 100%
    // reliable.
    MSAN_MEMORY_IS_INITIALIZED(
        state->fp + StandardFrameConstants::kFunctionOffset, kPointerSize);
    Object* maybe_function =
        Memory<Object*>(state->fp + StandardFrameConstants::kFunctionOffset);
    if (!StackFrame::IsTypeMarker(marker)) {
      if (maybe_function->IsSmi()) {
        return NATIVE;
      } else if (IsInterpreterFramePc(iterator->isolate(),
                                      *(state->pc_address), state)) {
        return INTERPRETED;
      } else {
        return OPTIMIZED;
      }
    }
  } else {
    Address pc = *(state->pc_address);
    // If the {pc} does not point into WebAssembly code we can rely on the
    // returned {wasm_code} to be null and fall back to {GetContainingCode}.
    wasm::WasmCode* wasm_code =
        iterator->isolate()->wasm_engine()->code_manager()->LookupCode(pc);
    if (wasm_code != nullptr) {
      switch (wasm_code->kind()) {
        case wasm::WasmCode::kFunction:
          return WASM_COMPILED;
        case wasm::WasmCode::kWasmToJsWrapper:
          return WASM_TO_JS;
        case wasm::WasmCode::kLazyStub:
          return WASM_COMPILE_LAZY;
        case wasm::WasmCode::kRuntimeStub:
          return STUB;
        case wasm::WasmCode::kInterpreterEntry:
          return WASM_INTERPRETER_ENTRY;
        default:
          UNREACHABLE();
      }
    } else {
      // Look up the code object to figure out the type of the stack frame.
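      // (The lookup below goes through the isolate's inner-pointer-to-code
      // cache, see GetContainingCode above, so repeated queries for pcs in
      // the same code object stay cheap.)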
      Code* code_obj = GetContainingCode(iterator->isolate(), pc);
      if (code_obj != nullptr) {
        switch (code_obj->kind()) {
          case Code::BUILTIN:
            if (StackFrame::IsTypeMarker(marker)) break;
            if (code_obj->is_interpreter_trampoline_builtin()) {
              return INTERPRETED;
            }
            if (code_obj->is_turbofanned()) {
              // TODO(bmeurer): We treat frames for BUILTIN Code objects as
              // OptimizedFrame for now (all the builtins with JavaScript
              // linkage are actually generated with TurboFan currently, so
              // this is sound).
              return OPTIMIZED;
            }
            return BUILTIN;
          case Code::OPTIMIZED_FUNCTION:
            return OPTIMIZED;
          case Code::WASM_FUNCTION:
            return WASM_COMPILED;
          case Code::WASM_TO_JS_FUNCTION:
            return WASM_TO_JS;
          case Code::JS_TO_WASM_FUNCTION:
            return JS_TO_WASM;
          case Code::WASM_INTERPRETER_ENTRY:
            return WASM_INTERPRETER_ENTRY;
          case Code::C_WASM_ENTRY:
            return C_WASM_ENTRY;
          default:
            // All other types should have an explicit marker.
            break;
        }
      } else {
        return NATIVE;
      }
    }
  }
  DCHECK(StackFrame::IsTypeMarker(marker));
  StackFrame::Type candidate = StackFrame::MarkerToType(marker);
  switch (candidate) {
    case ENTRY:
    case CONSTRUCT_ENTRY:
    case EXIT:
    case BUILTIN_CONTINUATION:
    case JAVA_SCRIPT_BUILTIN_CONTINUATION:
    case JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH:
    case BUILTIN_EXIT:
    case STUB:
    case INTERNAL:
    case CONSTRUCT:
    case ARGUMENTS_ADAPTOR:
    case WASM_TO_JS:
    case WASM_COMPILED:
      return candidate;
    case JS_TO_WASM:
    case OPTIMIZED:
    case INTERPRETED:
    default:
      // Unoptimized and optimized JavaScript frames, including
      // interpreted frames, should never have a StackFrame::Type
      // marker. If we find one, we're likely being called from the
      // profiler in a bogus stack frame.
      return NATIVE;
  }
}

#ifdef DEBUG
bool StackFrame::can_access_heap_objects() const {
  return iterator_->can_access_heap_objects_;
}
#endif

StackFrame::Type StackFrame::GetCallerState(State* state) const {
  ComputeCallerState(state);
  return ComputeType(iterator_, state);
}

Address StackFrame::UnpaddedFP() const { return fp(); }

void NativeFrame::ComputeCallerState(State* state) const {
  state->sp = caller_sp();
  state->fp = Memory<Address>(fp() + CommonFrameConstants::kCallerFPOffset);
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(fp() +
                                 CommonFrameConstants::kCallerPCOffset));
  state->callee_pc_address = nullptr;
  state->constant_pool_address = nullptr;
}

Code* EntryFrame::unchecked_code() const {
  return isolate()->heap()->js_entry_code();
}

void EntryFrame::ComputeCallerState(State* state) const {
  GetCallerState(state);
}

StackFrame::Type EntryFrame::GetCallerState(State* state) const {
  const int offset = EntryFrameConstants::kCallerFPOffset;
  Address fp = Memory<Address>(this->fp() + offset);
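  // The caller FP slot of an entry frame holds the c_entry_fp that was saved
  // when JavaScript was entered, i.e. the most recent exit frame, or
  // kNullAddress if the VM was entered directly from C++ code; decode it
  // accordingly.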
  return ExitFrame::GetStateForFramePointer(fp, state);
}

Code* ConstructEntryFrame::unchecked_code() const {
  return isolate()->heap()->js_construct_entry_code();
}

Object*& ExitFrame::code_slot() const {
  const int offset = ExitFrameConstants::kCodeOffset;
  return Memory<Object*>(fp() + offset);
}

Code* ExitFrame::unchecked_code() const {
  return reinterpret_cast<Code*>(code_slot());
}

void ExitFrame::ComputeCallerState(State* state) const {
  // Set up the caller state.
  state->sp = caller_sp();
  state->fp = Memory<Address>(fp() + ExitFrameConstants::kCallerFPOffset);
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset));
  state->callee_pc_address = nullptr;
  if (FLAG_enable_embedded_constant_pool) {
    state->constant_pool_address = reinterpret_cast<Address*>(
        fp() + ExitFrameConstants::kConstantPoolOffset);
  }
}

void ExitFrame::Iterate(RootVisitor* v) const {
  // The arguments are traversed as part of the expression stack of
  // the calling frame.
  IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
  v->VisitRootPointer(Root::kTop, nullptr, &code_slot());
}

Address ExitFrame::GetCallerStackPointer() const {
  return fp() + ExitFrameConstants::kCallerSPOffset;
}

StackFrame::Type ExitFrame::GetStateForFramePointer(Address fp, State* state) {
  if (fp == 0) return NONE;
  Address sp = ComputeStackPointer(fp);
  FillState(fp, sp, state);
  DCHECK_NE(*state->pc_address, kNullAddress);
  return ComputeFrameType(fp);
}

StackFrame::Type ExitFrame::ComputeFrameType(Address fp) {
  // Distinguish between regular and builtin exit frames.
  // Default to EXIT in all hairy cases (e.g., when called from profiler).
  const int offset = ExitFrameConstants::kFrameTypeOffset;
  Object* marker = Memory<Object*>(fp + offset);

  if (!marker->IsSmi()) {
    return EXIT;
  }

  intptr_t marker_int = bit_cast<intptr_t>(marker);

  // Frame-type markers are Smi-tagged, so shift out the tag bit to recover
  // the StackFrame::Type value (see StackFrame::MarkerToType).
  StackFrame::Type frame_type = static_cast<StackFrame::Type>(marker_int >> 1);
  if (frame_type == EXIT || frame_type == BUILTIN_EXIT) {
    return frame_type;
  }

  return EXIT;
}

Address ExitFrame::ComputeStackPointer(Address fp) {
  MSAN_MEMORY_IS_INITIALIZED(fp + ExitFrameConstants::kSPOffset, kPointerSize);
  return Memory<Address>(fp + ExitFrameConstants::kSPOffset);
}

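// Note: unlike standard frames, the sp of an exit frame cannot be derived
// from its fp alone; it was stored into the frame (at kSPOffset) when the
// frame was set up, which is what ComputeStackPointer above reads back.
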
void ExitFrame::FillState(Address fp, Address sp, State* state) {
  state->sp = sp;
  state->fp = fp;
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(sp - 1 * kPCOnStackSize));
  state->callee_pc_address = nullptr;
  // The constant pool recorded in the exit frame is not associated
  // with the pc in this state (the return address into a C entry
  // stub). ComputeCallerState will retrieve the constant pool
  // together with the associated caller pc.
  state->constant_pool_address = nullptr;
}

JSFunction* BuiltinExitFrame::function() const {
  return JSFunction::cast(target_slot_object());
}

Object* BuiltinExitFrame::receiver() const { return receiver_slot_object(); }

bool BuiltinExitFrame::IsConstructor() const {
  return !new_target_slot_object()->IsUndefined(isolate());
}

Object* BuiltinExitFrame::GetParameter(int i) const {
  DCHECK(i >= 0 && i < ComputeParametersCount());
  int offset =
      BuiltinExitFrameConstants::kFirstArgumentOffset + i * kPointerSize;
  return Memory<Object*>(fp() + offset);
}

int BuiltinExitFrame::ComputeParametersCount() const {
  Object* argc_slot = argc_slot_object();
  DCHECK(argc_slot->IsSmi());
  // The stored argc also counts the receiver, target, new target, and argc
  // itself as arguments, so the real argument count is argc - 4.
  int argc = Smi::ToInt(argc_slot) - 4;
  DCHECK_GE(argc, 0);
  return argc;
}

namespace {
void PrintIndex(StringStream* accumulator, StackFrame::PrintMode mode,
                int index) {
  accumulator->Add((mode == StackFrame::OVERVIEW) ? "%5d: " : "[%d]: ", index);
}

const char* StringForStackFrameType(StackFrame::Type type) {
  switch (type) {
#define CASE(value, name) \
  case StackFrame::value: \
    return #name;
    STACK_FRAME_TYPE_LIST(CASE)
#undef CASE
    default:
      UNREACHABLE();
  }
}
}  // namespace

void StackFrame::Print(StringStream* accumulator, PrintMode mode,
                       int index) const {
  DisallowHeapAllocation no_gc;
  PrintIndex(accumulator, mode, index);
  accumulator->Add(StringForStackFrameType(type()));
  accumulator->Add(" [pc: %p]\n", reinterpret_cast<void*>(pc()));
}

void BuiltinExitFrame::Print(StringStream* accumulator, PrintMode mode,
                             int index) const {
  DisallowHeapAllocation no_gc;
  Object* receiver = this->receiver();
  JSFunction* function = this->function();

  accumulator->PrintSecurityTokenIfChanged(function);
  PrintIndex(accumulator, mode, index);
  accumulator->Add("builtin exit frame: ");
  Code* code = nullptr;
  if (IsConstructor()) accumulator->Add("new ");
  accumulator->PrintFunction(function, receiver, &code);

  accumulator->Add("(this=%o", receiver);

  // Print the parameters.
  int parameters_count = ComputeParametersCount();
  for (int i = 0; i < parameters_count; i++) {
    accumulator->Add(",%o", GetParameter(i));
  }

  accumulator->Add(")\n\n");
}

Address StandardFrame::GetExpressionAddress(int n) const {
  const int offset = StandardFrameConstants::kExpressionsOffset;
  return fp() + offset - n * kPointerSize;
}

Address InterpretedFrame::GetExpressionAddress(int n) const {
  const int offset = InterpreterFrameConstants::kExpressionsOffset;
  return fp() + offset - n * kPointerSize;
}

Script* StandardFrame::script() const {
  // This should only be called on frames which override this method.
  DCHECK(false);
  return nullptr;
}

Object* StandardFrame::receiver() const {
  return ReadOnlyRoots(isolate()).undefined_value();
}

Object* StandardFrame::context() const {
  return ReadOnlyRoots(isolate()).undefined_value();
}

int StandardFrame::position() const {
  AbstractCode* code = AbstractCode::cast(LookupCode());
  int code_offset = static_cast<int>(pc() - code->InstructionStart());
  return code->SourcePosition(code_offset);
}

int StandardFrame::ComputeExpressionsCount() const {
  Address base = GetExpressionAddress(0);
  Address limit = sp() - kPointerSize;
  DCHECK(base >= limit);  // stack grows downwards
  // Include register-allocated locals in number of expressions.
  return static_cast<int>((base - limit) / kPointerSize);
}

Object* StandardFrame::GetParameter(int index) const {
  // StandardFrame does not define any parameters.
  UNREACHABLE();
}

int StandardFrame::ComputeParametersCount() const { return 0; }

void StandardFrame::ComputeCallerState(State* state) const {
  state->sp = caller_sp();
  state->fp = caller_fp();
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(ComputePCAddress(fp())));
  state->callee_pc_address = pc_address();
  state->constant_pool_address =
      reinterpret_cast<Address*>(ComputeConstantPoolAddress(fp()));
}

bool StandardFrame::IsConstructor() const { return false; }

void StandardFrame::Summarize(std::vector<FrameSummary>* functions) const {
  // This should only be called on frames which override this method.
  UNREACHABLE();
}

void StandardFrame::IterateCompiledFrame(RootVisitor* v) const {
  // Make sure that we're not doing "safe" stack frame iteration. We cannot
  // possibly find pointers in optimized frames in that state.
  DCHECK(can_access_heap_objects());

  // Find the code and compute the safepoint information.
  Address inner_pointer = pc();
  const wasm::WasmCode* wasm_code =
      isolate()->wasm_engine()->code_manager()->LookupCode(inner_pointer);
  SafepointEntry safepoint_entry;
  uint32_t stack_slots;
  Code* code = nullptr;
  bool has_tagged_params = false;
  if (wasm_code != nullptr) {
    SafepointTable table(wasm_code->instruction_start(),
                         wasm_code->safepoint_table_offset(),
                         wasm_code->stack_slots());
    safepoint_entry = table.FindEntry(inner_pointer);
    stack_slots = wasm_code->stack_slots();
    has_tagged_params = wasm_code->kind() != wasm::WasmCode::kFunction;
  } else {
    InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry =
        isolate()->inner_pointer_to_code_cache()->GetCacheEntry(inner_pointer);
    if (!entry->safepoint_entry.is_valid()) {
      entry->safepoint_entry = entry->code->GetSafepointEntry(inner_pointer);
      DCHECK(entry->safepoint_entry.is_valid());
    } else {
      DCHECK(entry->safepoint_entry.Equals(
          entry->code->GetSafepointEntry(inner_pointer)));
    }

    code = entry->code;
    safepoint_entry = entry->safepoint_entry;
    stack_slots = code->stack_slots();
    has_tagged_params = code->has_tagged_params();
  }
  uint32_t slot_space = stack_slots * kPointerSize;

  // Determine the fixed header and spill slot area size.
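  // Typed frames carry an explicit frame-type marker instead of a context
  // slot, and their fixed header differs in size from the standard JavaScript
  // frame header, so inspect the marker slot before doing any layout math.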
  int frame_header_size = StandardFrameConstants::kFixedFrameSizeFromFp;
  intptr_t marker =
      Memory<intptr_t>(fp() + CommonFrameConstants::kContextOrFrameTypeOffset);
  if (StackFrame::IsTypeMarker(marker)) {
    StackFrame::Type candidate = StackFrame::MarkerToType(marker);
    switch (candidate) {
      case ENTRY:
      case CONSTRUCT_ENTRY:
      case EXIT:
      case BUILTIN_CONTINUATION:
      case JAVA_SCRIPT_BUILTIN_CONTINUATION:
      case JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH:
      case BUILTIN_EXIT:
      case ARGUMENTS_ADAPTOR:
      case STUB:
      case INTERNAL:
      case CONSTRUCT:
      case JS_TO_WASM:
      case C_WASM_ENTRY:
        frame_header_size = TypedFrameConstants::kFixedFrameSizeFromFp;
        break;
      case WASM_TO_JS:
      case WASM_COMPILED:
      case WASM_INTERPRETER_ENTRY:
      case WASM_COMPILE_LAZY:
        frame_header_size = WasmCompiledFrameConstants::kFixedFrameSizeFromFp;
        break;
      case OPTIMIZED:
      case INTERPRETED:
      case BUILTIN:
        // These frame types have a context, but they are actually stored
        // in the place on the stack that one finds the frame type.
        UNREACHABLE();
        break;
      case NATIVE:
      case NONE:
      case NUMBER_OF_TYPES:
      case MANUAL:
        UNREACHABLE();
        break;
    }
  }
  slot_space -=
      (frame_header_size + StandardFrameConstants::kFixedFrameSizeAboveFp);

  Object** frame_header_base = &Memory<Object*>(fp() - frame_header_size);
  Object** frame_header_limit =
      &Memory<Object*>(fp() - StandardFrameConstants::kCPSlotSize);
  Object** parameters_base = &Memory<Object*>(sp());
  Object** parameters_limit = frame_header_base - slot_space / kPointerSize;

  // Visit the parameters that may be on top of the saved registers.
  if (safepoint_entry.argument_count() > 0) {
    v->VisitRootPointers(Root::kTop, nullptr, parameters_base,
                         parameters_base + safepoint_entry.argument_count());
    parameters_base += safepoint_entry.argument_count();
  }

  // Skip saved double registers.
  if (safepoint_entry.has_doubles()) {
    // Number of doubles not known at snapshot time.
    DCHECK(!isolate()->serializer_enabled());
    parameters_base +=
        RegisterConfiguration::Default()->num_allocatable_double_registers() *
        kDoubleSize / kPointerSize;
  }

  // Visit the registers that contain pointers if any.
  if (safepoint_entry.HasRegisters()) {
    for (int i = kNumSafepointRegisters - 1; i >= 0; i--) {
      if (safepoint_entry.HasRegisterAt(i)) {
        int reg_stack_index = MacroAssembler::SafepointRegisterStackIndex(i);
        v->VisitRootPointer(Root::kTop, nullptr,
                            parameters_base + reg_stack_index);
      }
    }
    // Skip the words containing the register values.
    parameters_base += kNumSafepointRegisters;
  }

  // We're done dealing with the register bits.
  uint8_t* safepoint_bits = safepoint_entry.bits();
  safepoint_bits += kNumSafepointRegisters >> kBitsPerByteLog2;

  // Visit the rest of the parameters if they are tagged.
  if (has_tagged_params) {
    v->VisitRootPointers(Root::kTop, nullptr, parameters_base,
                         parameters_limit);
  }

  // Visit pointer spill slots and locals.
  for (unsigned index = 0; index < stack_slots; index++) {
    int byte_index = index >> kBitsPerByteLog2;
    int bit_index = index & (kBitsPerByte - 1);
    if ((safepoint_bits[byte_index] & (1U << bit_index)) != 0) {
      v->VisitRootPointer(Root::kTop, nullptr, parameters_limit + index);
    }
  }

  // For the off-heap code cases, we can skip this.
  if (code != nullptr) {
    // Visit the return address in the callee and incoming arguments.
    IteratePc(v, pc_address(), constant_pool_address(), code);
  }

  // If this frame has JavaScript ABI, visit the context (in stub and JS
  // frames) and the function (in JS frames). If it has WebAssembly ABI, visit
  // the instance object.
  v->VisitRootPointers(Root::kTop, nullptr, frame_header_base,
                       frame_header_limit);
}

void StubFrame::Iterate(RootVisitor* v) const { IterateCompiledFrame(v); }

Code* StubFrame::unchecked_code() const {
  return isolate()->FindCodeObject(pc());
}

Address StubFrame::GetCallerStackPointer() const {
  return fp() + ExitFrameConstants::kCallerSPOffset;
}

int StubFrame::GetNumberOfIncomingArguments() const { return 0; }

int StubFrame::LookupExceptionHandlerInTable(int* stack_slots) {
  Code* code = LookupCode();
  DCHECK(code->is_turbofanned());
  DCHECK_EQ(code->kind(), Code::BUILTIN);
  HandlerTable table(code);
  int pc_offset = static_cast<int>(pc() - code->InstructionStart());
  *stack_slots = code->stack_slots();
  return table.LookupReturn(pc_offset);
}

void OptimizedFrame::Iterate(RootVisitor* v) const { IterateCompiledFrame(v); }

void JavaScriptFrame::SetParameterValue(int index, Object* value) const {
  Memory<Object*>(GetParameterSlot(index)) = value;
}

bool JavaScriptFrame::IsConstructor() const {
  Address fp = caller_fp();
  if (has_adapted_arguments()) {
    // Skip the arguments adaptor frame and look at the real caller.
    fp = Memory<Address>(fp + StandardFrameConstants::kCallerFPOffset);
  }
  return IsConstructFrame(fp);
}

bool JavaScriptFrame::HasInlinedFrames() const {
  std::vector<SharedFunctionInfo*> functions;
  GetFunctions(&functions);
  return functions.size() > 1;
}

Code* JavaScriptFrame::unchecked_code() const { return function()->code(); }

int JavaScriptFrame::GetNumberOfIncomingArguments() const {
  DCHECK(can_access_heap_objects() &&
         isolate()->heap()->gc_state() == Heap::NOT_IN_GC);
  return function()->shared()->internal_formal_parameter_count();
}

int OptimizedFrame::GetNumberOfIncomingArguments() const {
  Code* code = LookupCode();
  if (code->kind() == Code::BUILTIN) {
    return static_cast<int>(
        Memory<intptr_t>(fp() + OptimizedBuiltinFrameConstants::kArgCOffset));
  } else {
    return JavaScriptFrame::GetNumberOfIncomingArguments();
  }
}

Address JavaScriptFrame::GetCallerStackPointer() const {
  return fp() + StandardFrameConstants::kCallerSPOffset;
}

void JavaScriptFrame::GetFunctions(
    std::vector<SharedFunctionInfo*>* functions) const {
  DCHECK(functions->empty());
  functions->push_back(function()->shared());
}

void JavaScriptFrame::GetFunctions(
    std::vector<Handle<SharedFunctionInfo>>* functions) const {
  DCHECK(functions->empty());
  std::vector<SharedFunctionInfo*> raw_functions;
  GetFunctions(&raw_functions);
  for (const auto& raw_function : raw_functions) {
    functions->push_back(
        Handle<SharedFunctionInfo>(raw_function, function()->GetIsolate()));
  }
}

void JavaScriptFrame::Summarize(std::vector<FrameSummary>* functions) const {
  DCHECK(functions->empty());
  Code* code = LookupCode();
  int offset = static_cast<int>(pc() - code->InstructionStart());
  AbstractCode* abstract_code = AbstractCode::cast(code);
  FrameSummary::JavaScriptFrameSummary summary(
      isolate(), receiver(), function(), abstract_code, offset,
      IsConstructor());
  functions->push_back(summary);
}

JSFunction* JavaScriptFrame::function() const {
  return JSFunction::cast(function_slot_object());
}

Object* JavaScriptFrame::unchecked_function() const {
  // During deoptimization of an optimized function, we may have yet to
  // materialize some closures on the stack. The arguments marker object
  // marks this case.
  DCHECK(function_slot_object()->IsJSFunction() ||
         ReadOnlyRoots(isolate()).arguments_marker() ==
             function_slot_object());
  return function_slot_object();
}

Object* JavaScriptFrame::receiver() const { return GetParameter(-1); }

Object* JavaScriptFrame::context() const {
  const int offset = StandardFrameConstants::kContextOffset;
  Object* maybe_result = Memory<Object*>(fp() + offset);
  DCHECK(!maybe_result->IsSmi());
  return maybe_result;
}

Script* JavaScriptFrame::script() const {
  return Script::cast(function()->shared()->script());
}

int JavaScriptFrame::LookupExceptionHandlerInTable(
    int* stack_depth, HandlerTable::CatchPrediction* prediction) {
  DCHECK_EQ(0, LookupCode()->handler_table_offset());
  DCHECK(!LookupCode()->is_optimized_code());
  return -1;
}

void JavaScriptFrame::PrintFunctionAndOffset(JSFunction* function,
                                             AbstractCode* code,
                                             int code_offset, FILE* file,
                                             bool print_line_number) {
  PrintF(file, "%s", function->IsOptimized() ? "*" : "~");
"*" : "~"); function->PrintName(file); PrintF(file, "+%d", code_offset); if (print_line_number) { SharedFunctionInfo* shared = function->shared(); int source_pos = code->SourcePosition(code_offset); Object* maybe_script = shared->script(); if (maybe_script->IsScript()) { Script* script = Script::cast(maybe_script); int line = script->GetLineNumber(source_pos) + 1; Object* script_name_raw = script->name(); if (script_name_raw->IsString()) { String* script_name = String::cast(script->name()); std::unique_ptr c_script_name = script_name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL); PrintF(file, " at %s:%d", c_script_name.get(), line); } else { PrintF(file, " at :%d", line); } } else { PrintF(file, " at :"); } } } void JavaScriptFrame::PrintTop(Isolate* isolate, FILE* file, bool print_args, bool print_line_number) { // constructor calls DisallowHeapAllocation no_allocation; JavaScriptFrameIterator it(isolate); while (!it.done()) { if (it.frame()->is_java_script()) { JavaScriptFrame* frame = it.frame(); if (frame->IsConstructor()) PrintF(file, "new "); JSFunction* function = frame->function(); int code_offset = 0; if (frame->is_interpreted()) { InterpretedFrame* iframe = reinterpret_cast(frame); code_offset = iframe->GetBytecodeOffset(); } else { Code* code = frame->unchecked_code(); code_offset = static_cast(frame->pc() - code->InstructionStart()); } PrintFunctionAndOffset(function, function->abstract_code(), code_offset, file, print_line_number); if (print_args) { // function arguments // (we are intentionally only printing the actually // supplied parameters, not all parameters required) PrintF(file, "(this="); frame->receiver()->ShortPrint(file); const int length = frame->ComputeParametersCount(); for (int i = 0; i < length; i++) { PrintF(file, ", "); frame->GetParameter(i)->ShortPrint(file); } PrintF(file, ")"); } break; } it.Advance(); } } void JavaScriptFrame::CollectFunctionAndOffsetForICStats(JSFunction* function, AbstractCode* code, int code_offset) { auto ic_stats = ICStats::instance(); ICInfo& ic_info = ic_stats->Current(); SharedFunctionInfo* shared = function->shared(); ic_info.function_name = ic_stats->GetOrCacheFunctionName(function); ic_info.script_offset = code_offset; int source_pos = code->SourcePosition(code_offset); Object* maybe_script = shared->script(); if (maybe_script->IsScript()) { Script* script = Script::cast(maybe_script); ic_info.line_num = script->GetLineNumber(source_pos) + 1; ic_info.script_name = ic_stats->GetOrCacheScriptName(script); } } void JavaScriptFrame::CollectTopFrameForICStats(Isolate* isolate) { // constructor calls DisallowHeapAllocation no_allocation; JavaScriptFrameIterator it(isolate); ICInfo& ic_info = ICStats::instance()->Current(); while (!it.done()) { if (it.frame()->is_java_script()) { JavaScriptFrame* frame = it.frame(); if (frame->IsConstructor()) ic_info.is_constructor = true; JSFunction* function = frame->function(); int code_offset = 0; if (frame->is_interpreted()) { InterpretedFrame* iframe = reinterpret_cast(frame); code_offset = iframe->GetBytecodeOffset(); } else { Code* code = frame->unchecked_code(); code_offset = static_cast(frame->pc() - code->InstructionStart()); } CollectFunctionAndOffsetForICStats(function, function->abstract_code(), code_offset); return; } it.Advance(); } } Object* JavaScriptFrame::GetParameter(int index) const { return Memory(GetParameterSlot(index)); } int JavaScriptFrame::ComputeParametersCount() const { return GetNumberOfIncomingArguments(); } int 
int JavaScriptBuiltinContinuationFrame::ComputeParametersCount() const {
  // Assert that the first allocatable register is also the argument count
  // register.
  DCHECK_EQ(RegisterConfiguration::Default()->GetAllocatableGeneralCode(0),
            kJavaScriptCallArgCountRegister.code());
  Object* argc_object =
      Memory<Object*>(fp() + BuiltinContinuationFrameConstants::kArgCOffset);
  return Smi::ToInt(argc_object);
}

intptr_t JavaScriptBuiltinContinuationFrame::GetSPToFPDelta() const {
  Address height_slot =
      fp() + BuiltinContinuationFrameConstants::kFrameSPtoFPDeltaAtDeoptimize;
  intptr_t height = Smi::ToInt(*reinterpret_cast<Smi**>(height_slot));
  return height;
}

Object* JavaScriptBuiltinContinuationFrame::context() const {
  return Memory<Object*>(
      fp() + BuiltinContinuationFrameConstants::kBuiltinContextOffset);
}

void JavaScriptBuiltinContinuationWithCatchFrame::SetException(
    Object* exception) {
  Address exception_argument_slot =
      fp() + JavaScriptFrameConstants::kLastParameterOffset +
      kPointerSize;  // Skip over return value slot.

  // Only allow setting exception if previous value was the hole.
  CHECK_EQ(ReadOnlyRoots(isolate()).the_hole_value(),
           Memory<Object*>(exception_argument_slot));
  Memory<Object*>(exception_argument_slot) = exception;
}

FrameSummary::JavaScriptFrameSummary::JavaScriptFrameSummary(
    Isolate* isolate, Object* receiver, JSFunction* function,
    AbstractCode* abstract_code, int code_offset, bool is_constructor)
    : FrameSummaryBase(isolate, FrameSummary::JAVA_SCRIPT),
      receiver_(receiver, isolate),
      function_(function, isolate),
      abstract_code_(abstract_code, isolate),
      code_offset_(code_offset),
      is_constructor_(is_constructor) {
  DCHECK(abstract_code->IsBytecodeArray() ||
         Code::cast(abstract_code)->kind() != Code::OPTIMIZED_FUNCTION);
}

bool FrameSummary::JavaScriptFrameSummary::is_subject_to_debugging() const {
  return function()->shared()->IsSubjectToDebugging();
}

int FrameSummary::JavaScriptFrameSummary::SourcePosition() const {
  return abstract_code()->SourcePosition(code_offset());
}

int FrameSummary::JavaScriptFrameSummary::SourceStatementPosition() const {
  return abstract_code()->SourceStatementPosition(code_offset());
}

Handle<Object> FrameSummary::JavaScriptFrameSummary::script() const {
  return handle(function_->shared()->script(), isolate());
}

Handle<String> FrameSummary::JavaScriptFrameSummary::FunctionName() const {
  return JSFunction::GetDebugName(function_);
}

Handle<Context> FrameSummary::JavaScriptFrameSummary::native_context() const {
  return handle(function_->context()->native_context(), isolate());
}

FrameSummary::WasmFrameSummary::WasmFrameSummary(
    Isolate* isolate, FrameSummary::Kind kind,
    Handle<WasmInstanceObject> instance, bool at_to_number_conversion)
    : FrameSummaryBase(isolate, kind),
      wasm_instance_(instance),
      at_to_number_conversion_(at_to_number_conversion) {}

Handle<Object> FrameSummary::WasmFrameSummary::receiver() const {
  return wasm_instance_->GetIsolate()->global_proxy();
}

#define WASM_SUMMARY_DISPATCH(type, name)                                      \
  type FrameSummary::WasmFrameSummary::name() const {                          \
    DCHECK(kind() == Kind::WASM_COMPILED || kind() == Kind::WASM_INTERPRETED); \
    return kind() == Kind::WASM_COMPILED                                       \
               ? static_cast<const WasmCompiledFrameSummary*>(this)->name()    \
               : static_cast<const WasmInterpretedFrameSummary*>(this)         \
                     ->name();                                                 \
  }

WASM_SUMMARY_DISPATCH(uint32_t, function_index)
WASM_SUMMARY_DISPATCH(int, byte_offset)

#undef WASM_SUMMARY_DISPATCH

int FrameSummary::WasmFrameSummary::SourcePosition() const {
  Handle<WasmModuleObject> module_object(wasm_instance()->module_object(),
                                         isolate());
  return WasmModuleObject::GetSourcePosition(module_object, function_index(),
                                             byte_offset(),
                                             at_to_number_conversion());
}

Handle