// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/execution/frames.h"

#include <memory>
#include <sstream>

#include "src/base/bits.h"
#include "src/codegen/interface-descriptors.h"
#include "src/codegen/macro-assembler.h"
#include "src/codegen/register-configuration.h"
#include "src/codegen/safepoint-table.h"
#include "src/deoptimizer/deoptimizer.h"
#include "src/execution/frames-inl.h"
#include "src/execution/vm-state-inl.h"
#include "src/ic/ic-stats.h"
#include "src/logging/counters.h"
#include "src/objects/code.h"
#include "src/objects/slots.h"
#include "src/objects/smi.h"
#include "src/objects/visitors.h"
#include "src/snapshot/embedded/embedded-data.h"
#include "src/strings/string-stream.h"
#include "src/wasm/wasm-code-manager.h"
#include "src/wasm/wasm-engine.h"
#include "src/wasm/wasm-objects-inl.h"
#include "src/zone/zone-containers.h"

namespace v8 {
namespace internal {

ReturnAddressLocationResolver StackFrame::return_address_location_resolver_ =
    nullptr;

namespace {

Address AddressOf(const StackHandler* handler) {
  Address raw = handler->address();
#ifdef V8_USE_ADDRESS_SANITIZER
  // ASan puts C++-allocated StackHandler markers onto its fake stack.
  // We work around that by storing the real stack address in the "padding"
  // field. StackHandlers allocated from generated code have 0 as padding.
  Address padding =
      base::Memory<Address>(raw + StackHandlerConstants::kPaddingOffset);
  if (padding != 0) return padding;
#endif
  return raw;
}

}  // namespace

// Iterator that supports traversing the stack handlers of a
// particular frame. Needs to know the top of the handler chain.
class StackHandlerIterator {
 public:
  StackHandlerIterator(const StackFrame* frame, StackHandler* handler)
      : limit_(frame->fp()), handler_(handler) {
    // Make sure the handler has already been unwound to this frame.
    DCHECK(frame->sp() <= AddressOf(handler));
    // For CWasmEntry frames, the handler was registered by the last C++
    // frame (Execution::CallWasm), so even though its address is already
    // beyond the limit, we know we always want to unwind one handler.
    if (frame->type() == StackFrame::C_WASM_ENTRY) {
      handler_ = handler_->next();
    }
  }

  StackHandler* handler() const { return handler_; }

  bool done() { return handler_ == nullptr || AddressOf(handler_) > limit_; }
  void Advance() {
    DCHECK(!done());
    handler_ = handler_->next();
  }

 private:
  const Address limit_;
  StackHandler* handler_;
};
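
// A typical use drives the iterator to completion and takes the surviving
// handler as the new top of the chain, as StackFrameIterator::Advance() does
// below when it leaves a frame:
//
//   StackHandlerIterator it(frame_, handler_);
//   while (!it.done()) it.Advance();
//   handler_ = it.handler();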

// -------------------------------------------------------------------------

#define INITIALIZE_SINGLETON(type, field) field##_(this),
StackFrameIteratorBase::StackFrameIteratorBase(Isolate* isolate,
                                               bool can_access_heap_objects)
    : isolate_(isolate),
      STACK_FRAME_TYPE_LIST(INITIALIZE_SINGLETON) frame_(nullptr),
      handler_(nullptr),
      can_access_heap_objects_(can_access_heap_objects) {}
#undef INITIALIZE_SINGLETON

StackFrameIterator::StackFrameIterator(Isolate* isolate)
    : StackFrameIterator(isolate, isolate->thread_local_top()) {}

StackFrameIterator::StackFrameIterator(Isolate* isolate, ThreadLocalTop* t)
    : StackFrameIteratorBase(isolate, true) {
  Reset(t);
}

void StackFrameIterator::Advance() {
  DCHECK(!done());
  // Compute the state of the calling frame before restoring
  // callee-saved registers and unwinding handlers. This allows the
  // frame code that computes the caller state to access the top
  // handler and the value of any callee-saved register if needed.
  StackFrame::State state;
  StackFrame::Type type = frame_->GetCallerState(&state);

  // Unwind handlers corresponding to the current frame.
  StackHandlerIterator it(frame_, handler_);
  while (!it.done()) it.Advance();
  handler_ = it.handler();

  // Advance to the calling frame.
  frame_ = SingletonFor(type, &state);

  // When we're done iterating over the stack frames, the handler
  // chain must have been completely unwound.
  DCHECK(!done() || handler_ == nullptr);
}

void StackFrameIterator::Reset(ThreadLocalTop* top) {
  StackFrame::State state;
  StackFrame::Type type =
      ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state);
  handler_ = StackHandler::FromAddress(Isolate::handler(top));
  frame_ = SingletonFor(type, &state);
}

StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type,
                                                 StackFrame::State* state) {
  StackFrame* result = SingletonFor(type);
  DCHECK((!result) == (type == StackFrame::NONE));
  if (result) result->state_ = *state;
  return result;
}

StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type) {
#define FRAME_TYPE_CASE(type, field) \
  case StackFrame::type:             \
    return &field##_;

  switch (type) {
    case StackFrame::NONE:
      return nullptr;
      STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
    default:
      break;
  }
  return nullptr;

#undef FRAME_TYPE_CASE
}

// -------------------------------------------------------------------------

void TypedFrameWithJSLinkage::Iterate(RootVisitor* v) const {
  IterateExpressions(v);
  IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
}

// -------------------------------------------------------------------------

void JavaScriptFrameIterator::Advance() {
  do {
    iterator_.Advance();
  } while (!iterator_.done() && !iterator_.frame()->is_java_script());
}

// -------------------------------------------------------------------------

StackTraceFrameIterator::StackTraceFrameIterator(Isolate* isolate)
    : iterator_(isolate) {
  if (!done() && !IsValidFrame(iterator_.frame())) Advance();
}

StackTraceFrameIterator::StackTraceFrameIterator(Isolate* isolate,
                                                 StackFrameId id)
    : StackTraceFrameIterator(isolate) {
  while (!done() && frame()->id() != id) Advance();
}

void StackTraceFrameIterator::Advance() {
  do {
    iterator_.Advance();
  } while (!done() && !IsValidFrame(iterator_.frame()));
}

int StackTraceFrameIterator::FrameFunctionCount() const {
  DCHECK(!done());
  if (!iterator_.frame()->is_optimized()) return 1;
  std::vector<SharedFunctionInfo> infos;
  OptimizedFrame::cast(iterator_.frame())->GetFunctions(&infos);
  return static_cast<int>(infos.size());
}

bool StackTraceFrameIterator::IsValidFrame(StackFrame* frame) const {
  if (frame->is_java_script()) {
    JavaScriptFrame* js_frame = static_cast<JavaScriptFrame*>(frame);
    if (!js_frame->function().IsJSFunction()) return false;
    return js_frame->function().shared().IsSubjectToDebugging();
  }
  // Apart from JavaScript frames, only Wasm frames are valid.
  return frame->is_wasm();
}

// -------------------------------------------------------------------------

namespace {

bool IsInterpreterFramePc(Isolate* isolate, Address pc,
                          StackFrame::State* state) {
  Code interpreter_entry_trampoline =
      isolate->builtins()->builtin(Builtins::kInterpreterEntryTrampoline);
  Code interpreter_bytecode_advance =
      isolate->builtins()->builtin(Builtins::kInterpreterEnterBytecodeAdvance);
  Code interpreter_bytecode_dispatch =
      isolate->builtins()->builtin(Builtins::kInterpreterEnterBytecodeDispatch);

  if (interpreter_entry_trampoline.contains(pc) ||
      interpreter_bytecode_advance.contains(pc) ||
      interpreter_bytecode_dispatch.contains(pc)) {
    return true;
  } else if (FLAG_interpreted_frames_native_stack) {
    intptr_t marker = Memory<intptr_t>(
        state->fp + CommonFrameConstants::kContextOrFrameTypeOffset);
    MSAN_MEMORY_IS_INITIALIZED(
        state->fp + StandardFrameConstants::kFunctionOffset,
        kSystemPointerSize);
    Object maybe_function = Object(
        Memory<Address>(state->fp + StandardFrameConstants::kFunctionOffset));
    // There's no need to run a full ContainsSlow if we know the frame can't
    // be an InterpretedFrame, so we do these fast checks first.
    if (StackFrame::IsTypeMarker(marker) || maybe_function.IsSmi()) {
      return false;
    } else if (!isolate->heap()->InSpaceSlow(pc, CODE_SPACE)) {
      return false;
    }
    interpreter_entry_trampoline =
        isolate->heap()->GcSafeFindCodeForInnerPointer(pc);
    return interpreter_entry_trampoline.is_interpreter_trampoline_builtin();
  } else {
    return false;
  }
}

}  // namespace

bool SafeStackFrameIterator::IsNoFrameBytecodeHandlerPc(Isolate* isolate,
                                                        Address pc,
                                                        Address fp) const {
  // Return false for builds with non-embedded bytecode handlers.
  if (Isolate::CurrentEmbeddedBlobCode() == nullptr) return false;

  EmbeddedData d = EmbeddedData::FromBlob();
  if (pc < d.InstructionStartOfBytecodeHandlers() ||
      pc >= d.InstructionEndOfBytecodeHandlers()) {
    // Not a bytecode handler pc address.
    return false;
  }

  if (!IsValidStackAddress(fp +
                           CommonFrameConstants::kContextOrFrameTypeOffset)) {
    return false;
  }

  // Check if top stack frame is a bytecode handler stub frame.
  MSAN_MEMORY_IS_INITIALIZED(
      fp + CommonFrameConstants::kContextOrFrameTypeOffset, kSystemPointerSize);
  intptr_t marker =
      Memory<intptr_t>(fp + CommonFrameConstants::kContextOrFrameTypeOffset);
  if (StackFrame::IsTypeMarker(marker) &&
      StackFrame::MarkerToType(marker) == StackFrame::STUB) {
    // Bytecode handler built a frame.
    return false;
  }
  return true;
}

SafeStackFrameIterator::SafeStackFrameIterator(Isolate* isolate, Address pc,
                                               Address fp, Address sp,
                                               Address lr, Address js_entry_sp)
    : StackFrameIteratorBase(isolate, false),
      low_bound_(sp),
      high_bound_(js_entry_sp),
      top_frame_type_(StackFrame::NONE),
      top_context_address_(kNullAddress),
      external_callback_scope_(isolate->external_callback_scope()),
      top_link_register_(lr) {
  StackFrame::State state;
  StackFrame::Type type;
  ThreadLocalTop* top = isolate->thread_local_top();
  bool advance_frame = true;

  Address fast_c_fp = isolate->isolate_data()->fast_c_call_caller_fp();
  uint8_t stack_is_iterable = isolate->isolate_data()->stack_is_iterable();
  if (!stack_is_iterable) {
    frame_ = nullptr;
    return;
  }
  // 'Fast C calls' are a special type of C call where we call directly from JS
  // to C without an exit frame in between. The CEntryStub is responsible for
  // setting Isolate::c_entry_fp, meaning that it won't be set for fast C calls.
  // To keep the stack iterable, we store the FP and PC of the caller of the
  // fast C call on the isolate. This is guaranteed to be the topmost JS frame,
  // because fast C calls cannot call back into JS. We start iterating the stack
  // from this topmost JS frame.
  if (fast_c_fp) {
    DCHECK_NE(kNullAddress, isolate->isolate_data()->fast_c_call_caller_pc());
    type = StackFrame::Type::OPTIMIZED;
    top_frame_type_ = type;
    state.fp = fast_c_fp;
    state.sp = sp;
    state.pc_address = isolate->isolate_data()->fast_c_call_caller_pc_address();
    advance_frame = false;
  } else if (IsValidTop(top)) {
    type = ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state);
    top_frame_type_ = type;
  } else if (IsValidStackAddress(fp)) {
    DCHECK_NE(fp, kNullAddress);
    state.fp = fp;
    state.sp = sp;
    state.pc_address = StackFrame::ResolveReturnAddressLocation(
        reinterpret_cast<Address*>(CommonFrame::ComputePCAddress(fp)));

    // If the current PC is in a bytecode handler, the top stack frame isn't
    // the bytecode handler's frame, and the top of stack or link register is
    // a return address into the interpreter entry trampoline, then we are
    // likely in a bytecode handler with an elided frame. In that case, set
    // the PC properly and make sure we do not drop the frame.
    bool is_no_frame_bytecode_handler = false;
    if (IsNoFrameBytecodeHandlerPc(isolate, pc, fp)) {
      Address* tos_location = nullptr;
      if (top_link_register_) {
        tos_location = &top_link_register_;
      } else if (IsValidStackAddress(sp)) {
        MSAN_MEMORY_IS_INITIALIZED(sp, kSystemPointerSize);
        tos_location = reinterpret_cast<Address*>(sp);
      }

      if (IsInterpreterFramePc(isolate, *tos_location, &state)) {
        state.pc_address = tos_location;
        is_no_frame_bytecode_handler = true;
        advance_frame = false;
      }
    }

    // StackFrame::ComputeType will read both kContextOffset and kMarkerOffset,
    // so we check only that kMarkerOffset is within the stack bounds and
    // statically assert that the kContextOffset slot is pushed onto the stack
    // before kMarkerOffset.
    STATIC_ASSERT(StandardFrameConstants::kFunctionOffset <
                  StandardFrameConstants::kContextOffset);
    Address frame_marker = fp + StandardFrameConstants::kFunctionOffset;
    if (IsValidStackAddress(frame_marker)) {
      if (is_no_frame_bytecode_handler) {
        type = StackFrame::INTERPRETED;
      } else {
        type = StackFrame::ComputeType(this, &state);
      }
      top_frame_type_ = type;
      MSAN_MEMORY_IS_INITIALIZED(
          fp + CommonFrameConstants::kContextOrFrameTypeOffset,
          kSystemPointerSize);
      Address type_or_context_address =
          Memory<Address>(fp + CommonFrameConstants::kContextOrFrameTypeOffset);
      if (!StackFrame::IsTypeMarker(type_or_context_address))
        top_context_address_ = type_or_context_address;
    } else {
      // Mark the frame as OPTIMIZED if we cannot determine its type.
      // We chose OPTIMIZED rather than INTERPRETED because it's closer to
      // the original value of StackFrame::JAVA_SCRIPT here, in that JAVA_SCRIPT
      // referred to full-codegen frames (now removed from the tree), and
      // OPTIMIZED refers to turbofan frames, both of which are generated
      // code. INTERPRETED frames refer to bytecode.
      // The frame will anyway be skipped.
      type = StackFrame::OPTIMIZED;
      // Top frame is incomplete so we cannot reliably determine its type.
      top_frame_type_ = StackFrame::NONE;
    }
  } else {
    return;
  }
  frame_ = SingletonFor(type, &state);
  if (advance_frame && frame_) Advance();
}

bool SafeStackFrameIterator::IsValidTop(ThreadLocalTop* top) const {
  Address c_entry_fp = Isolate::c_entry_fp(top);
  if (!IsValidExitFrame(c_entry_fp)) return false;
  // There should be at least one JS_ENTRY stack handler.
  Address handler = Isolate::handler(top);
  if (handler == kNullAddress) return false;
  // Check that there are no js frames on top of the native frames.
  return c_entry_fp < handler;
}
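
// The last check relies on the stack growing towards lower addresses: if the
// most recent exit frame (c_entry_fp) sits at a lower address than the topmost
// stack handler, no JavaScript frame has been entered since that exit frame,
// so iteration can safely start from it.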

void SafeStackFrameIterator::AdvanceOneFrame() {
  DCHECK(!done());
  StackFrame* last_frame = frame_;
  Address last_sp = last_frame->sp(), last_fp = last_frame->fp();
  // Before advancing to the next stack frame, perform pointer validity tests.
  if (!IsValidFrame(last_frame) || !IsValidCaller(last_frame)) {
    frame_ = nullptr;
    return;
  }

  // Advance to the previous frame.
  StackFrame::State state;
  StackFrame::Type type = frame_->GetCallerState(&state);
  frame_ = SingletonFor(type, &state);
  if (!frame_) return;

  // Check that we have actually moved to the previous frame in the stack.
  if (frame_->sp() <= last_sp || frame_->fp() <= last_fp) {
    frame_ = nullptr;
  }
}

bool SafeStackFrameIterator::IsValidFrame(StackFrame* frame) const {
  return IsValidStackAddress(frame->sp()) && IsValidStackAddress(frame->fp());
}

bool SafeStackFrameIterator::IsValidCaller(StackFrame* frame) {
  StackFrame::State state;
  if (frame->is_entry() || frame->is_construct_entry()) {
    // See EntryFrame::GetCallerState. It computes the caller FP address
    // and calls ExitFrame::GetStateForFramePointer on it. We need to be
    // sure that caller FP address is valid.
    Address caller_fp =
        Memory<Address>(frame->fp() + EntryFrameConstants::kCallerFPOffset);
    if (!IsValidExitFrame(caller_fp)) return false;
  } else if (frame->is_arguments_adaptor()) {
    // See ArgumentsAdaptorFrame::GetCallerStackPointer. It assumes that
    // the number of arguments is stored on the stack as a Smi. We need to
    // check that it really is a Smi.
    Object number_of_args =
        reinterpret_cast<ArgumentsAdaptorFrame*>(frame)->GetExpression(0);
    if (!number_of_args.IsSmi()) {
      return false;
    }
  }
  frame->ComputeCallerState(&state);
  return IsValidStackAddress(state.sp) && IsValidStackAddress(state.fp) &&
         SingletonFor(frame->GetCallerState(&state)) != nullptr;
}

bool SafeStackFrameIterator::IsValidExitFrame(Address fp) const {
  if (!IsValidStackAddress(fp)) return false;
  Address sp = ExitFrame::ComputeStackPointer(fp);
  if (!IsValidStackAddress(sp)) return false;
  StackFrame::State state;
  ExitFrame::FillState(fp, sp, &state);
  MSAN_MEMORY_IS_INITIALIZED(state.pc_address, sizeof(state.pc_address));
  return *state.pc_address != kNullAddress;
}

void SafeStackFrameIterator::Advance() {
  while (true) {
    AdvanceOneFrame();
    if (done()) break;
    ExternalCallbackScope* last_callback_scope = nullptr;
    while (external_callback_scope_ != nullptr &&
           external_callback_scope_->scope_address() < frame_->fp()) {
      // As long as the setup of a frame is not atomic, we may happen to be
      // in an interval where an ExternalCallbackScope is already created,
      // but the frame is not yet entered. So we are actually observing
      // the previous frame.
      // Skip all the ExternalCallbackScopes that are below the current fp.
      last_callback_scope = external_callback_scope_;
      external_callback_scope_ = external_callback_scope_->previous();
    }
    if (frame_->is_java_script() || frame_->is_wasm() ||
        frame_->is_wasm_to_js() || frame_->is_js_to_wasm()) {
      break;
    }
    if (frame_->is_exit() || frame_->is_builtin_exit()) {
      // Some of the EXIT frames may have ExternalCallbackScope allocated on
      // top of them. In that case the scope corresponds to the first EXIT
      // frame beneath it. There may be other EXIT frames on top of the
      // ExternalCallbackScope, just skip them as we cannot collect any useful
      // information about them.
      if (last_callback_scope) {
        frame_->state_.pc_address =
            last_callback_scope->callback_entrypoint_address();
      }
      break;
    }
  }
}

// -------------------------------------------------------------------------

namespace {
Code GetContainingCode(Isolate* isolate, Address pc) {
  return isolate->inner_pointer_to_code_cache()->GetCacheEntry(pc)->code;
}
}  // namespace

Code StackFrame::LookupCode() const {
  Code result = GetContainingCode(isolate(), pc());
  DCHECK_GE(pc(), result.InstructionStart());
  DCHECK_LT(pc(), result.InstructionEnd());
  return result;
}

void StackFrame::IteratePc(RootVisitor* v, Address* pc_address,
                           Address* constant_pool_address, Code holder) {
  Address old_pc = ReadPC(pc_address);
  DCHECK(ReadOnlyHeap::Contains(holder) ||
         holder.GetHeap()->GcSafeCodeContains(holder, old_pc));
  unsigned pc_offset =
      static_cast<unsigned>(old_pc - holder.InstructionStart());
  Object code = holder;
  v->VisitRootPointer(Root::kTop, nullptr, FullObjectSlot(&code));
  if (code == holder) return;
  holder = Code::unchecked_cast(code);
  Address pc = holder.InstructionStart() + pc_offset;
  // TODO(v8:10026): avoid replacing a signed pointer.
  PointerAuthentication::ReplacePC(pc_address, pc, kSystemPointerSize);
  if (FLAG_enable_embedded_constant_pool && constant_pool_address) {
    *constant_pool_address = holder.constant_pool();
  }
}
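
// If visiting the Code object above moved it, the frame's return address would
// otherwise be stale: IteratePc() therefore recomputes the PC at the same
// offset into the relocated instruction stream and writes it back through
// PointerAuthentication::ReplacePC (re-signing it where pointer authentication
// is in use), and refreshes the constant pool pointer when embedded constant
// pools are enabled.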

void StackFrame::SetReturnAddressLocationResolver(
    ReturnAddressLocationResolver resolver) {
  DCHECK_NULL(return_address_location_resolver_);
  return_address_location_resolver_ = resolver;
}

StackFrame::Type StackFrame::ComputeType(const StackFrameIteratorBase* iterator,
                                         State* state) {
  DCHECK_NE(state->fp, kNullAddress);

  MSAN_MEMORY_IS_INITIALIZED(
      state->fp + CommonFrameConstants::kContextOrFrameTypeOffset,
      kSystemPointerSize);
  intptr_t marker = Memory<intptr_t>(
      state->fp + CommonFrameConstants::kContextOrFrameTypeOffset);
  Address pc = StackFrame::ReadPC(state->pc_address);
  if (!iterator->can_access_heap_objects_) {
    // TODO(titzer): "can_access_heap_objects" is kind of bogus. It really
    // means that we are being called from the profiler, which can interrupt
    // the VM with a signal at any arbitrary instruction, with essentially
    // anything on the stack. So basically none of these checks are 100%
    // reliable.
    MSAN_MEMORY_IS_INITIALIZED(
        state->fp + StandardFrameConstants::kFunctionOffset,
        kSystemPointerSize);
    Object maybe_function = Object(
        Memory<Address>(state->fp + StandardFrameConstants::kFunctionOffset));
    if (!StackFrame::IsTypeMarker(marker)) {
      if (maybe_function.IsSmi()) {
        return NATIVE;
      } else if (IsInterpreterFramePc(iterator->isolate(), pc, state)) {
        return INTERPRETED;
      } else {
        return OPTIMIZED;
      }
    }
  } else {
    // If the {pc} does not point into WebAssembly code we can rely on the
    // returned {wasm_code} to be null and fall back to {GetContainingCode}.
    wasm::WasmCodeRefScope code_ref_scope;
    wasm::WasmCode* wasm_code =
        iterator->isolate()->wasm_engine()->code_manager()->LookupCode(pc);
    if (wasm_code != nullptr) {
      switch (wasm_code->kind()) {
        case wasm::WasmCode::kFunction:
          return WASM;
        case wasm::WasmCode::kWasmToCapiWrapper:
          return WASM_EXIT;
        case wasm::WasmCode::kWasmToJsWrapper:
          return WASM_TO_JS;
        default:
          UNREACHABLE();
      }
    } else {
      // Look up the code object to figure out the type of the stack frame.
      Code code_obj = GetContainingCode(iterator->isolate(), pc);
      if (!code_obj.is_null()) {
        switch (code_obj.kind()) {
          case CodeKind::BUILTIN:
            if (StackFrame::IsTypeMarker(marker)) break;
            if (code_obj.is_interpreter_trampoline_builtin()) {
              return INTERPRETED;
            }
            if (code_obj.is_turbofanned()) {
              // TODO(bmeurer): We treat frames for BUILTIN Code objects as
              // OptimizedFrame for now (all the builtins with JavaScript
              // linkage are actually generated with TurboFan currently, so
              // this is sound).
              return OPTIMIZED;
            }
            return BUILTIN;
          case CodeKind::TURBOFAN:
          case CodeKind::NATIVE_CONTEXT_INDEPENDENT:
          case CodeKind::TURBOPROP:
            return OPTIMIZED;
          case CodeKind::JS_TO_WASM_FUNCTION:
            return JS_TO_WASM;
          case CodeKind::JS_TO_JS_FUNCTION:
            return STUB;
          case CodeKind::C_WASM_ENTRY:
            return C_WASM_ENTRY;
          case CodeKind::WASM_TO_JS_FUNCTION:
            return WASM_TO_JS;
          case CodeKind::WASM_FUNCTION:
          case CodeKind::WASM_TO_CAPI_FUNCTION:
            // Never appear as on-heap {Code} objects.
            UNREACHABLE();
          default:
            // All other types should have an explicit marker.
            break;
        }
      } else {
        return NATIVE;
      }
    }
  }
  DCHECK(StackFrame::IsTypeMarker(marker));
  StackFrame::Type candidate = StackFrame::MarkerToType(marker);
  switch (candidate) {
    case ENTRY:
    case CONSTRUCT_ENTRY:
    case EXIT:
    case BUILTIN_CONTINUATION:
    case JAVA_SCRIPT_BUILTIN_CONTINUATION:
    case JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH:
    case BUILTIN_EXIT:
    case STUB:
    case INTERNAL:
    case CONSTRUCT:
    case ARGUMENTS_ADAPTOR:
    case WASM_TO_JS:
    case WASM:
    case WASM_COMPILE_LAZY:
    case WASM_EXIT:
    case WASM_DEBUG_BREAK:
    case JS_TO_WASM:
      return candidate;
    case OPTIMIZED:
    case INTERPRETED:
    default:
      // Unoptimized and optimized JavaScript frames, including
      // interpreted frames, should never have a StackFrame::Type
      // marker. If we find one, we're likely being called from the
      // profiler in a bogus stack frame.
      return NATIVE;
  }
}

#ifdef DEBUG
bool StackFrame::can_access_heap_objects() const {
  return iterator_->can_access_heap_objects_;
}
#endif

StackFrame::Type StackFrame::GetCallerState(State* state) const {
  ComputeCallerState(state);
  return ComputeType(iterator_, state);
}

Address CommonFrame::GetCallerStackPointer() const {
  return fp() + CommonFrameConstants::kCallerSPOffset;
}

void NativeFrame::ComputeCallerState(State* state) const {
  state->sp = caller_sp();
  state->fp = Memory<Address>(fp() + CommonFrameConstants::kCallerFPOffset);
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(fp() + CommonFrameConstants::kCallerPCOffset));
  state->callee_pc_address = nullptr;
  state->constant_pool_address = nullptr;
}

Code EntryFrame::unchecked_code() const {
  return isolate()->heap()->builtin(Builtins::kJSEntry);
}

void EntryFrame::ComputeCallerState(State* state) const {
  GetCallerState(state);
}

StackFrame::Type EntryFrame::GetCallerState(State* state) const {
  const int offset = EntryFrameConstants::kCallerFPOffset;
  Address fp = Memory<Address>(this->fp() + offset);
  return ExitFrame::GetStateForFramePointer(fp, state);
}

StackFrame::Type CWasmEntryFrame::GetCallerState(State* state) const {
  const int offset = CWasmEntryFrameConstants::kCEntryFPOffset;
  Address fp = Memory<Address>(this->fp() + offset);
  return ExitFrame::GetStateForFramePointer(fp, state);
}

Code ConstructEntryFrame::unchecked_code() const {
  return isolate()->heap()->builtin(Builtins::kJSConstructEntry);
}

void ExitFrame::ComputeCallerState(State* state) const {
  // Set up the caller state.
  state->sp = caller_sp();
  state->fp = Memory<Address>(fp() + ExitFrameConstants::kCallerFPOffset);
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset));
  state->callee_pc_address = nullptr;
  if (FLAG_enable_embedded_constant_pool) {
    state->constant_pool_address = reinterpret_cast<Address*>(
        fp() + ExitFrameConstants::kConstantPoolOffset);
  }
}

void ExitFrame::Iterate(RootVisitor* v) const {
  // The arguments are traversed as part of the expression stack of
  // the calling frame.
  IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
}

StackFrame::Type ExitFrame::GetStateForFramePointer(Address fp, State* state) {
  if (fp == 0) return NONE;
  StackFrame::Type type = ComputeFrameType(fp);
  Address sp = (type == WASM_EXIT) ? WasmExitFrame::ComputeStackPointer(fp)
                                   : ExitFrame::ComputeStackPointer(fp);
  FillState(fp, sp, state);
  DCHECK_NE(*state->pc_address, kNullAddress);
  return type;
}

StackFrame::Type ExitFrame::ComputeFrameType(Address fp) {
  // Distinguish between regular and builtin exit frames.
  // Default to EXIT in all hairy cases (e.g., when called from the profiler).
  const int offset = ExitFrameConstants::kFrameTypeOffset;
  Object marker(Memory<Address>(fp + offset));

  if (!marker.IsSmi()) {
    return EXIT;
  }

  intptr_t marker_int = bit_cast<intptr_t>(marker);

  StackFrame::Type frame_type = static_cast<StackFrame::Type>(marker_int >> 1);
  if (frame_type == EXIT || frame_type == BUILTIN_EXIT ||
      frame_type == WASM_EXIT) {
    return frame_type;
  }

  return EXIT;
}
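
// The frame type slot read above holds a Smi-tagged integer, so shifting the
// raw marker right by one (the Smi tag bit) recovers the StackFrame::Type:
// for instance, a slot containing (BUILTIN_EXIT << 1) decodes back to
// BUILTIN_EXIT, while anything other than EXIT, BUILTIN_EXIT or WASM_EXIT
// conservatively falls back to EXIT.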

Address ExitFrame::ComputeStackPointer(Address fp) {
  MSAN_MEMORY_IS_INITIALIZED(fp + ExitFrameConstants::kSPOffset,
                             kSystemPointerSize);
  return Memory<Address>(fp + ExitFrameConstants::kSPOffset);
}

Address WasmExitFrame::ComputeStackPointer(Address fp) {
  // For WASM_EXIT frames, {sp} is only needed for finding the PC slot;
  // everything else is handled via safepoint information.
  Address sp = fp + WasmExitFrameConstants::kWasmInstanceOffset;
  DCHECK_EQ(sp - 1 * kPCOnStackSize,
            fp + WasmExitFrameConstants::kCallingPCOffset);
  return sp;
}

void ExitFrame::FillState(Address fp, Address sp, State* state) {
  state->sp = sp;
  state->fp = fp;
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(sp - 1 * kPCOnStackSize));
  state->callee_pc_address = nullptr;
  // The constant pool recorded in the exit frame is not associated
  // with the pc in this state (the return address into a C entry
  // stub).  ComputeCallerState will retrieve the constant pool
  // together with the associated caller pc.
  state->constant_pool_address = nullptr;
}

JSFunction BuiltinExitFrame::function() const {
  return JSFunction::cast(target_slot_object());
}

Object BuiltinExitFrame::receiver() const { return receiver_slot_object(); }

bool BuiltinExitFrame::IsConstructor() const {
  return !new_target_slot_object().IsUndefined(isolate());
}

Object BuiltinExitFrame::GetParameter(int i) const {
  DCHECK(i >= 0 && i < ComputeParametersCount());
  int offset =
      BuiltinExitFrameConstants::kFirstArgumentOffset + i * kSystemPointerSize;
  return Object(Memory<Address>(fp() + offset));
}

int BuiltinExitFrame::ComputeParametersCount() const {
  Object argc_slot = argc_slot_object();
  DCHECK(argc_slot.IsSmi());
  // Argc also counts the receiver, target, new target, and argc itself as
  // args; therefore the real argument count is argc - 4.
  int argc = Smi::ToInt(argc_slot) - 4;
  DCHECK_GE(argc, 0);
  return argc;
}
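
// For example, a builtin called with two actual arguments sees an argc slot
// of 6 (the two arguments plus receiver, target, new.target and the argc slot
// itself), so ComputeParametersCount() returns 2 and GetParameter(0) and
// GetParameter(1) read the two slots starting at
// fp() + BuiltinExitFrameConstants::kFirstArgumentOffset.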

namespace {
void PrintIndex(StringStream* accumulator, StackFrame::PrintMode mode,
                int index) {
  accumulator->Add((mode == StackFrame::OVERVIEW) ? "%5d: " : "[%d]: ", index);
}

const char* StringForStackFrameType(StackFrame::Type type) {
  switch (type) {
#define CASE(value, name) \
  case StackFrame::value: \
    return #name;
    STACK_FRAME_TYPE_LIST(CASE)
#undef CASE
    default:
      UNREACHABLE();
  }
}
}  // namespace

void StackFrame::Print(StringStream* accumulator, PrintMode mode,
                       int index) const {
  DisallowHeapAllocation no_gc;
  PrintIndex(accumulator, mode, index);
  accumulator->Add(StringForStackFrameType(type()));
  accumulator->Add(" [pc: %p]\n", reinterpret_cast<void*>(pc()));
}

void BuiltinExitFrame::Print(StringStream* accumulator, PrintMode mode,
                             int index) const {
  DisallowHeapAllocation no_gc;
  Object receiver = this->receiver();
  JSFunction function = this->function();

  accumulator->PrintSecurityTokenIfChanged(function);
  PrintIndex(accumulator, mode, index);
  accumulator->Add("builtin exit frame: ");
  Code code;
  if (IsConstructor()) accumulator->Add("new ");
  accumulator->PrintFunction(function, receiver, &code);

  accumulator->Add("(this=%o", receiver);

  // Print the parameters.
  int parameters_count = ComputeParametersCount();
  for (int i = 0; i < parameters_count; i++) {
    accumulator->Add(",%o", GetParameter(i));
  }

  accumulator->Add(")\n\n");
}

Address CommonFrame::GetExpressionAddress(int n) const {
  const int offset = StandardFrameConstants::kExpressionsOffset;
  return fp() + offset - n * kSystemPointerSize;
}

Address InterpretedFrame::GetExpressionAddress(int n) const {
  const int offset = InterpreterFrameConstants::kExpressionsOffset;
  return fp() + offset - n * kSystemPointerSize;
}

Object CommonFrame::context() const {
  return ReadOnlyRoots(isolate()).undefined_value();
}

int CommonFrame::position() const {
  AbstractCode code = AbstractCode::cast(LookupCode());
  int code_offset = static_cast<int>(pc() - code.InstructionStart());
  return code.SourcePosition(code_offset);
}

int CommonFrame::ComputeExpressionsCount() const {
  Address base = GetExpressionAddress(0);
  Address limit = sp() - kSystemPointerSize;
  DCHECK(base >= limit);  // stack grows downwards
  // Include register-allocated locals in number of expressions.
  return static_cast<int>((base - limit) / kSystemPointerSize);
}
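
// Because {limit} sits one slot below sp(), the count includes the slot at
// {base} itself: for example, if GetExpressionAddress(0) is four slots above
// sp(), then base - limit is 5 * kSystemPointerSize and the frame holds five
// expressions.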

void CommonFrame::ComputeCallerState(State* state) const {
  state->sp = caller_sp();
  state->fp = caller_fp();
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(ComputePCAddress(fp())));
  state->callee_fp = fp();
  state->callee_pc_address = pc_address();
  state->constant_pool_address =
      reinterpret_cast<Address*>(ComputeConstantPoolAddress(fp()));
}

void CommonFrame::Summarize(std::vector<FrameSummary>* functions) const {
  // This should only be called on frames which override this method.
  UNREACHABLE();
}

void CommonFrame::IterateCompiledFrame(RootVisitor* v) const {
  // Make sure that we're not doing "safe" stack frame iteration. We cannot
  // possibly find pointers in optimized frames in that state.
  DCHECK(can_access_heap_objects());

  // Find the code and compute the safepoint information.
  Address inner_pointer = pc();
  const wasm::WasmCode* wasm_code =
      isolate()->wasm_engine()->code_manager()->LookupCode(inner_pointer);
  SafepointEntry safepoint_entry;
  uint32_t stack_slots;
  Code code;
  bool has_tagged_params = false;
  uint32_t tagged_parameter_slots = 0;
  if (wasm_code != nullptr) {
    SafepointTable table(wasm_code);
    safepoint_entry = table.FindEntry(inner_pointer);
    stack_slots = wasm_code->stack_slots();
    has_tagged_params = wasm_code->kind() != wasm::WasmCode::kFunction &&
                        wasm_code->kind() != wasm::WasmCode::kWasmToCapiWrapper;
    tagged_parameter_slots = wasm_code->tagged_parameter_slots();
  } else {
    InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry =
        isolate()->inner_pointer_to_code_cache()->GetCacheEntry(inner_pointer);
    if (!entry->safepoint_entry.is_valid()) {
      entry->safepoint_entry = entry->code.GetSafepointEntry(inner_pointer);
      DCHECK(entry->safepoint_entry.is_valid());
    } else {
      DCHECK(entry->safepoint_entry.Equals(
          entry->code.GetSafepointEntry(inner_pointer)));
    }

    code = entry->code;
    safepoint_entry = entry->safepoint_entry;
    stack_slots = code.stack_slots();
    has_tagged_params = code.has_tagged_params();
  }
  uint32_t slot_space = stack_slots * kSystemPointerSize;

  // Determine the fixed header and spill slot area size.
  int frame_header_size = StandardFrameConstants::kFixedFrameSizeFromFp;
  intptr_t marker =
      Memory<intptr_t>(fp() + CommonFrameConstants::kContextOrFrameTypeOffset);
  bool typed_frame = StackFrame::IsTypeMarker(marker);
  if (typed_frame) {
    StackFrame::Type candidate = StackFrame::MarkerToType(marker);
    switch (candidate) {
      case ENTRY:
      case CONSTRUCT_ENTRY:
      case EXIT:
      case BUILTIN_CONTINUATION:
      case JAVA_SCRIPT_BUILTIN_CONTINUATION:
      case JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH:
      case BUILTIN_EXIT:
      case ARGUMENTS_ADAPTOR:
      case STUB:
      case INTERNAL:
      case CONSTRUCT:
      case JS_TO_WASM:
      case C_WASM_ENTRY:
      case WASM_DEBUG_BREAK:
        frame_header_size = TypedFrameConstants::kFixedFrameSizeFromFp;
        break;
      case WASM_TO_JS:
      case WASM:
      case WASM_COMPILE_LAZY:
        frame_header_size = WasmFrameConstants::kFixedFrameSizeFromFp;
        break;
      case WASM_EXIT:
        // The last value in the frame header is the calling PC, which should
        // not be visited.
        static_assert(WasmExitFrameConstants::kFixedSlotCountFromFp ==
                          WasmFrameConstants::kFixedSlotCountFromFp + 1,
                      "WasmExitFrame has one slot more than WasmFrame");
        frame_header_size = WasmFrameConstants::kFixedFrameSizeFromFp;
        break;
      case OPTIMIZED:
      case INTERPRETED:
      case BUILTIN:
        // These frame types have a context, but it is stored in the place on
        // the stack where one would find the frame type marker.
        UNREACHABLE();
        break;
      case NATIVE:
      case NONE:
      case NUMBER_OF_TYPES:
      case MANUAL:
        UNREACHABLE();
        break;
    }
  }
  slot_space -=
      (frame_header_size + StandardFrameConstants::kFixedFrameSizeAboveFp);

  FullObjectSlot frame_header_base(&Memory<Address>(fp() - frame_header_size));
  FullObjectSlot frame_header_limit(
      &Memory<Address>(fp() - StandardFrameConstants::kCPSlotSize));
  FullObjectSlot parameters_base(&Memory<Address>(sp()));
  FullObjectSlot parameters_limit(frame_header_base.address() - slot_space);

  // Visit the rest of the parameters if they are tagged.
  if (has_tagged_params) {
    v->VisitRootPointers(Root::kTop, nullptr, parameters_base,
                         parameters_limit);
  }

  // Visit pointer spill slots and locals.
  uint8_t* safepoint_bits = safepoint_entry.bits();
  for (unsigned index = 0; index < stack_slots; index++) {
    int byte_index = index >> kBitsPerByteLog2;
    int bit_index = index & (kBitsPerByte - 1);
    if ((safepoint_bits[byte_index] & (1U << bit_index)) != 0) {
      FullObjectSlot spill_slot = parameters_limit + index;
#ifdef V8_COMPRESS_POINTERS
      // Spill slots may contain compressed values in which case the upper
      // 32-bits will contain zeros. In order to simplify handling of such
      // slots in GC we ensure that the slot always contains the full value.

      // The spill slot may actually contain weak references so we load/store
      // values using spill_slot.location() in order to avoid dealing with
      // FullMaybeObjectSlots here.
      Tagged_t compressed_value = static_cast<Tagged_t>(*spill_slot.location());
      if (!HAS_SMI_TAG(compressed_value)) {
        // We don't need to update smi values.
        *spill_slot.location() =
            DecompressTaggedPointer(isolate(), compressed_value);
      }
#endif
      v->VisitRootPointer(Root::kTop, nullptr, spill_slot);
    }
  }

  // Visit tagged parameters that have been passed to the function of this
  // frame. Conceptually these parameters belong to the parent frame. However,
  // the exact count is only known by this frame (in the presence of tail calls,
  // this information cannot be derived from the call site).
  if (tagged_parameter_slots > 0) {
    FullObjectSlot tagged_parameter_base(&Memory<Address>(caller_sp()));
    FullObjectSlot tagged_parameter_limit =
        tagged_parameter_base + tagged_parameter_slots;

    v->VisitRootPointers(Root::kTop, nullptr, tagged_parameter_base,
                         tagged_parameter_limit);
  }

  // For the off-heap code cases, we can skip this.
  if (!code.is_null()) {
    // Visit the return address in the callee and incoming arguments.
    IteratePc(v, pc_address(), constant_pool_address(), code);
  }

  // If this frame has JavaScript ABI, visit the context (in stub and JS
  // frames) and the function (in JS frames). If it has WebAssembly ABI, visit
  // the instance object.
  if (!typed_frame) {
    // JavaScript ABI frames also contain an arguments count value, which is
    // stored untagged; we don't need to visit it.
    frame_header_base += 1;
  }
  v->VisitRootPointers(Root::kTop, nullptr, frame_header_base,
                       frame_header_limit);
}
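
// In the spill-slot walk above, the safepoint bitmap is indexed one bit per
// stack slot: slot index 11, for instance, maps to byte_index 1 and bit_index
// 3 (11 >> kBitsPerByteLog2 == 1, 11 & (kBitsPerByte - 1) == 3), and only
// slots whose bit is set are visited as tagged roots.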

Code StubFrame::unchecked_code() const {
  return isolate()->FindCodeObject(pc());
}

int StubFrame::LookupExceptionHandlerInTable() {
  Code code = LookupCode();
  DCHECK(code.is_turbofanned());
  DCHECK_EQ(code.kind(), CodeKind::BUILTIN);
  HandlerTable table(code);
  int pc_offset = static_cast<int>(pc() - code.InstructionStart());
  return table.LookupReturn(pc_offset);
}

void OptimizedFrame::Iterate(RootVisitor* v) const { IterateCompiledFrame(v); }

void JavaScriptFrame::SetParameterValue(int index, Object value) const {
  Memory<Address>(GetParameterSlot(index)) = value.ptr();
}

bool JavaScriptFrame::IsConstructor() const {
  Address fp = caller_fp();
  if (has_adapted_arguments()) {
    // Skip the arguments adaptor frame and look at the real caller.
    fp = Memory<Address>(fp + StandardFrameConstants::kCallerFPOffset);
  }
  return IsConstructFrame(fp);
}

bool JavaScriptFrame::HasInlinedFrames() const {
  std::vector<SharedFunctionInfo> functions;
  GetFunctions(&functions);
  return functions.size() > 1;
}

Code CommonFrameWithJSLinkage::unchecked_code() const {
  return function().code();
}

int OptimizedFrame::ComputeParametersCount() const {
  Code code = LookupCode();
  if (code.kind() == CodeKind::BUILTIN) {
    return static_cast<int>(
        Memory<intptr_t>(fp() + StandardFrameConstants::kArgCOffset));
  } else {
    return JavaScriptFrame::ComputeParametersCount();
  }
}

Address JavaScriptFrame::GetCallerStackPointer() const {
  return fp() + StandardFrameConstants::kCallerSPOffset;
}

void JavaScriptFrame::GetFunctions(
    std::vector<SharedFunctionInfo>* functions) const {
  DCHECK(functions->empty());
  functions->push_back(function().shared());
}

void JavaScriptFrame::GetFunctions(
    std::vector<Handle<SharedFunctionInfo>>* functions) const {
  DCHECK(functions->empty());
  std::vector<SharedFunctionInfo> raw_functions;
  GetFunctions(&raw_functions);
  for (const auto& raw_function : raw_functions) {
    functions->push_back(
        Handle<SharedFunctionInfo>(raw_function, function().GetIsolate()));
  }
}

bool CommonFrameWithJSLinkage::IsConstructor() const {
  return IsConstructFrame(caller_fp());
}

void CommonFrameWithJSLinkage::Summarize(
    std::vector<FrameSummary>* functions) const {
  DCHECK(functions->empty());
  Code code = LookupCode();
  int offset = static_cast<int>(pc() - code.InstructionStart());
  Handle<AbstractCode> abstract_code(AbstractCode::cast(code), isolate());
  Handle<FixedArray> params = GetParameters();
  FrameSummary::JavaScriptFrameSummary summary(
      isolate(), receiver(), function(), *abstract_code, offset,
      IsConstructor(), *params);
  functions->push_back(summary);
}

JSFunction JavaScriptFrame::function() const {
  return JSFunction::cast(function_slot_object());
}

Object JavaScriptFrame::unchecked_function() const {
  // During deoptimization of an optimized function, we may have yet to
  // materialize some closures on the stack. The arguments marker object
  // marks this case.
  DCHECK(function_slot_object().IsJSFunction() ||
         ReadOnlyRoots(isolate()).arguments_marker() == function_slot_object());
  return function_slot_object();
}

Object CommonFrameWithJSLinkage::receiver() const { return GetParameter(-1); }

Object JavaScriptFrame::context() const {
  const int offset = StandardFrameConstants::kContextOffset;
  Object maybe_result(Memory<Address>(fp() + offset));
  DCHECK(!maybe_result.IsSmi());
  return maybe_result;
}

Script JavaScriptFrame::script() const {
  return Script::cast(function().shared().script());
}

int CommonFrameWithJSLinkage::LookupExceptionHandlerInTable(
    int* stack_depth, HandlerTable::CatchPrediction* prediction) {
  DCHECK(!LookupCode().has_handler_table());
  DCHECK(!LookupCode().is_optimized_code());
  return -1;
}

void JavaScriptFrame::PrintFunctionAndOffset(JSFunction function,
                                             AbstractCode code, int code_offset,
                                             FILE* file,
                                             bool print_line_number) {
  PrintF(file, "%s", CodeKindIsOptimizedJSFunction(code.kind()) ? "*" : "~");
  function.PrintName(file);
  PrintF(file, "+%d", code_offset);
  if (print_line_number) {
    SharedFunctionInfo shared = function.shared();
    int source_pos = code.SourcePosition(code_offset);
    Object maybe_script = shared.script();
    if (maybe_script.IsScript()) {
      Script script = Script::cast(maybe_script);
      int line = script.GetLineNumber(source_pos) + 1;
      Object script_name_raw = script.name();
      if (script_name_raw.IsString()) {
        String script_name = String::cast(script.name());
        std::unique_ptr<char[]> c_script_name =
            script_name.ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
        PrintF(file, " at %s:%d", c_script_name.get(), line);
      } else {
        PrintF(file, " at <unknown>:%d", line);
      }
    } else {
      PrintF(file, " at <unknown>:<unknown>");
    }
  }
}

PrintTop(Isolate * isolate,FILE * file,bool print_args,bool print_line_number)1215 void JavaScriptFrame::PrintTop(Isolate* isolate, FILE* file, bool print_args,
1216                                bool print_line_number) {
1217   // constructor calls
1218   DisallowHeapAllocation no_allocation;
1219   JavaScriptFrameIterator it(isolate);
1220   while (!it.done()) {
1221     if (it.frame()->is_java_script()) {
1222       JavaScriptFrame* frame = it.frame();
1223       if (frame->IsConstructor()) PrintF(file, "new ");
1224       JSFunction function = frame->function();
1225       int code_offset = 0;
1226       if (frame->is_interpreted()) {
1227         InterpretedFrame* iframe = reinterpret_cast<InterpretedFrame*>(frame);
1228         code_offset = iframe->GetBytecodeOffset();
1229       } else {
1230         Code code = frame->unchecked_code();
1231         code_offset = static_cast<int>(frame->pc() - code.InstructionStart());
1232       }
1233       PrintFunctionAndOffset(function, function.abstract_code(), code_offset,
1234                              file, print_line_number);
1235       if (print_args) {
1236         // function arguments
1237         // (we are intentionally only printing the actually
1238         // supplied parameters, not all parameters required)
1239         PrintF(file, "(this=");
1240         frame->receiver().ShortPrint(file);
1241         const int length = frame->ComputeParametersCount();
1242         for (int i = 0; i < length; i++) {
1243           PrintF(file, ", ");
1244           frame->GetParameter(i).ShortPrint(file);
1245         }
1246         PrintF(file, ")");
1247       }
1248       break;
1249     }
1250     it.Advance();
1251   }
1252 }
1253 
CollectFunctionAndOffsetForICStats(JSFunction function,AbstractCode code,int code_offset)1254 void JavaScriptFrame::CollectFunctionAndOffsetForICStats(JSFunction function,
1255                                                          AbstractCode code,
1256                                                          int code_offset) {
1257   auto ic_stats = ICStats::instance();
1258   ICInfo& ic_info = ic_stats->Current();
1259   SharedFunctionInfo shared = function.shared();
1260 
1261   ic_info.function_name = ic_stats->GetOrCacheFunctionName(function);
1262   ic_info.script_offset = code_offset;
1263 
1264   int source_pos = code.SourcePosition(code_offset);
1265   Object maybe_script = shared.script();
1266   if (maybe_script.IsScript()) {
1267     Script script = Script::cast(maybe_script);
1268     ic_info.line_num = script.GetLineNumber(source_pos) + 1;
1269     ic_info.column_num = script.GetColumnNumber(source_pos);
1270     ic_info.script_name = ic_stats->GetOrCacheScriptName(script);
1271   }
1272 }
1273 
GetParameter(int index) const1274 Object CommonFrameWithJSLinkage::GetParameter(int index) const {
1275   return Object(Memory<Address>(GetParameterSlot(index)));
1276 }
1277 
ComputeParametersCount() const1278 int CommonFrameWithJSLinkage::ComputeParametersCount() const {
1279   DCHECK(can_access_heap_objects() &&
1280          isolate()->heap()->gc_state() == Heap::NOT_IN_GC);
1281   return function().shared().internal_formal_parameter_count();
1282 }
1283 
1284 #ifdef V8_NO_ARGUMENTS_ADAPTOR
GetActualArgumentCount() const1285 int JavaScriptFrame::GetActualArgumentCount() const {
1286   return static_cast<int>(
1287       Memory<intptr_t>(fp() + StandardFrameConstants::kArgCOffset));
1288 }
1289 #endif
1290 
GetParameters() const1291 Handle<FixedArray> CommonFrameWithJSLinkage::GetParameters() const {
1292   if (V8_LIKELY(!FLAG_detailed_error_stack_trace)) {
1293     return isolate()->factory()->empty_fixed_array();
1294   }
1295   int param_count = ComputeParametersCount();
1296   Handle<FixedArray> parameters =
1297       isolate()->factory()->NewFixedArray(param_count);
1298   for (int i = 0; i < param_count; i++) {
1299     parameters->set(i, GetParameter(i));
1300   }
1301 
1302   return parameters;
1303 }
1304 
function() const1305 JSFunction JavaScriptBuiltinContinuationFrame::function() const {
1306   const int offset = BuiltinContinuationFrameConstants::kFunctionOffset;
1307   return JSFunction::cast(Object(base::Memory<Address>(fp() + offset)));
1308 }
1309 
1310 int JavaScriptBuiltinContinuationFrame::ComputeParametersCount() const {
1311   // Assert that the first allocatable register is also the argument count
1312   // register.
1313   DCHECK_EQ(RegisterConfiguration::Default()->GetAllocatableGeneralCode(0),
1314             kJavaScriptCallArgCountRegister.code());
1315   Object argc_object(
1316       Memory<Address>(fp() + BuiltinContinuationFrameConstants::kArgCOffset));
1317   return Smi::ToInt(argc_object);
1318 }
1319 
1320 intptr_t JavaScriptBuiltinContinuationFrame::GetSPToFPDelta() const {
1321   Address height_slot =
1322       fp() + BuiltinContinuationFrameConstants::kFrameSPtoFPDeltaAtDeoptimize;
1323   intptr_t height = Smi::ToInt(Smi(Memory<Address>(height_slot)));
1324   return height;
1325 }
1326 
1327 Object JavaScriptBuiltinContinuationFrame::context() const {
1328   return Object(Memory<Address>(
1329       fp() + BuiltinContinuationFrameConstants::kBuiltinContextOffset));
1330 }
1331 
1332 void JavaScriptBuiltinContinuationWithCatchFrame::SetException(
1333     Object exception) {
1334   int argc = ComputeParametersCount();
1335   Address exception_argument_slot =
1336       fp() + BuiltinContinuationFrameConstants::kFixedFrameSizeAboveFp +
1337       (argc - 1) * kSystemPointerSize;
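  // This addresses the last stack parameter of the continuation frame
  // (index argc - 1 above the frame's fixed header).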
1338 
1339   // Only allow setting the exception if the previous value was the hole.
1340   CHECK_EQ(ReadOnlyRoots(isolate()).the_hole_value(),
1341            Object(Memory<Address>(exception_argument_slot)));
1342   Memory<Address>(exception_argument_slot) = exception.ptr();
1343 }
1344 
1345 FrameSummary::JavaScriptFrameSummary::JavaScriptFrameSummary(
1346     Isolate* isolate, Object receiver, JSFunction function,
1347     AbstractCode abstract_code, int code_offset, bool is_constructor,
1348     FixedArray parameters)
1349     : FrameSummaryBase(isolate, FrameSummary::JAVA_SCRIPT),
1350       receiver_(receiver, isolate),
1351       function_(function, isolate),
1352       abstract_code_(abstract_code, isolate),
1353       code_offset_(code_offset),
1354       is_constructor_(is_constructor),
1355       parameters_(parameters, isolate) {
1356   DCHECK(abstract_code.IsBytecodeArray() ||
1357          !CodeKindIsOptimizedJSFunction(Code::cast(abstract_code).kind()));
1358 }
1359 
1360 void FrameSummary::EnsureSourcePositionsAvailable() {
1361   if (IsJavaScript()) {
1362     java_script_summary_.EnsureSourcePositionsAvailable();
1363   }
1364 }
1365 
1366 bool FrameSummary::AreSourcePositionsAvailable() const {
1367   if (IsJavaScript()) {
1368     return java_script_summary_.AreSourcePositionsAvailable();
1369   }
1370   return true;
1371 }
1372 
1373 void FrameSummary::JavaScriptFrameSummary::EnsureSourcePositionsAvailable() {
1374   Handle<SharedFunctionInfo> shared(function()->shared(), isolate());
1375   SharedFunctionInfo::EnsureSourcePositionsAvailable(isolate(), shared);
1376 }
1377 
1378 bool FrameSummary::JavaScriptFrameSummary::AreSourcePositionsAvailable() const {
1379   return !FLAG_enable_lazy_source_positions ||
1380          function()->shared().GetBytecodeArray().HasSourcePositionTable();
1381 }
1382 
1383 bool FrameSummary::JavaScriptFrameSummary::is_subject_to_debugging() const {
1384   return function()->shared().IsSubjectToDebugging();
1385 }
1386 
1387 int FrameSummary::JavaScriptFrameSummary::SourcePosition() const {
1388   return abstract_code()->SourcePosition(code_offset());
1389 }
1390 
1391 int FrameSummary::JavaScriptFrameSummary::SourceStatementPosition() const {
1392   return abstract_code()->SourceStatementPosition(code_offset());
1393 }
1394 
1395 Handle<Object> FrameSummary::JavaScriptFrameSummary::script() const {
1396   return handle(function_->shared().script(), isolate());
1397 }
1398 
1399 Handle<String> FrameSummary::JavaScriptFrameSummary::FunctionName() const {
1400   return JSFunction::GetDebugName(function_);
1401 }
1402 
1403 Handle<Context> FrameSummary::JavaScriptFrameSummary::native_context() const {
1404   return handle(function_->context().native_context(), isolate());
1405 }
1406 
1407 FrameSummary::WasmFrameSummary::WasmFrameSummary(
1408     Isolate* isolate, Handle<WasmInstanceObject> instance, wasm::WasmCode* code,
1409     int code_offset, bool at_to_number_conversion)
1410     : FrameSummaryBase(isolate, WASM),
1411       wasm_instance_(instance),
1412       at_to_number_conversion_(at_to_number_conversion),
1413       code_(code),
1414       code_offset_(code_offset) {}
1415 
1416 Handle<Object> FrameSummary::WasmFrameSummary::receiver() const {
1417   return wasm_instance_->GetIsolate()->global_proxy();
1418 }
1419 
1420 uint32_t FrameSummary::WasmFrameSummary::function_index() const {
1421   return code()->index();
1422 }
1423 
1424 int FrameSummary::WasmFrameSummary::byte_offset() const {
1425   return code_->GetSourcePositionBefore(code_offset());
1426 }
1427 
1428 int FrameSummary::WasmFrameSummary::SourcePosition() const {
1429   const wasm::WasmModule* module = wasm_instance()->module_object().module();
1430   return GetSourcePosition(module, function_index(), byte_offset(),
1431                            at_to_number_conversion());
1432 }
1433 
1434 Handle<Script> FrameSummary::WasmFrameSummary::script() const {
1435   return handle(wasm_instance()->module_object().script(),
1436                 wasm_instance()->GetIsolate());
1437 }
1438 
1439 Handle<String> FrameSummary::WasmFrameSummary::FunctionName() const {
1440   Handle<WasmModuleObject> module_object(wasm_instance()->module_object(),
1441                                          isolate());
1442   return WasmModuleObject::GetFunctionName(isolate(), module_object,
1443                                            function_index());
1444 }
1445 
1446 Handle<Context> FrameSummary::WasmFrameSummary::native_context() const {
1447   return handle(wasm_instance()->native_context(), isolate());
1448 }
1449 
1450 FrameSummary::~FrameSummary() {
1451 #define FRAME_SUMMARY_DESTR(kind, type, field, desc) \
1452   case kind:                                         \
1453     field.~type();                                   \
1454     break;
1455   switch (base_.kind()) {
1456     FRAME_SUMMARY_VARIANTS(FRAME_SUMMARY_DESTR)
1457     default:
1458       UNREACHABLE();
1459   }
1460 #undef FRAME_SUMMARY_DESTR
1461 }
1462 
1463 FrameSummary FrameSummary::GetTop(const CommonFrame* frame) {
1464   std::vector<FrameSummary> frames;
1465   frame->Summarize(&frames);
1466   DCHECK_LT(0, frames.size());
1467   return frames.back();
1468 }
1469 
1470 FrameSummary FrameSummary::GetBottom(const CommonFrame* frame) {
1471   return Get(frame, 0);
1472 }
1473 
1474 FrameSummary FrameSummary::GetSingle(const CommonFrame* frame) {
1475   std::vector<FrameSummary> frames;
1476   frame->Summarize(&frames);
1477   DCHECK_EQ(1, frames.size());
1478   return frames.front();
1479 }
1480 
1481 FrameSummary FrameSummary::Get(const CommonFrame* frame, int index) {
1482   DCHECK_LE(0, index);
1483   std::vector<FrameSummary> frames;
1484   frame->Summarize(&frames);
1485   DCHECK_GT(frames.size(), index);
1486   return frames[index];
1487 }
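// Note on ordering: Summarize() appends one summary per (possibly inlined)
// frame, from the outermost (bottom) to the innermost (top) frame, so
// GetBottom() returns the outermost summary and GetTop() the innermost one.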
1488 
1489 #define FRAME_SUMMARY_DISPATCH(ret, name)   \
1490   ret FrameSummary::name() const {          \
1491     switch (base_.kind()) {                 \
1492       case JAVA_SCRIPT:                     \
1493         return java_script_summary_.name(); \
1494       case WASM:                            \
1495         return wasm_summary_.name();        \
1496       default:                              \
1497         UNREACHABLE();                      \
1498     }                                       \
1499   }
1500 
1501 FRAME_SUMMARY_DISPATCH(Handle<Object>, receiver)
1502 FRAME_SUMMARY_DISPATCH(int, code_offset)
1503 FRAME_SUMMARY_DISPATCH(bool, is_constructor)
1504 FRAME_SUMMARY_DISPATCH(bool, is_subject_to_debugging)
1505 FRAME_SUMMARY_DISPATCH(Handle<Object>, script)
1506 FRAME_SUMMARY_DISPATCH(int, SourcePosition)
1507 FRAME_SUMMARY_DISPATCH(int, SourceStatementPosition)
1508 FRAME_SUMMARY_DISPATCH(Handle<String>, FunctionName)
1509 FRAME_SUMMARY_DISPATCH(Handle<Context>, native_context)
1510 
1511 #undef FRAME_SUMMARY_DISPATCH
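// Illustrative usage sketch (not present in the original source): given a
// CommonFrame* obtained from a stack walk, the innermost summary and its
// position can be read through the dispatching accessors above, e.g.:
//
//   FrameSummary summary = FrameSummary::GetTop(frame);
//   int source_position = summary.SourcePosition();
//   Handle<Object> script = summary.script();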
1512 
1513 void OptimizedFrame::Summarize(std::vector<FrameSummary>* frames) const {
1514   DCHECK(frames->empty());
1515   DCHECK(is_optimized());
1516 
1517   // Delegate to JS frame in absence of turbofan deoptimization.
1518   // TODO(turbofan): Revisit once we support deoptimization across the board.
1519   Code code = LookupCode();
1520   if (code.kind() == CodeKind::BUILTIN) {
1521     return JavaScriptFrame::Summarize(frames);
1522   }
1523 
1524   int deopt_index = Safepoint::kNoDeoptimizationIndex;
1525   DeoptimizationData const data = GetDeoptimizationData(&deopt_index);
1526   if (deopt_index == Safepoint::kNoDeoptimizationIndex) {
1527     CHECK(data.is_null());
1528     FATAL("Missing deoptimization information for OptimizedFrame::Summarize.");
1529   }
1530 
1531   // Prepare iteration over translation. Note that the iteration below might
1532   // materialize objects without storing them back to the Isolate; this will
1533   // lead to objects being re-materialized for each summary.
1534   TranslatedState translated(this);
1535   translated.Prepare(fp());
1536 
1537   // We create the summary in reverse order because the frames
1538   // in the deoptimization translation are ordered bottom-to-top.
1539   bool is_constructor = IsConstructor();
1540   for (auto it = translated.begin(); it != translated.end(); it++) {
1541     if (it->kind() == TranslatedFrame::kInterpretedFunction ||
1542         it->kind() == TranslatedFrame::kJavaScriptBuiltinContinuation ||
1543         it->kind() ==
1544             TranslatedFrame::kJavaScriptBuiltinContinuationWithCatch) {
1545       Handle<SharedFunctionInfo> shared_info = it->shared_info();
1546 
1547       // The translation commands are ordered: the function always comes
1548       // first, followed by the receiver.
1549       TranslatedFrame::iterator translated_values = it->begin();
1550 
1551       // Get or materialize the correct function in the optimized frame.
1552       Handle<JSFunction> function =
1553           Handle<JSFunction>::cast(translated_values->GetValue());
1554       translated_values++;
1555 
1556       // Get or materialize the correct receiver in the optimized frame.
1557       Handle<Object> receiver = translated_values->GetValue();
1558       translated_values++;
1559 
1560       // Determine the underlying code object and the position within it from
1561       // the translation corresponding to the frame type in question.
1562       Handle<AbstractCode> abstract_code;
1563       unsigned code_offset;
1564       if (it->kind() == TranslatedFrame::kJavaScriptBuiltinContinuation ||
1565           it->kind() ==
1566               TranslatedFrame::kJavaScriptBuiltinContinuationWithCatch) {
1567         code_offset = 0;
1568         abstract_code =
1569             handle(AbstractCode::cast(isolate()->builtins()->builtin(
1570                        Builtins::GetBuiltinFromBailoutId(it->node_id()))),
1571                    isolate());
1572       } else {
1573         DCHECK_EQ(it->kind(), TranslatedFrame::kInterpretedFunction);
1574         code_offset = it->node_id().ToInt();  // Points to current bytecode.
1575         abstract_code = handle(shared_info->abstract_code(), isolate());
1576       }
1577 
1578       // Append full summary of the encountered JS frame.
1579       Handle<FixedArray> params = GetParameters();
1580       FrameSummary::JavaScriptFrameSummary summary(
1581           isolate(), *receiver, *function, *abstract_code, code_offset,
1582           is_constructor, *params);
1583       frames->push_back(summary);
1584       is_constructor = false;
1585     } else if (it->kind() == TranslatedFrame::kConstructStub) {
1586       // The next encountered JS frame will be marked as a constructor call.
1587       DCHECK(!is_constructor);
1588       is_constructor = true;
1589     }
1590   }
1591 }
1592 
1593 int OptimizedFrame::LookupExceptionHandlerInTable(
1594     int* data, HandlerTable::CatchPrediction* prediction) {
1595   // We cannot perform exception prediction on optimized code. Instead, we need
1596   // to use FrameSummary to find the corresponding code offset in unoptimized
1597   // code to perform prediction there.
1598   DCHECK_NULL(prediction);
1599   Code code = LookupCode();
1600   HandlerTable table(code);
1601   int pc_offset = static_cast<int>(pc() - code.InstructionStart());
1602   DCHECK_NULL(data);  // |data| is unused; no value is returned through it.
1603 
1604   // When the return pc has been replaced by a trampoline there won't be
1605   // a handler for this trampoline. Thus we need to use the return pc that
1606   // _used to be_ on the stack to get the right ExceptionHandler.
1607   if (CodeKindCanDeoptimize(code.kind()) && code.marked_for_deoptimization()) {
1608     SafepointTable safepoints(code);
1609     pc_offset = safepoints.find_return_pc(pc_offset);
1610   }
1611   return table.LookupReturn(pc_offset);
1612 }
1613 
1614 DeoptimizationData OptimizedFrame::GetDeoptimizationData(
1615     int* deopt_index) const {
1616   DCHECK(is_optimized());
1617 
1618   JSFunction opt_function = function();
1619   Code code = opt_function.code();
1620 
1621   // The code object may have been replaced by lazy deoptimization. Fall
1622   // back to a slow search in this case to find the original optimized
1623   // code object.
1624   if (!code.contains(pc())) {
1625     code = isolate()->heap()->GcSafeFindCodeForInnerPointer(pc());
1626   }
1627   DCHECK(!code.is_null());
1628   DCHECK(CodeKindCanDeoptimize(code.kind()));
1629 
1630   SafepointEntry safepoint_entry = code.GetSafepointEntry(pc());
1631   if (safepoint_entry.has_deoptimization_index()) {
1632     *deopt_index = safepoint_entry.deoptimization_index();
1633     return DeoptimizationData::cast(code.deoptimization_data());
1634   }
1635   *deopt_index = Safepoint::kNoDeoptimizationIndex;
1636   return DeoptimizationData();
1637 }
1638 
1639 void OptimizedFrame::GetFunctions(
1640     std::vector<SharedFunctionInfo>* functions) const {
1641   DCHECK(functions->empty());
1642   DCHECK(is_optimized());
1643 
1644   // Delegate to JS frame in absence of turbofan deoptimization.
1645   // TODO(turbofan): Revisit once we support deoptimization across the board.
1646   Code code = LookupCode();
1647   if (code.kind() == CodeKind::BUILTIN) {
1648     return JavaScriptFrame::GetFunctions(functions);
1649   }
1650 
1651   DisallowHeapAllocation no_gc;
1652   int deopt_index = Safepoint::kNoDeoptimizationIndex;
1653   DeoptimizationData const data = GetDeoptimizationData(&deopt_index);
1654   DCHECK(!data.is_null());
1655   DCHECK_NE(Safepoint::kNoDeoptimizationIndex, deopt_index);
1656   FixedArray const literal_array = data.LiteralArray();
1657 
1658   TranslationIterator it(data.TranslationByteArray(),
1659                          data.TranslationIndex(deopt_index).value());
1660   Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
1661   DCHECK_EQ(Translation::BEGIN, opcode);
1662   it.Next();  // Skip frame count.
1663   int jsframe_count = it.Next();
1664   it.Next();  // Skip update feedback count.
1665 
1666   // We insert the frames in reverse order because the frames
1667   // in the deoptimization translation are ordered bottom-to-top.
1668   while (jsframe_count != 0) {
1669     opcode = static_cast<Translation::Opcode>(it.Next());
1670     if (opcode == Translation::INTERPRETED_FRAME ||
1671         opcode == Translation::JAVA_SCRIPT_BUILTIN_CONTINUATION_FRAME ||
1672         opcode ==
1673             Translation::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH_FRAME) {
1674       it.Next();  // Skip bailout id.
1675       jsframe_count--;
1676 
1677       // The second operand of the frame points to the function.
1678       Object shared = literal_array.get(it.Next());
1679       functions->push_back(SharedFunctionInfo::cast(shared));
1680 
1681       // Skip over remaining operands to advance to the next opcode.
1682       it.Skip(Translation::NumberOfOperandsFor(opcode) - 2);
1683     } else {
1684       // Skip over operands to advance to the next opcode.
1685       it.Skip(Translation::NumberOfOperandsFor(opcode));
1686     }
1687   }
1688 }
1689 
1690 int OptimizedFrame::StackSlotOffsetRelativeToFp(int slot_index) {
1691   return StandardFrameConstants::kCallerSPOffset -
1692          ((slot_index + 1) * kSystemPointerSize);
1693 }
1694 
1695 Object OptimizedFrame::StackSlotAt(int index) const {
1696   return Object(Memory<Address>(fp() + StackSlotOffsetRelativeToFp(index)));
1697 }
1698 
1699 int InterpretedFrame::position() const {
1700   AbstractCode code = AbstractCode::cast(GetBytecodeArray());
1701   int code_offset = GetBytecodeOffset();
1702   return code.SourcePosition(code_offset);
1703 }
1704 
1705 int InterpretedFrame::LookupExceptionHandlerInTable(
1706     int* context_register, HandlerTable::CatchPrediction* prediction) {
1707   HandlerTable table(GetBytecodeArray());
1708   return table.LookupRange(GetBytecodeOffset(), context_register, prediction);
1709 }
1710 
1711 int InterpretedFrame::GetBytecodeOffset() const {
1712   const int index = InterpreterFrameConstants::kBytecodeOffsetExpressionIndex;
1713   DCHECK_EQ(InterpreterFrameConstants::kBytecodeOffsetFromFp,
1714             InterpreterFrameConstants::kExpressionsOffset -
1715                 index * kSystemPointerSize);
1716   int raw_offset = Smi::ToInt(GetExpression(index));
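  // The stored offset is biased by BytecodeArray::kHeaderSize - kHeapObjectTag;
  // subtract that bias to recover the plain bytecode offset
  // (PatchBytecodeOffset below adds it back).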
1717   return raw_offset - BytecodeArray::kHeaderSize + kHeapObjectTag;
1718 }
1719 
1720 int InterpretedFrame::GetBytecodeOffset(Address fp) {
1721   const int offset = InterpreterFrameConstants::kExpressionsOffset;
1722   const int index = InterpreterFrameConstants::kBytecodeOffsetExpressionIndex;
1723   DCHECK_EQ(InterpreterFrameConstants::kBytecodeOffsetFromFp,
1724             InterpreterFrameConstants::kExpressionsOffset -
1725                 index * kSystemPointerSize);
1726   Address expression_offset = fp + offset - index * kSystemPointerSize;
1727   int raw_offset = Smi::ToInt(Object(Memory<Address>(expression_offset)));
1728   return raw_offset - BytecodeArray::kHeaderSize + kHeapObjectTag;
1729 }
1730 
1731 void InterpretedFrame::PatchBytecodeOffset(int new_offset) {
1732   const int index = InterpreterFrameConstants::kBytecodeOffsetExpressionIndex;
1733   DCHECK_EQ(InterpreterFrameConstants::kBytecodeOffsetFromFp,
1734             InterpreterFrameConstants::kExpressionsOffset -
1735                 index * kSystemPointerSize);
1736   int raw_offset = BytecodeArray::kHeaderSize - kHeapObjectTag + new_offset;
1737   SetExpression(index, Smi::FromInt(raw_offset));
1738 }
1739 
1740 BytecodeArray InterpretedFrame::GetBytecodeArray() const {
1741   const int index = InterpreterFrameConstants::kBytecodeArrayExpressionIndex;
1742   DCHECK_EQ(InterpreterFrameConstants::kBytecodeArrayFromFp,
1743             InterpreterFrameConstants::kExpressionsOffset -
1744                 index * kSystemPointerSize);
1745   return BytecodeArray::cast(GetExpression(index));
1746 }
1747 
1748 void InterpretedFrame::PatchBytecodeArray(BytecodeArray bytecode_array) {
1749   const int index = InterpreterFrameConstants::kBytecodeArrayExpressionIndex;
1750   DCHECK_EQ(InterpreterFrameConstants::kBytecodeArrayFromFp,
1751             InterpreterFrameConstants::kExpressionsOffset -
1752                 index * kSystemPointerSize);
1753   SetExpression(index, bytecode_array);
1754 }
1755 
1756 Object InterpretedFrame::ReadInterpreterRegister(int register_index) const {
1757   const int index = InterpreterFrameConstants::kRegisterFileExpressionIndex;
1758   DCHECK_EQ(InterpreterFrameConstants::kRegisterFileFromFp,
1759             InterpreterFrameConstants::kExpressionsOffset -
1760                 index * kSystemPointerSize);
1761   return GetExpression(index + register_index);
1762 }
1763 
1764 void InterpretedFrame::WriteInterpreterRegister(int register_index,
1765                                                 Object value) {
1766   const int index = InterpreterFrameConstants::kRegisterFileExpressionIndex;
1767   DCHECK_EQ(InterpreterFrameConstants::kRegisterFileFromFp,
1768             InterpreterFrameConstants::kExpressionsOffset -
1769                 index * kSystemPointerSize);
1770   return SetExpression(index + register_index, value);
1771 }
1772 
1773 void InterpretedFrame::Summarize(std::vector<FrameSummary>* functions) const {
1774   DCHECK(functions->empty());
1775   Handle<AbstractCode> abstract_code(AbstractCode::cast(GetBytecodeArray()),
1776                                      isolate());
1777   Handle<FixedArray> params = GetParameters();
1778   FrameSummary::JavaScriptFrameSummary summary(
1779       isolate(), receiver(), function(), *abstract_code, GetBytecodeOffset(),
1780       IsConstructor(), *params);
1781   functions->push_back(summary);
1782 }
1783 
1784 int ArgumentsAdaptorFrame::ComputeParametersCount() const {
1785   const int offset = ArgumentsAdaptorFrameConstants::kLengthOffset;
1786   return Smi::ToInt(Object(base::Memory<Address>(fp() + offset)));
1787 }
1788 
1789 Code ArgumentsAdaptorFrame::unchecked_code() const {
1790   return isolate()->builtins()->builtin(Builtins::kArgumentsAdaptorTrampoline);
1791 }
1792 
1793 JSFunction BuiltinFrame::function() const {
1794   const int offset = BuiltinFrameConstants::kFunctionOffset;
1795   return JSFunction::cast(Object(base::Memory<Address>(fp() + offset)));
1796 }
1797 
1798 int BuiltinFrame::ComputeParametersCount() const {
1799   const int offset = BuiltinFrameConstants::kLengthOffset;
1800   return Smi::ToInt(Object(base::Memory<Address>(fp() + offset)));
1801 }
1802 
1803 Code InternalFrame::unchecked_code() const { return Code(); }
1804 
1805 void WasmFrame::Print(StringStream* accumulator, PrintMode mode,
1806                       int index) const {
1807   PrintIndex(accumulator, mode, index);
1808   accumulator->Add("WASM [");
1809   accumulator->PrintName(script().name());
1810   Address instruction_start = isolate()
1811                                   ->wasm_engine()
1812                                   ->code_manager()
1813                                   ->LookupCode(pc())
1814                                   ->instruction_start();
1815   Vector<const uint8_t> raw_func_name =
1816       module_object().GetRawFunctionName(function_index());
1817   const int kMaxPrintedFunctionName = 64;
1818   char func_name[kMaxPrintedFunctionName + 1];
1819   int func_name_len = std::min(kMaxPrintedFunctionName, raw_func_name.length());
1820   memcpy(func_name, raw_func_name.begin(), func_name_len);
1821   func_name[func_name_len] = '\0';
1822   int pos = position();
1823   const wasm::WasmModule* module = wasm_instance().module_object().module();
1824   int func_index = function_index();
1825   int func_code_offset = module->functions[func_index].code.offset();
1826   accumulator->Add("], function #%u ('%s'), pc=%p (+0x%x), pos=%d (+%d)\n",
1827                    func_index, func_name, reinterpret_cast<void*>(pc()),
1828                    static_cast<int>(pc() - instruction_start), pos,
1829                    pos - func_code_offset);
1830   if (mode != OVERVIEW) accumulator->Add("\n");
1831 }
1832 
1833 Code WasmFrame::unchecked_code() const {
1834   return isolate()->FindCodeObject(pc());
1835 }
1836 
1837 wasm::WasmCode* WasmFrame::wasm_code() const {
1838   return isolate()->wasm_engine()->code_manager()->LookupCode(pc());
1839 }
1840 
1841 WasmInstanceObject WasmFrame::wasm_instance() const {
1842   const int offset = WasmFrameConstants::kWasmInstanceOffset;
1843   Object instance(Memory<Address>(fp() + offset));
1844   return WasmInstanceObject::cast(instance);
1845 }
1846 
1847 wasm::NativeModule* WasmFrame::native_module() const {
1848   return module_object().native_module();
1849 }
1850 
1851 WasmModuleObject WasmFrame::module_object() const {
1852   return wasm_instance().module_object();
1853 }
1854 
1855 uint32_t WasmFrame::function_index() const {
1856   return FrameSummary::GetSingle(this).AsWasm().function_index();
1857 }
1858 
1859 Script WasmFrame::script() const { return module_object().script(); }
1860 
1861 int WasmFrame::position() const {
1862   wasm::WasmCodeRefScope code_ref_scope;
1863   const wasm::WasmModule* module = wasm_instance().module_object().module();
1864   return GetSourcePosition(module, function_index(), byte_offset(),
1865                            at_to_number_conversion());
1866 }
1867 
1868 int WasmFrame::byte_offset() const {
1869   wasm::WasmCode* code = wasm_code();
1870   int offset = static_cast<int>(pc() - code->instruction_start());
1871   return code->GetSourcePositionBefore(offset);
1872 }
1873 
1874 bool WasmFrame::is_inspectable() const {
1875   wasm::WasmCodeRefScope code_ref_scope;
1876   return wasm_code()->is_inspectable();
1877 }
1878 
1879 Object WasmFrame::context() const { return wasm_instance().native_context(); }
1880 
1881 void WasmFrame::Summarize(std::vector<FrameSummary>* functions) const {
1882   DCHECK(functions->empty());
1883   // The {WasmCode*} escapes this scope via the {FrameSummary}, which is fine,
1884   // since this code object is part of our stack.
1885   wasm::WasmCodeRefScope code_ref_scope;
1886   wasm::WasmCode* code = wasm_code();
1887   int offset = static_cast<int>(pc() - code->instruction_start());
1888   Handle<WasmInstanceObject> instance(wasm_instance(), isolate());
1889   FrameSummary::WasmFrameSummary summary(isolate(), instance, code, offset,
1890                                          at_to_number_conversion());
1891   functions->push_back(summary);
1892 }
1893 
1894 bool WasmFrame::at_to_number_conversion() const {
1895   // Check whether our callee is a WASM_TO_JS frame and whether this frame
1896   // is at the ToNumber conversion call.
1897   wasm::WasmCode* code =
1898       callee_pc() != kNullAddress
1899           ? isolate()->wasm_engine()->code_manager()->LookupCode(callee_pc())
1900           : nullptr;
1901   if (!code || code->kind() != wasm::WasmCode::kWasmToJsWrapper) return false;
1902   int offset = static_cast<int>(callee_pc() - code->instruction_start());
1903   int pos = code->GetSourcePositionBefore(offset);
1904   // The imported call has position 0, ToNumber has position 1.
1905   // If there is no source position available, this is also not a ToNumber call.
1906   DCHECK(pos == wasm::kNoCodePosition || pos == 0 || pos == 1);
1907   return pos == 1;
1908 }
1909 
1910 int WasmFrame::LookupExceptionHandlerInTable() {
1911   wasm::WasmCode* code =
1912       isolate()->wasm_engine()->code_manager()->LookupCode(pc());
1913   if (!code->IsAnonymous() && code->handler_table_size() > 0) {
1914     HandlerTable table(code);
1915     int pc_offset = static_cast<int>(pc() - code->instruction_start());
1916     return table.LookupReturn(pc_offset);
1917   }
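  // Either the code is anonymous or it has no handler table: report -1,
  // i.e. no handler was found for this frame.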
1918   return -1;
1919 }
1920 
1921 void WasmDebugBreakFrame::Iterate(RootVisitor* v) const {
1922   // Nothing to iterate here. This will change once we support references in
1923   // Liftoff.
1924 }
1925 
1926 void WasmDebugBreakFrame::Print(StringStream* accumulator, PrintMode mode,
1927                                 int index) const {
1928   PrintIndex(accumulator, mode, index);
1929   accumulator->Add("WASM DEBUG BREAK");
1930   if (mode != OVERVIEW) accumulator->Add("\n");
1931 }
1932 
1933 void JsToWasmFrame::Iterate(RootVisitor* v) const {
1934   Code code = GetContainingCode(isolate(), pc());
1935   //  GenericJSToWasmWrapper stack layout
1936   //  ------+-----------------+----------------------
1937   //        |  return addr    |
1938   //    fp  |- - - - - - - - -|  -------------------|
1939   //        |       fp        |                     |
1940   //   fp-p |- - - - - - - - -|                     |
1941   //        |  frame marker   |                     | no GC scan
1942   //  fp-2p |- - - - - - - - -|                     |
1943   //        |   scan_count    |                     |
1944   //  fp-3p |- - - - - - - - -|  -------------------|
1945   //        |      ....       | <- spill_slot_limit |
1946   //        |   spill slots   |                     | GC scan scan_count slots
1947   //        |      ....       | <- spill_slot_base--|
1948   //        |- - - - - - - - -|                     |
1949   if (code.is_null() || !code.is_builtin() ||
1950       code.builtin_index() != Builtins::kGenericJSToWasmWrapper) {
1951     // If it's not the GenericJSToWasmWrapper, then it's a specific wrapper
1952     // compiled by TurboFan, so we have to call IterateCompiledFrame.
1953     IterateCompiledFrame(v);
1954     return;
1955   }
1956   // The value stored at [fp - 2*kSystemPointerSize] indicates how many
1957   // stack slots should be scanned from the top of the stack.
1958   intptr_t scan_count =
1959       *reinterpret_cast<intptr_t*>(fp() - 2 * kSystemPointerSize);
1960 
1961   FullObjectSlot spill_slot_base(&Memory<Address>(sp()));
1962   FullObjectSlot spill_slot_limit(
1963       &Memory<Address>(sp() + scan_count * kSystemPointerSize));
1964   v->VisitRootPointers(Root::kTop, nullptr, spill_slot_base, spill_slot_limit);
1965 }
1966 
1967 WasmInstanceObject WasmCompileLazyFrame::wasm_instance() const {
1968   return WasmInstanceObject::cast(*wasm_instance_slot());
1969 }
1970 
1971 FullObjectSlot WasmCompileLazyFrame::wasm_instance_slot() const {
1972   const int offset = WasmCompileLazyFrameConstants::kWasmInstanceOffset;
1973   return FullObjectSlot(&Memory<Address>(fp() + offset));
1974 }
1975 
1976 void WasmCompileLazyFrame::Iterate(RootVisitor* v) const {
1977   const int header_size = WasmCompileLazyFrameConstants::kFixedFrameSizeFromFp;
1978   FullObjectSlot base(&Memory<Address>(sp()));
1979   FullObjectSlot limit(&Memory<Address>(fp() - header_size));
1980   v->VisitRootPointers(Root::kTop, nullptr, base, limit);
1981   v->VisitRootPointer(Root::kTop, nullptr, wasm_instance_slot());
1982 }
1983 
1984 namespace {
1985 
1986 void PrintFunctionSource(StringStream* accumulator, SharedFunctionInfo shared,
1987                          Code code) {
1988   if (FLAG_max_stack_trace_source_length != 0 && !code.is_null()) {
1989     std::ostringstream os;
1990     os << "--------- s o u r c e   c o d e ---------\n"
1991        << SourceCodeOf(shared, FLAG_max_stack_trace_source_length)
1992        << "\n-----------------------------------------\n";
1993     accumulator->Add(os.str().c_str());
1994   }
1995 }
1996 
1997 }  // namespace
1998 
1999 void JavaScriptFrame::Print(StringStream* accumulator, PrintMode mode,
2000                             int index) const {
2001   Handle<SharedFunctionInfo> shared = handle(function().shared(), isolate());
2002   SharedFunctionInfo::EnsureSourcePositionsAvailable(isolate(), shared);
2003 
2004   DisallowHeapAllocation no_gc;
2005   Object receiver = this->receiver();
2006   JSFunction function = this->function();
2007 
2008   accumulator->PrintSecurityTokenIfChanged(function);
2009   PrintIndex(accumulator, mode, index);
2010   PrintFrameKind(accumulator);
2011   Code code;
2012   if (IsConstructor()) accumulator->Add("new ");
2013   accumulator->PrintFunction(function, receiver, &code);
2014   accumulator->Add(" [%p]", function);
2015 
2016   // Get scope information for nicer output, if possible. If the code is
2017   // nullptr or doesn't contain scope info, the scope_info will report zero
2018   // parameters, stack local variables, context local variables, stack
2019   // slots, and context slots.
2020   ScopeInfo scope_info = shared->scope_info();
2021   Object script_obj = shared->script();
2022   if (script_obj.IsScript()) {
2023     Script script = Script::cast(script_obj);
2024     accumulator->Add(" [");
2025     accumulator->PrintName(script.name());
2026 
2027     if (is_interpreted()) {
2028       const InterpretedFrame* iframe =
2029           reinterpret_cast<const InterpretedFrame*>(this);
2030       BytecodeArray bytecodes = iframe->GetBytecodeArray();
2031       int offset = iframe->GetBytecodeOffset();
2032       int source_pos = AbstractCode::cast(bytecodes).SourcePosition(offset);
2033       int line = script.GetLineNumber(source_pos) + 1;
2034       accumulator->Add(":%d] [bytecode=%p offset=%d]", line,
2035                        reinterpret_cast<void*>(bytecodes.ptr()), offset);
2036     } else {
2037       int function_start_pos = shared->StartPosition();
2038       int line = script.GetLineNumber(function_start_pos) + 1;
2039       accumulator->Add(":~%d] [pc=%p]", line, reinterpret_cast<void*>(pc()));
2040     }
2041   }
2042 
2043   accumulator->Add("(this=%o", receiver);
2044 
2045   // Print the parameters.
2046   int parameters_count = ComputeParametersCount();
2047   for (int i = 0; i < parameters_count; i++) {
2048     accumulator->Add(",");
2049     accumulator->Add("%o", GetParameter(i));
2050   }
2051 
2052   accumulator->Add(")");
2053   if (mode == OVERVIEW) {
2054     accumulator->Add("\n");
2055     return;
2056   }
2057   if (is_optimized()) {
2058     accumulator->Add(" {\n// optimized frame\n");
2059     PrintFunctionSource(accumulator, *shared, code);
2060     accumulator->Add("}\n");
2061     return;
2062   }
2063   accumulator->Add(" {\n");
2064 
2065   // Compute the number of locals and expression stack elements.
2066   int heap_locals_count = scope_info.ContextLocalCount();
2067   int expressions_count = ComputeExpressionsCount();
2068 
2069   // Try to get hold of the context of this frame.
2070   Context context;
2071   if (this->context().IsContext()) {
2072     context = Context::cast(this->context());
2073     while (context.IsWithContext()) {
2074       context = context.previous();
2075       DCHECK(!context.is_null());
2076     }
2077   }
2078 
2079   // Print heap-allocated local variables.
2080   if (heap_locals_count > 0) {
2081     accumulator->Add("  // heap-allocated locals\n");
2082   }
2083   for (int i = 0; i < heap_locals_count; i++) {
2084     accumulator->Add("  var ");
2085     accumulator->PrintName(scope_info.ContextLocalName(i));
2086     accumulator->Add(" = ");
2087     if (!context.is_null()) {
2088       int index = Context::MIN_CONTEXT_SLOTS + i;
2089       if (index < context.length()) {
2090         accumulator->Add("%o", context.get(index));
2091       } else {
2092         accumulator->Add(
2093             "// warning: missing context slot - inconsistent frame?");
2094       }
2095     } else {
2096       accumulator->Add("// warning: no context found - inconsistent frame?");
2097     }
2098     accumulator->Add("\n");
2099   }
2100 
2101   // Print the expression stack.
2102   if (0 < expressions_count) {
2103     accumulator->Add("  // expression stack (top to bottom)\n");
2104   }
2105   for (int i = expressions_count - 1; i >= 0; i--) {
2106     accumulator->Add("  [%02d] : %o\n", i, GetExpression(i));
2107   }
2108 
2109   PrintFunctionSource(accumulator, *shared, code);
2110 
2111   accumulator->Add("}\n\n");
2112 }
2113 
2114 void ArgumentsAdaptorFrame::Print(StringStream* accumulator, PrintMode mode,
2115                                   int index) const {
2116   int actual = ComputeParametersCount();
2117   int expected = -1;
2118   JSFunction function = this->function();
2119   expected = function.shared().internal_formal_parameter_count();
2120 
2121   PrintIndex(accumulator, mode, index);
2122   accumulator->Add("arguments adaptor frame: %d->%d", actual, expected);
2123   if (mode == OVERVIEW) {
2124     accumulator->Add("\n");
2125     return;
2126   }
2127   accumulator->Add(" {\n");
2128 
2129   // Print actual arguments.
2130   if (actual > 0) accumulator->Add("  // actual arguments\n");
2131   for (int i = 0; i < actual; i++) {
2132     accumulator->Add("  [%02d] : %o", i, GetParameter(i));
2133     if (expected != -1 && i >= expected) {
2134       accumulator->Add("  // not passed to callee");
2135     }
2136     accumulator->Add("\n");
2137   }
2138 
2139   accumulator->Add("}\n\n");
2140 }
2141 
2142 void EntryFrame::Iterate(RootVisitor* v) const {
2143   IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
2144 }
2145 
2146 void CommonFrame::IterateExpressions(RootVisitor* v) const {
2147   const int last_object_offset = StandardFrameConstants::kLastObjectOffset;
2148   intptr_t marker =
2149       Memory<intptr_t>(fp() + CommonFrameConstants::kContextOrFrameTypeOffset);
2150   FullObjectSlot base(&Memory<Address>(sp()));
2151   FullObjectSlot limit(&Memory<Address>(fp() + last_object_offset) + 1);
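  // |limit| is exclusive: the "+ 1" moves it one slot past the last tagged
  // slot of the fixed frame, so the visits below stop just past that slot.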
2152   if (StackFrame::IsTypeMarker(marker)) {
2153     v->VisitRootPointers(Root::kTop, nullptr, base, limit);
2154   } else {
2155     // The frame contains the actual argument count (intptr) that should not be
2156     // visited.
2157     FullObjectSlot argc(
2158         &Memory<Address>(fp() + StandardFrameConstants::kArgCOffset));
2159     v->VisitRootPointers(Root::kTop, nullptr, base, argc);
2160     v->VisitRootPointers(Root::kTop, nullptr, argc + 1, limit);
2161   }
2162 }
2163 
2164 void JavaScriptFrame::Iterate(RootVisitor* v) const {
2165   IterateExpressions(v);
2166   IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
2167 }
2168 
2169 void InternalFrame::Iterate(RootVisitor* v) const {
2170   Code code = LookupCode();
2171   IteratePc(v, pc_address(), constant_pool_address(), code);
2172   // Internal frames typically do not receive any arguments, hence their stack
2173   // only contains tagged pointers.
2174   // We are misusing the has_tagged_params flag here to tell us whether
2175   // the full stack frame contains only tagged pointers or only raw values.
2176   // This is used for the WasmCompileLazy builtin, where we actually pass
2177   // untagged arguments and also store untagged values on the stack.
2178   if (code.has_tagged_params()) IterateExpressions(v);
2179 }
2180 
2181 // -------------------------------------------------------------------------
2182 
2183 namespace {
2184 
2185 uint32_t PcAddressForHashing(Isolate* isolate, Address address) {
2186   if (InstructionStream::PcIsOffHeap(isolate, address)) {
2187     // Ensure that we get predictable hashes for addresses in embedded code.
2188     return EmbeddedData::FromBlob(isolate).AddressForHashing(address);
2189   }
2190   return ObjectAddressForHashing(address);
2191 }
2192 
2193 }  // namespace
2194 
2195 InnerPointerToCodeCache::InnerPointerToCodeCacheEntry*
2196 InnerPointerToCodeCache::GetCacheEntry(Address inner_pointer) {
2197   isolate_->counters()->pc_to_code()->Increment();
2198   DCHECK(base::bits::IsPowerOfTwo(kInnerPointerToCodeCacheSize));
2199   uint32_t hash =
2200       ComputeUnseededHash(PcAddressForHashing(isolate_, inner_pointer));
2201   uint32_t index = hash & (kInnerPointerToCodeCacheSize - 1);
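  // The cache is direct-mapped: each inner pointer hashes to exactly one
  // slot, and a miss simply overwrites that slot below.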
2202   InnerPointerToCodeCacheEntry* entry = cache(index);
2203   if (entry->inner_pointer == inner_pointer) {
2204     isolate_->counters()->pc_to_code_cached()->Increment();
2205     DCHECK(entry->code ==
2206            isolate_->heap()->GcSafeFindCodeForInnerPointer(inner_pointer));
2207   } else {
2208     // Because this code may be interrupted by a profiling signal that
2209     // also queries the cache, we cannot update inner_pointer before the code
2210     // has been set. Otherwise, we risk trying to use a cache entry before
2211     // the code has been computed.
2212     entry->code =
2213         isolate_->heap()->GcSafeFindCodeForInnerPointer(inner_pointer);
2214     entry->safepoint_entry.Reset();
2215     entry->inner_pointer = inner_pointer;
2216   }
2217   return entry;
2218 }
2219 
2220 // Frame layout helper class implementation.
2221 // -------------------------------------------------------------------------
2222 
2223 namespace {
2224 
2225 int ArgumentPaddingSlots(int arg_count) {
2226   return ShouldPadArguments(arg_count) ? 1 : 0;
2227 }
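// Note: ShouldPadArguments() is defined elsewhere; the assumption here is that
// targets which must keep the stack pointer aligned to twice the pointer size
// (e.g. arm64) request one extra padding slot when the argument count would
// otherwise leave the stack misaligned.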
2228 
2229 // Some architectures need to push padding together with the TOS register
2230 // in order to maintain stack alignment.
2231 constexpr int TopOfStackRegisterPaddingSlots() { return kPadArguments ? 1 : 0; }
2232 
2233 bool BuiltinContinuationModeIsWithCatch(BuiltinContinuationMode mode) {
2234   switch (mode) {
2235     case BuiltinContinuationMode::STUB:
2236     case BuiltinContinuationMode::JAVASCRIPT:
2237       return false;
2238     case BuiltinContinuationMode::JAVASCRIPT_WITH_CATCH:
2239     case BuiltinContinuationMode::JAVASCRIPT_HANDLE_EXCEPTION:
2240       return true;
2241   }
2242   UNREACHABLE();
2243 }
2244 
2245 }  // namespace
2246 
2247 InterpretedFrameInfo::InterpretedFrameInfo(int parameters_count_with_receiver,
2248                                            int translation_height,
2249                                            bool is_topmost, bool pad_arguments,
2250                                            FrameInfoKind frame_info_kind) {
2251   const int locals_count = translation_height;
2252 
2253   register_stack_slot_count_ =
2254       InterpreterFrameConstants::RegisterStackSlotCount(locals_count);
2255 
2256   static constexpr int kTheAccumulator = 1;
2257   static constexpr int kTopOfStackPadding = TopOfStackRegisterPaddingSlots();
2258   int maybe_additional_slots =
2259       (is_topmost || frame_info_kind == FrameInfoKind::kConservative)
2260           ? (kTheAccumulator + kTopOfStackPadding)
2261           : 0;
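  // In conservative mode the accumulator (and its padding) is always
  // accounted for, i.e. the frame is sized as if it were the topmost one.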
2262   frame_size_in_bytes_without_fixed_ =
2263       (register_stack_slot_count_ + maybe_additional_slots) *
2264       kSystemPointerSize;
2265 
2266   // The 'fixed' part of the frame consists of the incoming parameters and
2267   // the part described by InterpreterFrameConstants. This will include
2268   // argument padding, when needed.
2269   const int parameter_padding_slots =
2270       pad_arguments ? ArgumentPaddingSlots(parameters_count_with_receiver) : 0;
2271   const int fixed_frame_size =
2272       InterpreterFrameConstants::kFixedFrameSize +
2273       (parameters_count_with_receiver + parameter_padding_slots) *
2274           kSystemPointerSize;
2275   frame_size_in_bytes_ = frame_size_in_bytes_without_fixed_ + fixed_frame_size;
2276 }
2277 
2278 ArgumentsAdaptorFrameInfo::ArgumentsAdaptorFrameInfo(int translation_height) {
2279   // Note: This is according to the Translation's notion of 'parameters' which
2280   // differs from that of the SharedFunctionInfo, e.g. by including the receiver.
2281   const int parameters_count = translation_height;
2282   frame_size_in_bytes_without_fixed_ =
2283       (parameters_count + ArgumentPaddingSlots(parameters_count)) *
2284       kSystemPointerSize;
2285   frame_size_in_bytes_ = frame_size_in_bytes_without_fixed_ +
2286                          ArgumentsAdaptorFrameConstants::kFixedFrameSize;
2287 }
2288 
2289 ConstructStubFrameInfo::ConstructStubFrameInfo(int translation_height,
2290                                                bool is_topmost,
2291                                                FrameInfoKind frame_info_kind) {
2292   // Note: This is according to the Translation's notion of 'parameters' which
2293   // differs from that of the SharedFunctionInfo, e.g. by including the receiver.
2294   const int parameters_count = translation_height;
2295 
2296   // If the construct frame appears to be topmost, we should ensure that the
2297   // value of the result register is preserved during continuation execution.
2298   // We do this here by "pushing" the result of the constructor function to
2299   // the top of the reconstructed stack and popping it in
2300   // {Builtins::kNotifyDeoptimized}.
2301 
2302   static constexpr int kTopOfStackPadding = TopOfStackRegisterPaddingSlots();
2303   static constexpr int kTheResult = 1;
2304   const int argument_padding = ArgumentPaddingSlots(parameters_count);
2305 
2306   const int adjusted_height =
2307       (is_topmost || frame_info_kind == FrameInfoKind::kConservative)
2308           ? parameters_count + argument_padding + kTheResult +
2309                 kTopOfStackPadding
2310           : parameters_count + argument_padding;
2311   frame_size_in_bytes_without_fixed_ = adjusted_height * kSystemPointerSize;
2312   frame_size_in_bytes_ = frame_size_in_bytes_without_fixed_ +
2313                          ConstructFrameConstants::kFixedFrameSize;
2314 }
2315 
2316 BuiltinContinuationFrameInfo::BuiltinContinuationFrameInfo(
2317     int translation_height,
2318     const CallInterfaceDescriptor& continuation_descriptor,
2319     const RegisterConfiguration* register_config, bool is_topmost,
2320     DeoptimizeKind deopt_kind, BuiltinContinuationMode continuation_mode,
2321     FrameInfoKind frame_info_kind) {
2322   const bool is_conservative = frame_info_kind == FrameInfoKind::kConservative;
2323 
2324   // Note: This is according to the Translation's notion of 'parameters' which
2325   // differs from that of the SharedFunctionInfo, e.g. by including the receiver.
2326   const int parameters_count = translation_height;
2327   frame_has_result_stack_slot_ =
2328       !is_topmost || deopt_kind == DeoptimizeKind::kLazy;
2329   const int result_slot_count =
2330       (frame_has_result_stack_slot_ || is_conservative) ? 1 : 0;
2331 
2332   const int exception_slot_count =
2333       (BuiltinContinuationModeIsWithCatch(continuation_mode) || is_conservative)
2334           ? 1
2335           : 0;
2336 
2337   const int allocatable_register_count =
2338       register_config->num_allocatable_general_registers();
2339   const int padding_slot_count =
2340       BuiltinContinuationFrameConstants::PaddingSlotCount(
2341           allocatable_register_count);
2342 
2343   const int register_parameter_count =
2344       continuation_descriptor.GetRegisterParameterCount();
2345   translated_stack_parameter_count_ =
2346       parameters_count - register_parameter_count;
2347   stack_parameter_count_ = translated_stack_parameter_count_ +
2348                            result_slot_count + exception_slot_count;
2349   const int stack_param_pad_count =
2350       ArgumentPaddingSlots(stack_parameter_count_);
2351 
2352   // If the builtins frame appears to be topmost, we should ensure that the
2353   // value of the result register is preserved during continuation execution.
2354   // We do this here by "pushing" the result of callback function to the
2355   // top of the reconstructed stack and popping it in
2356   // {Builtins::kNotifyDeoptimized}.
2357   static constexpr int kTopOfStackPadding = TopOfStackRegisterPaddingSlots();
2358   static constexpr int kTheResult = 1;
2359   const int push_result_count =
2360       (is_topmost || is_conservative) ? kTheResult + kTopOfStackPadding : 0;
2361 
2362   frame_size_in_bytes_ =
2363       kSystemPointerSize * (stack_parameter_count_ + stack_param_pad_count +
2364                             allocatable_register_count + padding_slot_count +
2365                             push_result_count) +
2366       BuiltinContinuationFrameConstants::kFixedFrameSize;
2367 
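  // frame_size_in_bytes_above_fp_ (computed below) excludes the (padded)
  // stack parameters and the kFixedFrameSizeAboveFp portion of the fixed
  // frame; everything else matches the full frame size computed above.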
2368   frame_size_in_bytes_above_fp_ =
2369       kSystemPointerSize * (allocatable_register_count + padding_slot_count +
2370                             push_result_count) +
2371       (BuiltinContinuationFrameConstants::kFixedFrameSize -
2372        BuiltinContinuationFrameConstants::kFixedFrameSizeAboveFp);
2373 }
2374 
2375 }  // namespace internal
2376 }  // namespace v8
2377