// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/frames.h"

#include <memory>
#include <sstream>

#include "src/base/bits.h"
#include "src/deoptimizer.h"
#include "src/frames-inl.h"
#include "src/ic/ic-stats.h"
#include "src/register-configuration.h"
#include "src/safepoint-table.h"
#include "src/string-stream.h"
#include "src/visitors.h"
#include "src/vm-state-inl.h"
#include "src/wasm/wasm-code-manager.h"
#include "src/wasm/wasm-engine.h"
#include "src/wasm/wasm-objects-inl.h"
#include "src/zone/zone-containers.h"

namespace v8 {
namespace internal {

ReturnAddressLocationResolver StackFrame::return_address_location_resolver_ =
    nullptr;

// Iterator that supports traversing the stack handlers of a
// particular frame. Needs to know the top of the handler chain.
class StackHandlerIterator BASE_EMBEDDED {
 public:
  StackHandlerIterator(const StackFrame* frame, StackHandler* handler)
      : limit_(frame->fp()), handler_(handler) {
    // Make sure the handler has already been unwound to this frame.
    DCHECK(frame->sp() <= handler->address());
  }

  StackHandler* handler() const { return handler_; }

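  // Iteration stops once the handler chain has been unwound past this frame:
  // handlers at addresses above the frame pointer belong to caller frames.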
  bool done() { return handler_ == nullptr || handler_->address() > limit_; }

  void Advance() {
    DCHECK(!done());
    handler_ = handler_->next();
  }

 private:
  const Address limit_;
  StackHandler* handler_;
};


// -------------------------------------------------------------------------

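// Each iterator owns one pre-constructed frame object per frame type (see the
// INITIALIZE_SINGLETON expansion below), so SingletonFor() can hand out a
// frame during stack walking without allocating on the heap.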
#define INITIALIZE_SINGLETON(type, field) field##_(this),
StackFrameIteratorBase::StackFrameIteratorBase(Isolate* isolate,
                                               bool can_access_heap_objects)
    : isolate_(isolate),
      STACK_FRAME_TYPE_LIST(INITIALIZE_SINGLETON) frame_(nullptr),
      handler_(nullptr),
      can_access_heap_objects_(can_access_heap_objects) {}
#undef INITIALIZE_SINGLETON

StackFrameIterator::StackFrameIterator(Isolate* isolate)
    : StackFrameIterator(isolate, isolate->thread_local_top()) {}

StackFrameIterator::StackFrameIterator(Isolate* isolate, ThreadLocalTop* t)
    : StackFrameIteratorBase(isolate, true) {
  Reset(t);
}
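
// A minimal usage sketch (illustration only, not part of this file's logic):
// walking all frames of an isolate with the iterator defined above.
//
//   for (StackFrameIterator it(isolate); !it.done(); it.Advance()) {
//     StackFrame* frame = it.frame();
//     // Inspect frame->type(), frame->pc(), ...
//   }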

void StackFrameIterator::Advance() {
  DCHECK(!done());
  // Compute the state of the calling frame before restoring
  // callee-saved registers and unwinding handlers. This allows the
  // frame code that computes the caller state to access the top
  // handler and the value of any callee-saved register if needed.
  StackFrame::State state;
  StackFrame::Type type = frame_->GetCallerState(&state);

  // Unwind handlers corresponding to the current frame.
  StackHandlerIterator it(frame_, handler_);
  while (!it.done()) it.Advance();
  handler_ = it.handler();

  // Advance to the calling frame.
  frame_ = SingletonFor(type, &state);

  // When we're done iterating over the stack frames, the handler
  // chain must have been completely unwound.
  DCHECK(!done() || handler_ == nullptr);
}


void StackFrameIterator::Reset(ThreadLocalTop* top) {
  StackFrame::State state;
  StackFrame::Type type =
      ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state);
  handler_ = StackHandler::FromAddress(Isolate::handler(top));
  frame_ = SingletonFor(type, &state);
}


StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type,
                                                 StackFrame::State* state) {
  StackFrame* result = SingletonFor(type);
  DCHECK((!result) == (type == StackFrame::NONE));
  if (result) result->state_ = *state;
  return result;
}


StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type) {
#define FRAME_TYPE_CASE(type, field) \
  case StackFrame::type:             \
    return &field##_;

  switch (type) {
    case StackFrame::NONE:
      return nullptr;
      STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
    default: break;
  }
  return nullptr;

#undef FRAME_TYPE_CASE
}

// -------------------------------------------------------------------------

void JavaScriptFrameIterator::Advance() {
  do {
    iterator_.Advance();
  } while (!iterator_.done() && !iterator_.frame()->is_java_script());
}

// -------------------------------------------------------------------------

StackTraceFrameIterator::StackTraceFrameIterator(Isolate* isolate)
    : iterator_(isolate) {
  if (!done() && !IsValidFrame(iterator_.frame())) Advance();
}

StackTraceFrameIterator::StackTraceFrameIterator(Isolate* isolate,
                                                 StackFrame::Id id)
    : StackTraceFrameIterator(isolate) {
  while (!done() && frame()->id() != id) Advance();
}

void StackTraceFrameIterator::Advance() {
  do {
    iterator_.Advance();
  } while (!done() && !IsValidFrame(iterator_.frame()));
}

bool StackTraceFrameIterator::IsValidFrame(StackFrame* frame) const {
  if (frame->is_java_script()) {
    JavaScriptFrame* jsFrame = static_cast<JavaScriptFrame*>(frame);
    if (!jsFrame->function()->IsJSFunction()) return false;
    return jsFrame->function()->shared()->IsSubjectToDebugging();
  }
  // Apart from JavaScript frames, only Wasm frames are valid.
  return frame->is_wasm();
}

// -------------------------------------------------------------------------

namespace {

bool IsInterpreterFramePc(Isolate* isolate, Address pc,
                          StackFrame::State* state) {
  Code* interpreter_entry_trampoline =
      isolate->builtins()->builtin(Builtins::kInterpreterEntryTrampoline);
  Code* interpreter_bytecode_advance =
      isolate->builtins()->builtin(Builtins::kInterpreterEnterBytecodeAdvance);
  Code* interpreter_bytecode_dispatch =
      isolate->builtins()->builtin(Builtins::kInterpreterEnterBytecodeDispatch);

  if (interpreter_entry_trampoline->contains(pc) ||
      interpreter_bytecode_advance->contains(pc) ||
      interpreter_bytecode_dispatch->contains(pc)) {
    return true;
  } else if (FLAG_interpreted_frames_native_stack) {
    intptr_t marker = Memory<intptr_t>(
        state->fp + CommonFrameConstants::kContextOrFrameTypeOffset);
    MSAN_MEMORY_IS_INITIALIZED(
        state->fp + StandardFrameConstants::kFunctionOffset, kPointerSize);
    Object* maybe_function =
        Memory<Object*>(state->fp + StandardFrameConstants::kFunctionOffset);
    // There's no need to run a full ContainsSlow if we know the frame can't be
    // an InterpretedFrame, so we do these fast checks first.
    if (StackFrame::IsTypeMarker(marker) || maybe_function->IsSmi()) {
      return false;
    } else if (!isolate->heap()->code_space()->ContainsSlow(pc)) {
      return false;
    }
    interpreter_entry_trampoline =
        isolate->heap()->GcSafeFindCodeForInnerPointer(pc);
    return interpreter_entry_trampoline->is_interpreter_trampoline_builtin();
  } else {
    return false;
  }
}

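// Reads a word from a (possibly foreign) stack. ASan instrumentation is
// disabled, presumably because during profiling stack walks the address may
// lie outside the memory ASan considers addressable for this thread.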
DISABLE_ASAN Address ReadMemoryAt(Address address) {
  return Memory<Address>(address);
}

}  // namespace

SafeStackFrameIterator::SafeStackFrameIterator(
    Isolate* isolate,
    Address fp, Address sp, Address js_entry_sp)
    : StackFrameIteratorBase(isolate, false),
      low_bound_(sp),
      high_bound_(js_entry_sp),
      top_frame_type_(StackFrame::NONE),
      external_callback_scope_(isolate->external_callback_scope()) {
  StackFrame::State state;
  StackFrame::Type type;
  ThreadLocalTop* top = isolate->thread_local_top();
  bool advance_frame = true;
  if (IsValidTop(top)) {
    type = ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state);
    top_frame_type_ = type;
  } else if (IsValidStackAddress(fp)) {
    DCHECK_NE(fp, kNullAddress);
    state.fp = fp;
    state.sp = sp;
    state.pc_address = StackFrame::ResolveReturnAddressLocation(
        reinterpret_cast<Address*>(StandardFrame::ComputePCAddress(fp)));

    // If the top of stack is a return address to the interpreter trampoline,
    // then we are likely in a bytecode handler with an elided frame. In that
    // case, set the PC properly and make sure we do not drop the frame.
    if (IsValidStackAddress(sp)) {
      MSAN_MEMORY_IS_INITIALIZED(sp, kPointerSize);
      Address tos = ReadMemoryAt(sp);
      if (IsInterpreterFramePc(isolate, tos, &state)) {
        state.pc_address = reinterpret_cast<Address*>(sp);
        advance_frame = false;
      }
    }

    // StackFrame::ComputeType will read both kContextOffset and kMarkerOffset,
    // so we only check that kMarkerOffset is within the stack bounds and
    // statically assert that the kContextOffset slot is pushed onto the stack
    // before kMarkerOffset.
    STATIC_ASSERT(StandardFrameConstants::kFunctionOffset <
                  StandardFrameConstants::kContextOffset);
    Address frame_marker = fp + StandardFrameConstants::kFunctionOffset;
    if (IsValidStackAddress(frame_marker)) {
      type = StackFrame::ComputeType(this, &state);
      top_frame_type_ = type;
      // We only keep the top frame if we believe it to be an interpreted frame.
      if (type != StackFrame::INTERPRETED) {
        advance_frame = true;
      }
    } else {
      // Mark the frame as OPTIMIZED if we cannot determine its type.
      // We chose OPTIMIZED rather than INTERPRETED because it's closer to
      // the original value of StackFrame::JAVA_SCRIPT here, in that JAVA_SCRIPT
      // referred to full-codegen frames (now removed from the tree), and
      // OPTIMIZED refers to TurboFan frames, both of which are generated
      // code. INTERPRETED frames refer to bytecode.
      // The frame will be skipped anyway.
      type = StackFrame::OPTIMIZED;
      // Top frame is incomplete so we cannot reliably determine its type.
      top_frame_type_ = StackFrame::NONE;
    }
  } else {
    return;
  }
  frame_ = SingletonFor(type, &state);
  if (advance_frame && frame_) Advance();
}


bool SafeStackFrameIterator::IsValidTop(ThreadLocalTop* top) const {
  Address c_entry_fp = Isolate::c_entry_fp(top);
  if (!IsValidExitFrame(c_entry_fp)) return false;
  // There should be at least one JS_ENTRY stack handler.
  Address handler = Isolate::handler(top);
  if (handler == kNullAddress) return false;
  // Check that there are no js frames on top of the native frames.
  return c_entry_fp < handler;
}


void SafeStackFrameIterator::AdvanceOneFrame() {
  DCHECK(!done());
  StackFrame* last_frame = frame_;
  Address last_sp = last_frame->sp(), last_fp = last_frame->fp();
  // Before advancing to the next stack frame, perform pointer validity tests.
  if (!IsValidFrame(last_frame) || !IsValidCaller(last_frame)) {
    frame_ = nullptr;
    return;
  }

  // Advance to the previous frame.
  StackFrame::State state;
  StackFrame::Type type = frame_->GetCallerState(&state);
  frame_ = SingletonFor(type, &state);
  if (!frame_) return;

  // Check that we have actually moved to the previous frame in the stack.
  if (frame_->sp() <= last_sp || frame_->fp() <= last_fp) {
    frame_ = nullptr;
  }
}


bool SafeStackFrameIterator::IsValidFrame(StackFrame* frame) const {
  return IsValidStackAddress(frame->sp()) && IsValidStackAddress(frame->fp());
}


bool SafeStackFrameIterator::IsValidCaller(StackFrame* frame) {
  StackFrame::State state;
  if (frame->is_entry() || frame->is_construct_entry()) {
    // See EntryFrame::GetCallerState. It computes the caller FP address
    // and calls ExitFrame::GetStateForFramePointer on it. We need to be
    // sure that the caller FP address is valid.
    Address caller_fp =
        Memory<Address>(frame->fp() + EntryFrameConstants::kCallerFPOffset);
    if (!IsValidExitFrame(caller_fp)) return false;
  } else if (frame->is_arguments_adaptor()) {
    // See ArgumentsAdaptorFrame::GetCallerStackPointer. It assumes that
    // the number of arguments is stored on the stack as a Smi. We need to
    // check that it really is a Smi.
    Object* number_of_args =
        reinterpret_cast<ArgumentsAdaptorFrame*>(frame)->GetExpression(0);
    if (!number_of_args->IsSmi()) {
      return false;
    }
  }
  frame->ComputeCallerState(&state);
  return IsValidStackAddress(state.sp) && IsValidStackAddress(state.fp) &&
         SingletonFor(frame->GetCallerState(&state)) != nullptr;
}


bool SafeStackFrameIterator::IsValidExitFrame(Address fp) const {
  if (!IsValidStackAddress(fp)) return false;
  Address sp = ExitFrame::ComputeStackPointer(fp);
  if (!IsValidStackAddress(sp)) return false;
  StackFrame::State state;
  ExitFrame::FillState(fp, sp, &state);
  MSAN_MEMORY_IS_INITIALIZED(state.pc_address, sizeof(state.pc_address));
  return *state.pc_address != kNullAddress;
}


void SafeStackFrameIterator::Advance() {
  while (true) {
    AdvanceOneFrame();
    if (done()) break;
    ExternalCallbackScope* last_callback_scope = nullptr;
    while (external_callback_scope_ != nullptr &&
           external_callback_scope_->scope_address() < frame_->fp()) {
      // As long as the setup of a frame is not atomic, we may happen to be
      // in an interval where an ExternalCallbackScope is already created,
      // but the frame is not yet entered. So we are actually observing
      // the previous frame.
      // Skip all the ExternalCallbackScope's that are below the current fp.
      last_callback_scope = external_callback_scope_;
      external_callback_scope_ = external_callback_scope_->previous();
    }
    if (frame_->is_java_script() || frame_->is_wasm()) break;
    if (frame_->is_exit() || frame_->is_builtin_exit()) {
      // Some of the EXIT frames may have ExternalCallbackScope allocated on
      // top of them. In that case the scope corresponds to the first EXIT
      // frame beneath it. There may be other EXIT frames on top of the
      // ExternalCallbackScope, just skip them as we cannot collect any useful
      // information about them.
      if (last_callback_scope) {
        frame_->state_.pc_address =
            last_callback_scope->callback_entrypoint_address();
      }
      break;
    }
  }
}


// -------------------------------------------------------------------------

namespace {
Code* GetContainingCode(Isolate* isolate, Address pc) {
  return isolate->inner_pointer_to_code_cache()->GetCacheEntry(pc)->code;
}
}  // namespace

Code* StackFrame::LookupCode() const {
  Code* result = GetContainingCode(isolate(), pc());
  DCHECK_GE(pc(), result->InstructionStart());
  DCHECK_LT(pc(), result->InstructionEnd());
  return result;
}

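// Visits the pc of a frame as a root pointer. If the visitor (e.g. the GC
// during compaction) relocates the Code object that contains the pc, the
// stored pc and, if enabled, the cached constant pool pointer are updated to
// point into the moved code.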
void StackFrame::IteratePc(RootVisitor* v, Address* pc_address,
                           Address* constant_pool_address, Code* holder) {
  Address pc = *pc_address;
  DCHECK(holder->GetHeap()->GcSafeCodeContains(holder, pc));
  unsigned pc_offset = static_cast<unsigned>(pc - holder->InstructionStart());
  Object* code = holder;
  v->VisitRootPointer(Root::kTop, nullptr, &code);
  if (code == holder) return;
  holder = reinterpret_cast<Code*>(code);
  pc = holder->InstructionStart() + pc_offset;
  *pc_address = pc;
  if (FLAG_enable_embedded_constant_pool && constant_pool_address) {
    *constant_pool_address = holder->constant_pool();
  }
}


void StackFrame::SetReturnAddressLocationResolver(
    ReturnAddressLocationResolver resolver) {
  DCHECK_NULL(return_address_location_resolver_);
  return_address_location_resolver_ = resolver;
}

StackFrame::Type StackFrame::ComputeType(const StackFrameIteratorBase* iterator,
                                         State* state) {
  DCHECK_NE(state->fp, kNullAddress);

  MSAN_MEMORY_IS_INITIALIZED(
      state->fp + CommonFrameConstants::kContextOrFrameTypeOffset,
      kPointerSize);
  intptr_t marker = Memory<intptr_t>(
      state->fp + CommonFrameConstants::kContextOrFrameTypeOffset);
  if (!iterator->can_access_heap_objects_) {
    // TODO(titzer): "can_access_heap_objects" is kind of bogus. It really
    // means that we are being called from the profiler, which can interrupt
    // the VM with a signal at any arbitrary instruction, with essentially
    // anything on the stack. So basically none of these checks are 100%
    // reliable.
    MSAN_MEMORY_IS_INITIALIZED(
        state->fp + StandardFrameConstants::kFunctionOffset, kPointerSize);
    Object* maybe_function =
        Memory<Object*>(state->fp + StandardFrameConstants::kFunctionOffset);
    if (!StackFrame::IsTypeMarker(marker)) {
      if (maybe_function->IsSmi()) {
        return NATIVE;
      } else if (IsInterpreterFramePc(iterator->isolate(), *(state->pc_address),
                                      state)) {
        return INTERPRETED;
      } else {
        return OPTIMIZED;
      }
    }
  } else {
    Address pc = *(state->pc_address);
    // If the {pc} does not point into WebAssembly code we can rely on the
    // returned {wasm_code} to be null and fall back to {GetContainingCode}.
    wasm::WasmCode* wasm_code =
        iterator->isolate()->wasm_engine()->code_manager()->LookupCode(pc);
    if (wasm_code != nullptr) {
      switch (wasm_code->kind()) {
        case wasm::WasmCode::kFunction:
          return WASM_COMPILED;
        case wasm::WasmCode::kWasmToJsWrapper:
          return WASM_TO_JS;
        case wasm::WasmCode::kLazyStub:
          return WASM_COMPILE_LAZY;
        case wasm::WasmCode::kRuntimeStub:
          return STUB;
        case wasm::WasmCode::kInterpreterEntry:
          return WASM_INTERPRETER_ENTRY;
        default:
          UNREACHABLE();
      }
    } else {
      // Look up the code object to figure out the type of the stack frame.
      Code* code_obj = GetContainingCode(iterator->isolate(), pc);
      if (code_obj != nullptr) {
        switch (code_obj->kind()) {
          case Code::BUILTIN:
            if (StackFrame::IsTypeMarker(marker)) break;
            if (code_obj->is_interpreter_trampoline_builtin()) {
              return INTERPRETED;
            }
            if (code_obj->is_turbofanned()) {
              // TODO(bmeurer): We treat frames for BUILTIN Code objects as
              // OptimizedFrame for now (all the builtins with JavaScript
              // linkage are actually generated with TurboFan currently, so
              // this is sound).
              return OPTIMIZED;
            }
            return BUILTIN;
          case Code::OPTIMIZED_FUNCTION:
            return OPTIMIZED;
          case Code::WASM_FUNCTION:
            return WASM_COMPILED;
          case Code::WASM_TO_JS_FUNCTION:
            return WASM_TO_JS;
          case Code::JS_TO_WASM_FUNCTION:
            return JS_TO_WASM;
          case Code::WASM_INTERPRETER_ENTRY:
            return WASM_INTERPRETER_ENTRY;
          case Code::C_WASM_ENTRY:
            return C_WASM_ENTRY;
          default:
            // All other types should have an explicit marker.
            break;
        }
      } else {
        return NATIVE;
      }
    }
  }
  DCHECK(StackFrame::IsTypeMarker(marker));
  StackFrame::Type candidate = StackFrame::MarkerToType(marker);
  switch (candidate) {
    case ENTRY:
    case CONSTRUCT_ENTRY:
    case EXIT:
    case BUILTIN_CONTINUATION:
    case JAVA_SCRIPT_BUILTIN_CONTINUATION:
    case JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH:
    case BUILTIN_EXIT:
    case STUB:
    case INTERNAL:
    case CONSTRUCT:
    case ARGUMENTS_ADAPTOR:
    case WASM_TO_JS:
    case WASM_COMPILED:
      return candidate;
    case JS_TO_WASM:
    case OPTIMIZED:
    case INTERPRETED:
    default:
      // Unoptimized and optimized JavaScript frames, including
      // interpreted frames, should never have a StackFrame::Type
      // marker. If we find one, we're likely being called from the
      // profiler in a bogus stack frame.
      return NATIVE;
  }
}


#ifdef DEBUG
bool StackFrame::can_access_heap_objects() const {
  return iterator_->can_access_heap_objects_;
}
#endif


StackFrame::Type StackFrame::GetCallerState(State* state) const {
  ComputeCallerState(state);
  return ComputeType(iterator_, state);
}


Address StackFrame::UnpaddedFP() const {
  return fp();
}

void NativeFrame::ComputeCallerState(State* state) const {
  state->sp = caller_sp();
  state->fp = Memory<Address>(fp() + CommonFrameConstants::kCallerFPOffset);
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(fp() + CommonFrameConstants::kCallerPCOffset));
  state->callee_pc_address = nullptr;
  state->constant_pool_address = nullptr;
}

Code* EntryFrame::unchecked_code() const {
  return isolate()->heap()->js_entry_code();
}


void EntryFrame::ComputeCallerState(State* state) const {
  GetCallerState(state);
}


StackFrame::Type EntryFrame::GetCallerState(State* state) const {
  const int offset = EntryFrameConstants::kCallerFPOffset;
  Address fp = Memory<Address>(this->fp() + offset);
  return ExitFrame::GetStateForFramePointer(fp, state);
}

Code* ConstructEntryFrame::unchecked_code() const {
  return isolate()->heap()->js_construct_entry_code();
}


Object*& ExitFrame::code_slot() const {
  const int offset = ExitFrameConstants::kCodeOffset;
  return Memory<Object*>(fp() + offset);
}

Code* ExitFrame::unchecked_code() const {
  return reinterpret_cast<Code*>(code_slot());
}


void ExitFrame::ComputeCallerState(State* state) const {
  // Set up the caller state.
  state->sp = caller_sp();
  state->fp = Memory<Address>(fp() + ExitFrameConstants::kCallerFPOffset);
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset));
  state->callee_pc_address = nullptr;
  if (FLAG_enable_embedded_constant_pool) {
    state->constant_pool_address = reinterpret_cast<Address*>(
        fp() + ExitFrameConstants::kConstantPoolOffset);
  }
}


void ExitFrame::Iterate(RootVisitor* v) const {
  // The arguments are traversed as part of the expression stack of
  // the calling frame.
  IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
  v->VisitRootPointer(Root::kTop, nullptr, &code_slot());
}


Address ExitFrame::GetCallerStackPointer() const {
  return fp() + ExitFrameConstants::kCallerSPOffset;
}


StackFrame::Type ExitFrame::GetStateForFramePointer(Address fp, State* state) {
  if (fp == 0) return NONE;
  Address sp = ComputeStackPointer(fp);
  FillState(fp, sp, state);
  DCHECK_NE(*state->pc_address, kNullAddress);

  return ComputeFrameType(fp);
}

StackFrame::Type ExitFrame::ComputeFrameType(Address fp) {
  // Distinguish between regular and builtin exit frames.
  // Default to EXIT in all hairy cases (e.g., when called from profiler).
  const int offset = ExitFrameConstants::kFrameTypeOffset;
  Object* marker = Memory<Object*>(fp + offset);

  if (!marker->IsSmi()) {
    return EXIT;
  }

  intptr_t marker_int = bit_cast<intptr_t>(marker);

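  // The marker is a Smi-encoded frame type: the type shifted left by one with
  // a zero tag bit (cf. StackFrame::IsTypeMarker), so shifting the tag back
  // out recovers the frame type.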
  StackFrame::Type frame_type = static_cast<StackFrame::Type>(marker_int >> 1);
  if (frame_type == EXIT || frame_type == BUILTIN_EXIT) {
    return frame_type;
  }

  return EXIT;
}

Address ExitFrame::ComputeStackPointer(Address fp) {
  MSAN_MEMORY_IS_INITIALIZED(fp + ExitFrameConstants::kSPOffset, kPointerSize);
  return Memory<Address>(fp + ExitFrameConstants::kSPOffset);
}

void ExitFrame::FillState(Address fp, Address sp, State* state) {
  state->sp = sp;
  state->fp = fp;
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(sp - 1 * kPCOnStackSize));
  state->callee_pc_address = nullptr;
  // The constant pool recorded in the exit frame is not associated
  // with the pc in this state (the return address into a C entry
  // stub). ComputeCallerState will retrieve the constant pool
  // together with the associated caller pc.
  state->constant_pool_address = nullptr;
}

JSFunction* BuiltinExitFrame::function() const {
  return JSFunction::cast(target_slot_object());
}

Object* BuiltinExitFrame::receiver() const { return receiver_slot_object(); }

bool BuiltinExitFrame::IsConstructor() const {
  return !new_target_slot_object()->IsUndefined(isolate());
}

Object* BuiltinExitFrame::GetParameter(int i) const {
  DCHECK(i >= 0 && i < ComputeParametersCount());
  int offset =
      BuiltinExitFrameConstants::kFirstArgumentOffset + i * kPointerSize;
  return Memory<Object*>(fp() + offset);
}

int BuiltinExitFrame::ComputeParametersCount() const {
  Object* argc_slot = argc_slot_object();
  DCHECK(argc_slot->IsSmi());
  // Argc also counts the receiver, target, new target, and argc itself as args,
  // therefore the real argument count is argc - 4.
  int argc = Smi::ToInt(argc_slot) - 4;
  DCHECK_GE(argc, 0);
  return argc;
}

namespace {
void PrintIndex(StringStream* accumulator, StackFrame::PrintMode mode,
                int index) {
  accumulator->Add((mode == StackFrame::OVERVIEW) ? "%5d: " : "[%d]: ", index);
}

const char* StringForStackFrameType(StackFrame::Type type) {
  switch (type) {
#define CASE(value, name) \
  case StackFrame::value: \
    return #name;
    STACK_FRAME_TYPE_LIST(CASE)
#undef CASE
    default:
      UNREACHABLE();
  }
}
}  // namespace

void StackFrame::Print(StringStream* accumulator, PrintMode mode,
                       int index) const {
  DisallowHeapAllocation no_gc;
  PrintIndex(accumulator, mode, index);
  accumulator->Add(StringForStackFrameType(type()));
  accumulator->Add(" [pc: %p]\n", reinterpret_cast<void*>(pc()));
}

void BuiltinExitFrame::Print(StringStream* accumulator, PrintMode mode,
                             int index) const {
  DisallowHeapAllocation no_gc;
  Object* receiver = this->receiver();
  JSFunction* function = this->function();

  accumulator->PrintSecurityTokenIfChanged(function);
  PrintIndex(accumulator, mode, index);
  accumulator->Add("builtin exit frame: ");
  Code* code = nullptr;
  if (IsConstructor()) accumulator->Add("new ");
  accumulator->PrintFunction(function, receiver, &code);

  accumulator->Add("(this=%o", receiver);

  // Print the parameters.
  int parameters_count = ComputeParametersCount();
  for (int i = 0; i < parameters_count; i++) {
    accumulator->Add(",%o", GetParameter(i));
  }

  accumulator->Add(")\n\n");
}

Address StandardFrame::GetExpressionAddress(int n) const {
  const int offset = StandardFrameConstants::kExpressionsOffset;
  return fp() + offset - n * kPointerSize;
}

Address InterpretedFrame::GetExpressionAddress(int n) const {
  const int offset = InterpreterFrameConstants::kExpressionsOffset;
  return fp() + offset - n * kPointerSize;
}

Script* StandardFrame::script() const {
  // This should only be called on frames which override this method.
  DCHECK(false);
  return nullptr;
}

Object* StandardFrame::receiver() const {
  return ReadOnlyRoots(isolate()).undefined_value();
}

Object* StandardFrame::context() const {
  return ReadOnlyRoots(isolate()).undefined_value();
}

int StandardFrame::position() const {
  AbstractCode* code = AbstractCode::cast(LookupCode());
  int code_offset = static_cast<int>(pc() - code->InstructionStart());
  return code->SourcePosition(code_offset);
}

int StandardFrame::ComputeExpressionsCount() const {
  Address base = GetExpressionAddress(0);
  Address limit = sp() - kPointerSize;
  DCHECK(base >= limit);  // stack grows downwards
  // Include register-allocated locals in number of expressions.
  return static_cast<int>((base - limit) / kPointerSize);
}

Object* StandardFrame::GetParameter(int index) const {
  // StandardFrame does not define any parameters.
  UNREACHABLE();
}

int StandardFrame::ComputeParametersCount() const { return 0; }

void StandardFrame::ComputeCallerState(State* state) const {
  state->sp = caller_sp();
  state->fp = caller_fp();
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(ComputePCAddress(fp())));
  state->callee_pc_address = pc_address();
  state->constant_pool_address =
      reinterpret_cast<Address*>(ComputeConstantPoolAddress(fp()));
}


bool StandardFrame::IsConstructor() const { return false; }

void StandardFrame::Summarize(std::vector<FrameSummary>* functions) const {
  // This should only be called on frames which override this method.
  UNREACHABLE();
}

void StandardFrame::IterateCompiledFrame(RootVisitor* v) const {
  // Make sure that we're not doing "safe" stack frame iteration. We cannot
  // possibly find pointers in optimized frames in that state.
  DCHECK(can_access_heap_objects());

  // Find the code and compute the safepoint information.
  Address inner_pointer = pc();
  const wasm::WasmCode* wasm_code =
      isolate()->wasm_engine()->code_manager()->LookupCode(inner_pointer);
  SafepointEntry safepoint_entry;
  uint32_t stack_slots;
  Code* code = nullptr;
  bool has_tagged_params = false;
  if (wasm_code != nullptr) {
    SafepointTable table(wasm_code->instruction_start(),
                         wasm_code->safepoint_table_offset(),
                         wasm_code->stack_slots());
    safepoint_entry = table.FindEntry(inner_pointer);
    stack_slots = wasm_code->stack_slots();
    has_tagged_params = wasm_code->kind() != wasm::WasmCode::kFunction;
  } else {
    InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry =
        isolate()->inner_pointer_to_code_cache()->GetCacheEntry(inner_pointer);
    if (!entry->safepoint_entry.is_valid()) {
      entry->safepoint_entry = entry->code->GetSafepointEntry(inner_pointer);
      DCHECK(entry->safepoint_entry.is_valid());
    } else {
      DCHECK(entry->safepoint_entry.Equals(
          entry->code->GetSafepointEntry(inner_pointer)));
    }

    code = entry->code;
    safepoint_entry = entry->safepoint_entry;
    stack_slots = code->stack_slots();
    has_tagged_params = code->has_tagged_params();
  }
  uint32_t slot_space = stack_slots * kPointerSize;

  // Determine the fixed header and spill slot area size.
  int frame_header_size = StandardFrameConstants::kFixedFrameSizeFromFp;
  intptr_t marker =
      Memory<intptr_t>(fp() + CommonFrameConstants::kContextOrFrameTypeOffset);
  if (StackFrame::IsTypeMarker(marker)) {
    StackFrame::Type candidate = StackFrame::MarkerToType(marker);
    switch (candidate) {
      case ENTRY:
      case CONSTRUCT_ENTRY:
      case EXIT:
      case BUILTIN_CONTINUATION:
      case JAVA_SCRIPT_BUILTIN_CONTINUATION:
      case JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH:
      case BUILTIN_EXIT:
      case ARGUMENTS_ADAPTOR:
      case STUB:
      case INTERNAL:
      case CONSTRUCT:
      case JS_TO_WASM:
      case C_WASM_ENTRY:
        frame_header_size = TypedFrameConstants::kFixedFrameSizeFromFp;
        break;
      case WASM_TO_JS:
      case WASM_COMPILED:
      case WASM_INTERPRETER_ENTRY:
      case WASM_COMPILE_LAZY:
        frame_header_size = WasmCompiledFrameConstants::kFixedFrameSizeFromFp;
        break;
      case OPTIMIZED:
      case INTERPRETED:
      case BUILTIN:
        // These frame types have a context, but it is stored in the stack slot
        // where other frames store the frame-type marker, so they never carry
        // a type marker and cannot reach this branch.
        UNREACHABLE();
        break;
      case NATIVE:
      case NONE:
      case NUMBER_OF_TYPES:
      case MANUAL:
        UNREACHABLE();
        break;
    }
  }
  slot_space -=
      (frame_header_size + StandardFrameConstants::kFixedFrameSizeAboveFp);
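  // What remains of {slot_space} is, presumably, the size of the spill slot
  // area between the fixed frame header and the parameter area visited below.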

  Object** frame_header_base = &Memory<Object*>(fp() - frame_header_size);
  Object** frame_header_limit =
      &Memory<Object*>(fp() - StandardFrameConstants::kCPSlotSize);
  Object** parameters_base = &Memory<Object*>(sp());
  Object** parameters_limit = frame_header_base - slot_space / kPointerSize;

  // Visit the parameters that may be on top of the saved registers.
  if (safepoint_entry.argument_count() > 0) {
    v->VisitRootPointers(Root::kTop, nullptr, parameters_base,
                         parameters_base + safepoint_entry.argument_count());
    parameters_base += safepoint_entry.argument_count();
  }

  // Skip saved double registers.
  if (safepoint_entry.has_doubles()) {
    // Number of doubles not known at snapshot time.
    DCHECK(!isolate()->serializer_enabled());
    parameters_base +=
        RegisterConfiguration::Default()->num_allocatable_double_registers() *
        kDoubleSize / kPointerSize;
  }

  // Visit the registers that contain pointers if any.
  if (safepoint_entry.HasRegisters()) {
    for (int i = kNumSafepointRegisters - 1; i >= 0; i--) {
      if (safepoint_entry.HasRegisterAt(i)) {
        int reg_stack_index = MacroAssembler::SafepointRegisterStackIndex(i);
        v->VisitRootPointer(Root::kTop, nullptr,
                            parameters_base + reg_stack_index);
      }
    }
    // Skip the words containing the register values.
    parameters_base += kNumSafepointRegisters;
  }

  // We're done dealing with the register bits.
  uint8_t* safepoint_bits = safepoint_entry.bits();
  safepoint_bits += kNumSafepointRegisters >> kBitsPerByteLog2;

  // Visit the rest of the parameters if they are tagged.
  if (has_tagged_params) {
    v->VisitRootPointers(Root::kTop, nullptr, parameters_base,
                         parameters_limit);
  }

  // Visit pointer spill slots and locals.
  for (unsigned index = 0; index < stack_slots; index++) {
    int byte_index = index >> kBitsPerByteLog2;
    int bit_index = index & (kBitsPerByte - 1);
    if ((safepoint_bits[byte_index] & (1U << bit_index)) != 0) {
      v->VisitRootPointer(Root::kTop, nullptr, parameters_limit + index);
    }
  }

  // For the off-heap code cases, we can skip this.
  if (code != nullptr) {
    // Visit the return address in the callee and incoming arguments.
    IteratePc(v, pc_address(), constant_pool_address(), code);
  }

  // If this frame has JavaScript ABI, visit the context (in stub and JS
  // frames) and the function (in JS frames). If it has WebAssembly ABI, visit
  // the instance object.
  v->VisitRootPointers(Root::kTop, nullptr, frame_header_base,
                       frame_header_limit);
}

void StubFrame::Iterate(RootVisitor* v) const { IterateCompiledFrame(v); }

Code* StubFrame::unchecked_code() const {
  return isolate()->FindCodeObject(pc());
}


Address StubFrame::GetCallerStackPointer() const {
  return fp() + ExitFrameConstants::kCallerSPOffset;
}


int StubFrame::GetNumberOfIncomingArguments() const {
  return 0;
}

int StubFrame::LookupExceptionHandlerInTable(int* stack_slots) {
  Code* code = LookupCode();
  DCHECK(code->is_turbofanned());
  DCHECK_EQ(code->kind(), Code::BUILTIN);
  HandlerTable table(code);
  int pc_offset = static_cast<int>(pc() - code->InstructionStart());
  *stack_slots = code->stack_slots();
  return table.LookupReturn(pc_offset);
}

void OptimizedFrame::Iterate(RootVisitor* v) const { IterateCompiledFrame(v); }

void JavaScriptFrame::SetParameterValue(int index, Object* value) const {
  Memory<Object*>(GetParameterSlot(index)) = value;
}


bool JavaScriptFrame::IsConstructor() const {
  Address fp = caller_fp();
  if (has_adapted_arguments()) {
    // Skip the arguments adaptor frame and look at the real caller.
    fp = Memory<Address>(fp + StandardFrameConstants::kCallerFPOffset);
  }
  return IsConstructFrame(fp);
}


bool JavaScriptFrame::HasInlinedFrames() const {
  std::vector<SharedFunctionInfo*> functions;
  GetFunctions(&functions);
  return functions.size() > 1;
}


Code* JavaScriptFrame::unchecked_code() const {
  return function()->code();
}


int JavaScriptFrame::GetNumberOfIncomingArguments() const {
  DCHECK(can_access_heap_objects() &&
         isolate()->heap()->gc_state() == Heap::NOT_IN_GC);
  return function()->shared()->internal_formal_parameter_count();
}

int OptimizedFrame::GetNumberOfIncomingArguments() const {
  Code* code = LookupCode();
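  // For builtins with JavaScript linkage the argument count is not implied by
  // the SharedFunctionInfo; it is spilled into the frame at kArgCOffset.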
  if (code->kind() == Code::BUILTIN) {
    return static_cast<int>(
        Memory<intptr_t>(fp() + OptimizedBuiltinFrameConstants::kArgCOffset));
  } else {
    return JavaScriptFrame::GetNumberOfIncomingArguments();
  }
}

Address JavaScriptFrame::GetCallerStackPointer() const {
  return fp() + StandardFrameConstants::kCallerSPOffset;
}

void JavaScriptFrame::GetFunctions(
    std::vector<SharedFunctionInfo*>* functions) const {
  DCHECK(functions->empty());
  functions->push_back(function()->shared());
}

void JavaScriptFrame::GetFunctions(
    std::vector<Handle<SharedFunctionInfo>>* functions) const {
  DCHECK(functions->empty());
  std::vector<SharedFunctionInfo*> raw_functions;
  GetFunctions(&raw_functions);
  for (const auto& raw_function : raw_functions) {
    functions->push_back(
        Handle<SharedFunctionInfo>(raw_function, function()->GetIsolate()));
  }
}

void JavaScriptFrame::Summarize(std::vector<FrameSummary>* functions) const {
  DCHECK(functions->empty());
  Code* code = LookupCode();
  int offset = static_cast<int>(pc() - code->InstructionStart());
  AbstractCode* abstract_code = AbstractCode::cast(code);
  FrameSummary::JavaScriptFrameSummary summary(isolate(), receiver(),
                                               function(), abstract_code,
                                               offset, IsConstructor());
  functions->push_back(summary);
}

JSFunction* JavaScriptFrame::function() const {
  return JSFunction::cast(function_slot_object());
}

Object* JavaScriptFrame::unchecked_function() const {
  // During deoptimization of an optimized function, we may have yet to
  // materialize some closures on the stack. The arguments marker object
  // marks this case.
  DCHECK(function_slot_object()->IsJSFunction() ||
         ReadOnlyRoots(isolate()).arguments_marker() == function_slot_object());
  return function_slot_object();
}

Object* JavaScriptFrame::receiver() const { return GetParameter(-1); }

Object* JavaScriptFrame::context() const {
  const int offset = StandardFrameConstants::kContextOffset;
  Object* maybe_result = Memory<Object*>(fp() + offset);
  DCHECK(!maybe_result->IsSmi());
  return maybe_result;
}

Script* JavaScriptFrame::script() const {
  return Script::cast(function()->shared()->script());
}

int JavaScriptFrame::LookupExceptionHandlerInTable(
    int* stack_depth, HandlerTable::CatchPrediction* prediction) {
  DCHECK_EQ(0, LookupCode()->handler_table_offset());
  DCHECK(!LookupCode()->is_optimized_code());
  return -1;
}

void JavaScriptFrame::PrintFunctionAndOffset(JSFunction* function,
                                             AbstractCode* code,
                                             int code_offset, FILE* file,
                                             bool print_line_number) {
  PrintF(file, "%s", function->IsOptimized() ? "*" : "~");
  function->PrintName(file);
  PrintF(file, "+%d", code_offset);
  if (print_line_number) {
    SharedFunctionInfo* shared = function->shared();
    int source_pos = code->SourcePosition(code_offset);
    Object* maybe_script = shared->script();
    if (maybe_script->IsScript()) {
      Script* script = Script::cast(maybe_script);
      int line = script->GetLineNumber(source_pos) + 1;
      Object* script_name_raw = script->name();
      if (script_name_raw->IsString()) {
        String* script_name = String::cast(script->name());
        std::unique_ptr<char[]> c_script_name =
            script_name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
        PrintF(file, " at %s:%d", c_script_name.get(), line);
      } else {
        PrintF(file, " at <unknown>:%d", line);
      }
    } else {
      PrintF(file, " at <unknown>:<unknown>");
    }
  }
}

void JavaScriptFrame::PrintTop(Isolate* isolate, FILE* file, bool print_args,
                               bool print_line_number) {
  // constructor calls
  DisallowHeapAllocation no_allocation;
  JavaScriptFrameIterator it(isolate);
  while (!it.done()) {
    if (it.frame()->is_java_script()) {
      JavaScriptFrame* frame = it.frame();
      if (frame->IsConstructor()) PrintF(file, "new ");
      JSFunction* function = frame->function();
      int code_offset = 0;
      if (frame->is_interpreted()) {
        InterpretedFrame* iframe = reinterpret_cast<InterpretedFrame*>(frame);
        code_offset = iframe->GetBytecodeOffset();
      } else {
        Code* code = frame->unchecked_code();
        code_offset = static_cast<int>(frame->pc() - code->InstructionStart());
      }
      PrintFunctionAndOffset(function, function->abstract_code(), code_offset,
                             file, print_line_number);
      if (print_args) {
        // function arguments
        // (we are intentionally only printing the actually
        // supplied parameters, not all parameters required)
        PrintF(file, "(this=");
        frame->receiver()->ShortPrint(file);
        const int length = frame->ComputeParametersCount();
        for (int i = 0; i < length; i++) {
          PrintF(file, ", ");
          frame->GetParameter(i)->ShortPrint(file);
        }
        PrintF(file, ")");
      }
      break;
    }
    it.Advance();
  }
}

void JavaScriptFrame::CollectFunctionAndOffsetForICStats(JSFunction* function,
                                                         AbstractCode* code,
                                                         int code_offset) {
  auto ic_stats = ICStats::instance();
  ICInfo& ic_info = ic_stats->Current();
  SharedFunctionInfo* shared = function->shared();

  ic_info.function_name = ic_stats->GetOrCacheFunctionName(function);
  ic_info.script_offset = code_offset;

  int source_pos = code->SourcePosition(code_offset);
  Object* maybe_script = shared->script();
  if (maybe_script->IsScript()) {
    Script* script = Script::cast(maybe_script);
    ic_info.line_num = script->GetLineNumber(source_pos) + 1;
    ic_info.script_name = ic_stats->GetOrCacheScriptName(script);
  }
}

void JavaScriptFrame::CollectTopFrameForICStats(Isolate* isolate) {
  // constructor calls
  DisallowHeapAllocation no_allocation;
  JavaScriptFrameIterator it(isolate);
  ICInfo& ic_info = ICStats::instance()->Current();
  while (!it.done()) {
    if (it.frame()->is_java_script()) {
      JavaScriptFrame* frame = it.frame();
      if (frame->IsConstructor()) ic_info.is_constructor = true;
      JSFunction* function = frame->function();
      int code_offset = 0;
      if (frame->is_interpreted()) {
        InterpretedFrame* iframe = reinterpret_cast<InterpretedFrame*>(frame);
        code_offset = iframe->GetBytecodeOffset();
      } else {
        Code* code = frame->unchecked_code();
        code_offset = static_cast<int>(frame->pc() - code->InstructionStart());
      }
      CollectFunctionAndOffsetForICStats(function, function->abstract_code(),
                                         code_offset);
      return;
    }
    it.Advance();
  }
}

Object* JavaScriptFrame::GetParameter(int index) const {
  return Memory<Object*>(GetParameterSlot(index));
}

int JavaScriptFrame::ComputeParametersCount() const {
  return GetNumberOfIncomingArguments();
}

int JavaScriptBuiltinContinuationFrame::ComputeParametersCount() const {
  // Assert that the first allocatable register is also the argument count
  // register.
  DCHECK_EQ(RegisterConfiguration::Default()->GetAllocatableGeneralCode(0),
            kJavaScriptCallArgCountRegister.code());
  Object* argc_object =
      Memory<Object*>(fp() + BuiltinContinuationFrameConstants::kArgCOffset);
  return Smi::ToInt(argc_object);
}

intptr_t JavaScriptBuiltinContinuationFrame::GetSPToFPDelta() const {
  Address height_slot =
      fp() + BuiltinContinuationFrameConstants::kFrameSPtoFPDeltaAtDeoptimize;
  intptr_t height = Smi::ToInt(*reinterpret_cast<Smi**>(height_slot));
  return height;
}

Object* JavaScriptBuiltinContinuationFrame::context() const {
  return Memory<Object*>(
      fp() + BuiltinContinuationFrameConstants::kBuiltinContextOffset);
}

void JavaScriptBuiltinContinuationWithCatchFrame::SetException(
    Object* exception) {
  Address exception_argument_slot =
      fp() + JavaScriptFrameConstants::kLastParameterOffset +
      kPointerSize;  // Skip over return value slot.

  // Only allow setting exception if previous value was the hole.
  CHECK_EQ(ReadOnlyRoots(isolate()).the_hole_value(),
           Memory<Object*>(exception_argument_slot));
  Memory<Object*>(exception_argument_slot) = exception;
}
FrameSummary::JavaScriptFrameSummary::JavaScriptFrameSummary(
    Isolate* isolate, Object* receiver, JSFunction* function,
    AbstractCode* abstract_code, int code_offset, bool is_constructor)
    : FrameSummaryBase(isolate, FrameSummary::JAVA_SCRIPT),
      receiver_(receiver, isolate),
      function_(function, isolate),
      abstract_code_(abstract_code, isolate),
      code_offset_(code_offset),
      is_constructor_(is_constructor) {
  DCHECK(abstract_code->IsBytecodeArray() ||
         Code::cast(abstract_code)->kind() != Code::OPTIMIZED_FUNCTION);
}

bool FrameSummary::JavaScriptFrameSummary::is_subject_to_debugging() const {
  return function()->shared()->IsSubjectToDebugging();
}

int FrameSummary::JavaScriptFrameSummary::SourcePosition() const {
  return abstract_code()->SourcePosition(code_offset());
}

int FrameSummary::JavaScriptFrameSummary::SourceStatementPosition() const {
  return abstract_code()->SourceStatementPosition(code_offset());
}

Handle<Object> FrameSummary::JavaScriptFrameSummary::script() const {
  return handle(function_->shared()->script(), isolate());
}

Handle<String> FrameSummary::JavaScriptFrameSummary::FunctionName() const {
  return JSFunction::GetDebugName(function_);
}

Handle<Context> FrameSummary::JavaScriptFrameSummary::native_context() const {
  return handle(function_->context()->native_context(), isolate());
}

FrameSummary::WasmFrameSummary::WasmFrameSummary(
    Isolate* isolate, FrameSummary::Kind kind,
    Handle<WasmInstanceObject> instance, bool at_to_number_conversion)
    : FrameSummaryBase(isolate, kind),
      wasm_instance_(instance),
      at_to_number_conversion_(at_to_number_conversion) {}

Handle<Object> FrameSummary::WasmFrameSummary::receiver() const {
  return wasm_instance_->GetIsolate()->global_proxy();
}

#define WASM_SUMMARY_DISPATCH(type, name)                                      \
  type FrameSummary::WasmFrameSummary::name() const {                          \
    DCHECK(kind() == Kind::WASM_COMPILED || kind() == Kind::WASM_INTERPRETED); \
    return kind() == Kind::WASM_COMPILED                                       \
               ? static_cast<const WasmCompiledFrameSummary*>(this)->name()    \
               : static_cast<const WasmInterpretedFrameSummary*>(this)         \
                     ->name();                                                 \
  }

WASM_SUMMARY_DISPATCH(uint32_t, function_index)
WASM_SUMMARY_DISPATCH(int, byte_offset)

#undef WASM_SUMMARY_DISPATCH

int FrameSummary::WasmFrameSummary::SourcePosition() const {
  Handle<WasmModuleObject> module_object(wasm_instance()->module_object(),
                                         isolate());
  return WasmModuleObject::GetSourcePosition(module_object, function_index(),
                                             byte_offset(),
                                             at_to_number_conversion());
}

Handle<Script> FrameSummary::WasmFrameSummary::script() const {
  return handle(wasm_instance()->module_object()->script(),
                wasm_instance()->GetIsolate());
}

Handle<String> FrameSummary::WasmFrameSummary::FunctionName() const {
  Handle<WasmModuleObject> module_object(wasm_instance()->module_object(),
                                         isolate());
  return WasmModuleObject::GetFunctionName(isolate(), module_object,
                                           function_index());
}

Handle<Context> FrameSummary::WasmFrameSummary::native_context() const {
  return handle(wasm_instance()->native_context(), isolate());
}

FrameSummary::WasmCompiledFrameSummary::WasmCompiledFrameSummary(
    Isolate* isolate, Handle<WasmInstanceObject> instance, wasm::WasmCode* code,
    int code_offset, bool at_to_number_conversion)
    : WasmFrameSummary(isolate, WASM_COMPILED, instance,
                       at_to_number_conversion),
      code_(code),
      code_offset_(code_offset) {}

uint32_t FrameSummary::WasmCompiledFrameSummary::function_index() const {
  return code()->index();
}

int FrameSummary::WasmCompiledFrameSummary::GetWasmSourcePosition(
    const wasm::WasmCode* code, int offset) {
  int position = 0;
  // Subtract one because the current PC is one instruction after the call site.
  offset--;
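  // Keep the last source position whose code offset is at or before {offset};
  // entries in the source position table are ordered by code offset.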
  for (SourcePositionTableIterator iterator(code->source_positions());
       !iterator.done() && iterator.code_offset() <= offset;
       iterator.Advance()) {
    position = iterator.source_position().ScriptOffset();
  }
  return position;
}

int FrameSummary::WasmCompiledFrameSummary::byte_offset() const {
  return GetWasmSourcePosition(code_, code_offset());
}

FrameSummary::WasmInterpretedFrameSummary::WasmInterpretedFrameSummary(
    Isolate* isolate, Handle<WasmInstanceObject> instance,
    uint32_t function_index, int byte_offset)
    : WasmFrameSummary(isolate, WASM_INTERPRETED, instance, false),
      function_index_(function_index),
      byte_offset_(byte_offset) {}

FrameSummary::~FrameSummary() {
#define FRAME_SUMMARY_DESTR(kind, type, field, desc) \
  case kind:                                         \
    field.~type();                                   \
    break;
  switch (base_.kind()) {
    FRAME_SUMMARY_VARIANTS(FRAME_SUMMARY_DESTR)
    default:
      UNREACHABLE();
  }
#undef FRAME_SUMMARY_DESTR
}

FrameSummary FrameSummary::GetTop(const StandardFrame* frame) {
  std::vector<FrameSummary> frames;
  frame->Summarize(&frames);
  DCHECK_LT(0, frames.size());
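  // Summaries are recorded bottom-to-top, so the innermost (top) frame is the
  // last element.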
  return frames.back();
}

FrameSummary FrameSummary::GetBottom(const StandardFrame* frame) {
  return Get(frame, 0);
}

FrameSummary FrameSummary::GetSingle(const StandardFrame* frame) {
  std::vector<FrameSummary> frames;
  frame->Summarize(&frames);
  DCHECK_EQ(1, frames.size());
  return frames.front();
}

FrameSummary FrameSummary::Get(const StandardFrame* frame, int index) {
  DCHECK_LE(0, index);
  std::vector<FrameSummary> frames;
  frame->Summarize(&frames);
  DCHECK_GT(frames.size(), index);
  return frames[index];
}

#define FRAME_SUMMARY_DISPATCH(ret, name)           \
  ret FrameSummary::name() const {                  \
    switch (base_.kind()) {                         \
      case JAVA_SCRIPT:                             \
        return java_script_summary_.name();         \
      case WASM_COMPILED:                           \
        return wasm_compiled_summary_.name();       \
      case WASM_INTERPRETED:                        \
        return wasm_interpreted_summary_.name();    \
      default:                                      \
        UNREACHABLE();                              \
        return ret{};                               \
    }                                               \
  }

FRAME_SUMMARY_DISPATCH(Handle<Object>, receiver)
FRAME_SUMMARY_DISPATCH(int, code_offset)
FRAME_SUMMARY_DISPATCH(bool, is_constructor)
FRAME_SUMMARY_DISPATCH(bool, is_subject_to_debugging)
FRAME_SUMMARY_DISPATCH(Handle<Object>, script)
FRAME_SUMMARY_DISPATCH(int, SourcePosition)
FRAME_SUMMARY_DISPATCH(int, SourceStatementPosition)
FRAME_SUMMARY_DISPATCH(Handle<String>, FunctionName)
FRAME_SUMMARY_DISPATCH(Handle<Context>, native_context)

#undef FRAME_SUMMARY_DISPATCH
1447 void OptimizedFrame::Summarize(std::vector<FrameSummary>* frames) const {
1448 DCHECK(frames->empty());
1449 DCHECK(is_optimized());
1450
1451 // Delegate to JS frame in absence of turbofan deoptimization.
1452 // TODO(turbofan): Revisit once we support deoptimization across the board.
1453 Code* code = LookupCode();
1454 if (code->kind() == Code::BUILTIN) {
1455 return JavaScriptFrame::Summarize(frames);
1456 }
1457
1458 int deopt_index = Safepoint::kNoDeoptimizationIndex;
1459 DeoptimizationData* const data = GetDeoptimizationData(&deopt_index);
1460 if (deopt_index == Safepoint::kNoDeoptimizationIndex) {
1461 CHECK_NULL(data);
1462 FATAL("Missing deoptimization information for OptimizedFrame::Summarize.");
1463 }
1464
1465 // Prepare iteration over translation. Note that the iteration below might
1466 // materialize objects without storing them back to the Isolate; this will
1467 // lead to objects being re-materialized for each summary.
1468 TranslatedState translated(this);
1469 translated.Prepare(fp());
1470
1471 // We create the summary in reverse order because the frames
1472 // in the deoptimization translation are ordered bottom-to-top.
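// Illustrative example: if f() was inlined into g(), the translation lists
// g (bottom) before f (top), so the summaries pushed below end up ordered
// outermost-to-innermost and FrameSummary::GetTop() yields f's summary.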
1473 bool is_constructor = IsConstructor();
1474 for (auto it = translated.begin(); it != translated.end(); it++) {
1475 if (it->kind() == TranslatedFrame::kInterpretedFunction ||
1476 it->kind() == TranslatedFrame::kJavaScriptBuiltinContinuation ||
1477 it->kind() ==
1478 TranslatedFrame::kJavaScriptBuiltinContinuationWithCatch) {
1479 Handle<SharedFunctionInfo> shared_info = it->shared_info();
1480
1481 // The translation commands are ordered: the function is always at the
1482 // first position, and the receiver comes next.
1483 TranslatedFrame::iterator translated_values = it->begin();
1484
1485 // Get or materialize the correct function in the optimized frame.
1486 Handle<JSFunction> function =
1487 Handle<JSFunction>::cast(translated_values->GetValue());
1488 translated_values++;
1489
1490 // Get or materialize the correct receiver in the optimized frame.
1491 Handle<Object> receiver = translated_values->GetValue();
1492 translated_values++;
1493
1494 // Determine the underlying code object and the position within it from
1495 // the translation corresponding to the frame type in question.
1496 Handle<AbstractCode> abstract_code;
1497 unsigned code_offset;
1498 if (it->kind() == TranslatedFrame::kJavaScriptBuiltinContinuation ||
1499 it->kind() ==
1500 TranslatedFrame::kJavaScriptBuiltinContinuationWithCatch) {
1501 code_offset = 0;
1502 abstract_code =
1503 handle(AbstractCode::cast(isolate()->builtins()->builtin(
1504 Builtins::GetBuiltinFromBailoutId(it->node_id()))),
1505 isolate());
1506 } else {
1507 DCHECK_EQ(it->kind(), TranslatedFrame::kInterpretedFunction);
1508 code_offset = it->node_id().ToInt(); // Points to current bytecode.
1509 abstract_code = handle(shared_info->abstract_code(), isolate());
1510 }
1511
1512 // Append full summary of the encountered JS frame.
1513 FrameSummary::JavaScriptFrameSummary summary(isolate(), *receiver,
1514 *function, *abstract_code,
1515 code_offset, is_constructor);
1516 frames->push_back(summary);
1517 is_constructor = false;
1518 } else if (it->kind() == TranslatedFrame::kConstructStub) {
1519 // The next encountered JS frame will be marked as a constructor call.
1520 DCHECK(!is_constructor);
1521 is_constructor = true;
1522 }
1523 }
1524 }
1525
1526
1527 int OptimizedFrame::LookupExceptionHandlerInTable(
1528 int* stack_slots, HandlerTable::CatchPrediction* prediction) {
1529 // We cannot perform exception prediction on optimized code. Instead, we need
1530 // to use FrameSummary to find the corresponding code offset in unoptimized
1531 // code to perform prediction there.
1532 DCHECK_NULL(prediction);
1533 Code* code = LookupCode();
1534 HandlerTable table(code);
1535 int pc_offset = static_cast<int>(pc() - code->InstructionStart());
1536 if (stack_slots) *stack_slots = code->stack_slots();
1537
1538 // When the return pc has been replaced by a trampoline there won't be
1539 // a handler for this trampoline. Thus we need to use the return pc that
1540 // _used to be_ on the stack to get the right ExceptionHandler.
1541 if (code->kind() == Code::OPTIMIZED_FUNCTION &&
1542 code->marked_for_deoptimization()) {
1543 SafepointTable safepoints(code);
1544 pc_offset = safepoints.find_return_pc(pc_offset);
1545 }
1546 return table.LookupReturn(pc_offset);
1547 }
1548
1549 DeoptimizationData* OptimizedFrame::GetDeoptimizationData(
1550 int* deopt_index) const {
1551 DCHECK(is_optimized());
1552
1553 JSFunction* opt_function = function();
1554 Code* code = opt_function->code();
1555
1556 // The code object may have been replaced by lazy deoptimization. Fall
1557 // back to a slow search in this case to find the original optimized
1558 // code object.
1559 if (!code->contains(pc())) {
1560 code = isolate()->heap()->GcSafeFindCodeForInnerPointer(pc());
1561 }
1562 DCHECK_NOT_NULL(code);
1563 DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
1564
1565 SafepointEntry safepoint_entry = code->GetSafepointEntry(pc());
1566 *deopt_index = safepoint_entry.deoptimization_index();
1567 if (*deopt_index != Safepoint::kNoDeoptimizationIndex) {
1568 return DeoptimizationData::cast(code->deoptimization_data());
1569 }
1570 return nullptr;
1571 }
1572
1573 Object* OptimizedFrame::receiver() const {
1574 Code* code = LookupCode();
1575 if (code->kind() == Code::BUILTIN) {
1576 Address argc_ptr = fp() + OptimizedBuiltinFrameConstants::kArgCOffset;
1577 intptr_t argc = *reinterpret_cast<intptr_t*>(argc_ptr);
1578 intptr_t args_size =
1579 (StandardFrameConstants::kFixedSlotCountAboveFp + argc) * kPointerSize;
1580 Address receiver_ptr = fp() + args_size;
1581 return *reinterpret_cast<Object**>(receiver_ptr);
1582 } else {
1583 return JavaScriptFrame::receiver();
1584 }
1585 }
1586
1587 void OptimizedFrame::GetFunctions(
1588 std::vector<SharedFunctionInfo*>* functions) const {
1589 DCHECK(functions->empty());
1590 DCHECK(is_optimized());
1591
1592 // Delegate to JS frame in absence of turbofan deoptimization.
1593 // TODO(turbofan): Revisit once we support deoptimization across the board.
1594 Code* code = LookupCode();
1595 if (code->kind() == Code::BUILTIN) {
1596 return JavaScriptFrame::GetFunctions(functions);
1597 }
1598
1599 DisallowHeapAllocation no_gc;
1600 int deopt_index = Safepoint::kNoDeoptimizationIndex;
1601 DeoptimizationData* const data = GetDeoptimizationData(&deopt_index);
1602 DCHECK_NOT_NULL(data);
1603 DCHECK_NE(Safepoint::kNoDeoptimizationIndex, deopt_index);
1604 FixedArray* const literal_array = data->LiteralArray();
1605
1606 TranslationIterator it(data->TranslationByteArray(),
1607 data->TranslationIndex(deopt_index)->value());
1608 Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
1609 DCHECK_EQ(Translation::BEGIN, opcode);
1610 it.Next(); // Skip frame count.
1611 int jsframe_count = it.Next();
1612 it.Next(); // Skip update feedback count.
1613
1614 // We insert the frames in reverse order because the frames
1615 // in the deoptimization translation are ordered bottom-to-top.
1616 while (jsframe_count != 0) {
1617 opcode = static_cast<Translation::Opcode>(it.Next());
1618 if (opcode == Translation::INTERPRETED_FRAME ||
1619 opcode == Translation::JAVA_SCRIPT_BUILTIN_CONTINUATION_FRAME ||
1620 opcode ==
1621 Translation::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH_FRAME) {
1622 it.Next(); // Skip bailout id.
1623 jsframe_count--;
1624
1625 // The second operand of the frame points to the function.
1626 Object* shared = literal_array->get(it.Next());
1627 functions->push_back(SharedFunctionInfo::cast(shared));
1628
1629 // Skip over remaining operands to advance to the next opcode.
1630 it.Skip(Translation::NumberOfOperandsFor(opcode) - 2);
1631 } else {
1632 // Skip over operands to advance to the next opcode.
1633 it.Skip(Translation::NumberOfOperandsFor(opcode));
1634 }
1635 }
1636 }
1637
1638
1639 int OptimizedFrame::StackSlotOffsetRelativeToFp(int slot_index) {
1640 return StandardFrameConstants::kCallerSPOffset -
1641 ((slot_index + 1) * kPointerSize);
1642 }
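// Worked example (assuming a 64-bit target where kPointerSize == 8 and
// StandardFrameConstants::kCallerSPOffset == 2 * kPointerSize): slot_index 0
// maps to fp + 8, slot_index 1 to fp + 0, slot_index 2 to fp - 8, and so on.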
1643
1644
1645 Object* OptimizedFrame::StackSlotAt(int index) const {
1646 return Memory<Object*>(fp() + StackSlotOffsetRelativeToFp(index));
1647 }
1648
1649 int InterpretedFrame::position() const {
1650 AbstractCode* code = AbstractCode::cast(GetBytecodeArray());
1651 int code_offset = GetBytecodeOffset();
1652 return code->SourcePosition(code_offset);
1653 }
1654
1655 int InterpretedFrame::LookupExceptionHandlerInTable(
1656 int* context_register, HandlerTable::CatchPrediction* prediction) {
1657 HandlerTable table(function()->shared()->GetBytecodeArray());
1658 return table.LookupRange(GetBytecodeOffset(), context_register, prediction);
1659 }
1660
1661 int InterpretedFrame::GetBytecodeOffset() const {
1662 const int index = InterpreterFrameConstants::kBytecodeOffsetExpressionIndex;
1663 DCHECK_EQ(
1664 InterpreterFrameConstants::kBytecodeOffsetFromFp,
1665 InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
1666 int raw_offset = Smi::ToInt(GetExpression(index));
1667 return raw_offset - BytecodeArray::kHeaderSize + kHeapObjectTag;
1668 }
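// The interpreter keeps its bytecode offset relative to the tagged
// BytecodeArray pointer (tagged pointer + offset is the address of the
// current bytecode), so converting to an offset into the bytecode stream
// subtracts the header size and re-adds the heap-object tag. The static
// variant and PatchBytecodeOffset below apply the same (or inverse)
// conversion.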
1669
1670 int InterpretedFrame::GetBytecodeOffset(Address fp) {
1671 const int offset = InterpreterFrameConstants::kExpressionsOffset;
1672 const int index = InterpreterFrameConstants::kBytecodeOffsetExpressionIndex;
1673 DCHECK_EQ(
1674 InterpreterFrameConstants::kBytecodeOffsetFromFp,
1675 InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
1676 Address expression_offset = fp + offset - index * kPointerSize;
1677 int raw_offset = Smi::ToInt(Memory<Object*>(expression_offset));
1678 return raw_offset - BytecodeArray::kHeaderSize + kHeapObjectTag;
1679 }
1680
1681 void InterpretedFrame::PatchBytecodeOffset(int new_offset) {
1682 const int index = InterpreterFrameConstants::kBytecodeOffsetExpressionIndex;
1683 DCHECK_EQ(
1684 InterpreterFrameConstants::kBytecodeOffsetFromFp,
1685 InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
1686 int raw_offset = new_offset + BytecodeArray::kHeaderSize - kHeapObjectTag;
1687 SetExpression(index, Smi::FromInt(raw_offset));
1688 }
1689
1690 BytecodeArray* InterpretedFrame::GetBytecodeArray() const {
1691 const int index = InterpreterFrameConstants::kBytecodeArrayExpressionIndex;
1692 DCHECK_EQ(
1693 InterpreterFrameConstants::kBytecodeArrayFromFp,
1694 InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
1695 return BytecodeArray::cast(GetExpression(index));
1696 }
1697
1698 void InterpretedFrame::PatchBytecodeArray(BytecodeArray* bytecode_array) {
1699 const int index = InterpreterFrameConstants::kBytecodeArrayExpressionIndex;
1700 DCHECK_EQ(
1701 InterpreterFrameConstants::kBytecodeArrayFromFp,
1702 InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
1703 SetExpression(index, bytecode_array);
1704 }
1705
1706 Object* InterpretedFrame::ReadInterpreterRegister(int register_index) const {
1707 const int index = InterpreterFrameConstants::kRegisterFileExpressionIndex;
1708 DCHECK_EQ(
1709 InterpreterFrameConstants::kRegisterFileFromFp,
1710 InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
1711 return GetExpression(index + register_index);
1712 }
1713
1714 void InterpretedFrame::WriteInterpreterRegister(int register_index,
1715 Object* value) {
1716 const int index = InterpreterFrameConstants::kRegisterFileExpressionIndex;
1717 DCHECK_EQ(
1718 InterpreterFrameConstants::kRegisterFileFromFp,
1719 InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
1720 return SetExpression(index + register_index, value);
1721 }
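// In other words, interpreter register i occupies expression slot
// (kRegisterFileExpressionIndex + i), i.e. it lives at
// fp + kRegisterFileFromFp - i * kPointerSize.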
1722
1723 void InterpretedFrame::Summarize(std::vector<FrameSummary>* functions) const {
1724 DCHECK(functions->empty());
1725 AbstractCode* abstract_code =
1726 AbstractCode::cast(function()->shared()->GetBytecodeArray());
1727 FrameSummary::JavaScriptFrameSummary summary(
1728 isolate(), receiver(), function(), abstract_code, GetBytecodeOffset(),
1729 IsConstructor());
1730 functions->push_back(summary);
1731 }
1732
1733 int ArgumentsAdaptorFrame::GetNumberOfIncomingArguments() const {
1734 return Smi::ToInt(GetExpression(0));
1735 }
1736
1737 Code* ArgumentsAdaptorFrame::unchecked_code() const {
1738 return isolate()->builtins()->builtin(
1739 Builtins::kArgumentsAdaptorTrampoline);
1740 }
1741
1742 int BuiltinFrame::GetNumberOfIncomingArguments() const {
1743 return Smi::ToInt(GetExpression(0));
1744 }
1745
1746 void BuiltinFrame::PrintFrameKind(StringStream* accumulator) const {
1747 accumulator->Add("builtin frame: ");
1748 }
1749
1750 Address InternalFrame::GetCallerStackPointer() const {
1751 // Internal frames have no arguments. The stack pointer of the
1752 // caller is at a fixed offset from the frame pointer.
1753 return fp() + StandardFrameConstants::kCallerSPOffset;
1754 }
1755
1756 Code* InternalFrame::unchecked_code() const { UNREACHABLE(); }
1757
1758 void WasmCompiledFrame::Print(StringStream* accumulator, PrintMode mode,
1759 int index) const {
1760 PrintIndex(accumulator, mode, index);
1761 accumulator->Add("WASM [");
1762 accumulator->PrintName(script()->name());
1763 Address instruction_start = isolate()
1764 ->wasm_engine()
1765 ->code_manager()
1766 ->LookupCode(pc())
1767 ->instruction_start();
1768 Vector<const uint8_t> raw_func_name =
1769 module_object()->GetRawFunctionName(function_index());
1770 const int kMaxPrintedFunctionName = 64;
1771 char func_name[kMaxPrintedFunctionName + 1];
1772 int func_name_len = std::min(kMaxPrintedFunctionName, raw_func_name.length());
1773 memcpy(func_name, raw_func_name.start(), func_name_len);
1774 func_name[func_name_len] = '\0';
1775 int pos = position();
1776 const wasm::WasmModule* module = wasm_instance()->module_object()->module();
1777 int func_index = function_index();
1778 int func_code_offset = module->functions[func_index].code.offset();
1779 accumulator->Add("], function #%u ('%s'), pc=%p (+0x%x), pos=%d (+%d)\n",
1780 func_index, func_name, reinterpret_cast<void*>(pc()),
1781 static_cast<int>(pc() - instruction_start), pos,
1782 pos - func_code_offset);
1783 if (mode != OVERVIEW) accumulator->Add("\n");
1784 }
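// Example of the output produced above (illustrative; name, addresses and
// offsets made up):
//   WASM [wasm_module], function #3 ('add'), pc=0x3f2a00c0 (+0x24), pos=57 (+5)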
1785
1786 Code* WasmCompiledFrame::unchecked_code() const {
1787 return isolate()->FindCodeObject(pc());
1788 }
1789
1790 void WasmCompiledFrame::Iterate(RootVisitor* v) const {
1791 IterateCompiledFrame(v);
1792 }
1793
1794 Address WasmCompiledFrame::GetCallerStackPointer() const {
1795 return fp() + ExitFrameConstants::kCallerSPOffset;
1796 }
1797
1798 wasm::WasmCode* WasmCompiledFrame::wasm_code() const {
1799 return isolate()->wasm_engine()->code_manager()->LookupCode(pc());
1800 }
1801
1802 WasmInstanceObject* WasmCompiledFrame::wasm_instance() const {
1803 const int offset = WasmCompiledFrameConstants::kWasmInstanceOffset;
1804 Object* instance = Memory<Object*>(fp() + offset);
1805 return WasmInstanceObject::cast(instance);
1806 }
1807
1808 WasmModuleObject* WasmCompiledFrame::module_object() const {
1809 return wasm_instance()->module_object();
1810 }
1811
1812 uint32_t WasmCompiledFrame::function_index() const {
1813 return FrameSummary::GetSingle(this).AsWasmCompiled().function_index();
1814 }
1815
1816 Script* WasmCompiledFrame::script() const { return module_object()->script(); }
1817
1818 int WasmCompiledFrame::position() const {
1819 return FrameSummary::GetSingle(this).SourcePosition();
1820 }
1821
1822 void WasmCompiledFrame::Summarize(std::vector<FrameSummary>* functions) const {
1823 DCHECK(functions->empty());
1824 wasm::WasmCode* code = wasm_code();
1825 int offset = static_cast<int>(pc() - code->instruction_start());
1826 Handle<WasmInstanceObject> instance(wasm_instance(), isolate());
1827 FrameSummary::WasmCompiledFrameSummary summary(
1828 isolate(), instance, code, offset, at_to_number_conversion());
1829 functions->push_back(summary);
1830 }
1831
1832 bool WasmCompiledFrame::at_to_number_conversion() const {
1833 // Check whether our callee is a WASM_TO_JS frame, and this frame is at the
1834 // ToNumber conversion call.
1835 wasm::WasmCode* code =
1836 callee_pc() != kNullAddress
1837 ? isolate()->wasm_engine()->code_manager()->LookupCode(callee_pc())
1838 : nullptr;
1839 if (!code || code->kind() != wasm::WasmCode::kWasmToJsWrapper) return false;
1840 int offset = static_cast<int>(callee_pc() - code->instruction_start());
1841 int pos = FrameSummary::WasmCompiledFrameSummary::GetWasmSourcePosition(
1842 code, offset);
1843 DCHECK(pos == 0 || pos == 1);
1844 // The imported call has position 0, ToNumber has position 1.
1845 return !!pos;
1846 }
1847
1848 int WasmCompiledFrame::LookupExceptionHandlerInTable(int* stack_slots) {
1849 DCHECK_NOT_NULL(stack_slots);
1850 wasm::WasmCode* code =
1851 isolate()->wasm_engine()->code_manager()->LookupCode(pc());
1852 if (!code->IsAnonymous() && code->handler_table_offset() > 0) {
1853 HandlerTable table(code->instruction_start(), code->handler_table_offset());
1854 int pc_offset = static_cast<int>(pc() - code->instruction_start());
1855 *stack_slots = static_cast<int>(code->stack_slots());
1856 return table.LookupReturn(pc_offset);
1857 }
1858 return -1;
1859 }
1860
1861 void WasmInterpreterEntryFrame::Iterate(RootVisitor* v) const {
1862 IterateCompiledFrame(v);
1863 }
1864
1865 void WasmInterpreterEntryFrame::Print(StringStream* accumulator, PrintMode mode,
1866 int index) const {
1867 PrintIndex(accumulator, mode, index);
1868 accumulator->Add("WASM INTERPRETER ENTRY [");
1869 Script* script = this->script();
1870 accumulator->PrintName(script->name());
1871 accumulator->Add("]");
1872 if (mode != OVERVIEW) accumulator->Add("\n");
1873 }
1874
1875 void WasmInterpreterEntryFrame::Summarize(
1876 std::vector<FrameSummary>* functions) const {
1877 Handle<WasmInstanceObject> instance(wasm_instance(), isolate());
1878 std::vector<std::pair<uint32_t, int>> interpreted_stack =
1879 instance->debug_info()->GetInterpretedStack(fp());
1880
1881 for (auto& e : interpreted_stack) {
1882 FrameSummary::WasmInterpretedFrameSummary summary(isolate(), instance,
1883 e.first, e.second);
1884 functions->push_back(summary);
1885 }
1886 }
1887
1888 Code* WasmInterpreterEntryFrame::unchecked_code() const { UNREACHABLE(); }
1889
1890 WasmInstanceObject* WasmInterpreterEntryFrame::wasm_instance() const {
1891 const int offset = WasmCompiledFrameConstants::kWasmInstanceOffset;
1892 Object* instance = Memory<Object*>(fp() + offset);
1893 return WasmInstanceObject::cast(instance);
1894 }
1895
1896 WasmDebugInfo* WasmInterpreterEntryFrame::debug_info() const {
1897 return wasm_instance()->debug_info();
1898 }
1899
1900 WasmModuleObject* WasmInterpreterEntryFrame::module_object() const {
1901 return wasm_instance()->module_object();
1902 }
1903
1904 Script* WasmInterpreterEntryFrame::script() const {
1905 return module_object()->script();
1906 }
1907
1908 int WasmInterpreterEntryFrame::position() const {
1909 return FrameSummary::GetBottom(this).AsWasmInterpreted().SourcePosition();
1910 }
1911
1912 Object* WasmInterpreterEntryFrame::context() const {
1913 return wasm_instance()->native_context();
1914 }
1915
1916 Address WasmInterpreterEntryFrame::GetCallerStackPointer() const {
1917 return fp() + ExitFrameConstants::kCallerSPOffset;
1918 }
1919
1920 WasmInstanceObject* WasmCompileLazyFrame::wasm_instance() const {
1921 return WasmInstanceObject::cast(*wasm_instance_slot());
1922 }
1923
1924 Object** WasmCompileLazyFrame::wasm_instance_slot() const {
1925 const int offset = WasmCompileLazyFrameConstants::kWasmInstanceOffset;
1926 return &Memory<Object*>(fp() + offset);
1927 }
1928
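// A sketch of the layout assumed by Iterate() below: the saved parameter
// registers in the fixed frame header may hold untagged wasm values and are
// skipped, while the instance slot (also inside the header, hence outside the
// [sp, fp - header_size) range) is the one tagged value there and is visited
// explicitly.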
1929 void WasmCompileLazyFrame::Iterate(RootVisitor* v) const {
1930 const int header_size = WasmCompileLazyFrameConstants::kFixedFrameSizeFromFp;
1931 Object** base = &Memory<Object*>(sp());
1932 Object** limit = &Memory<Object*>(fp() - header_size);
1933 v->VisitRootPointers(Root::kTop, nullptr, base, limit);
1934 v->VisitRootPointer(Root::kTop, nullptr, wasm_instance_slot());
1935 }
1936
1937 Address WasmCompileLazyFrame::GetCallerStackPointer() const {
1938 return fp() + WasmCompileLazyFrameConstants::kCallerSPOffset;
1939 }
1940
1941 namespace {
1942
1943
1944 void PrintFunctionSource(StringStream* accumulator, SharedFunctionInfo* shared,
1945 Code* code) {
1946 if (FLAG_max_stack_trace_source_length != 0 && code != nullptr) {
1947 std::ostringstream os;
1948 os << "--------- s o u r c e c o d e ---------\n"
1949 << SourceCodeOf(shared, FLAG_max_stack_trace_source_length)
1950 << "\n-----------------------------------------\n";
1951 accumulator->Add(os.str().c_str());
1952 }
1953 }
1954
1955
1956 } // namespace
1957
1958
1959 void JavaScriptFrame::Print(StringStream* accumulator,
1960 PrintMode mode,
1961 int index) const {
1962 DisallowHeapAllocation no_gc;
1963 Object* receiver = this->receiver();
1964 JSFunction* function = this->function();
1965
1966 accumulator->PrintSecurityTokenIfChanged(function);
1967 PrintIndex(accumulator, mode, index);
1968 PrintFrameKind(accumulator);
1969 Code* code = nullptr;
1970 if (IsConstructor()) accumulator->Add("new ");
1971 accumulator->PrintFunction(function, receiver, &code);
1972 accumulator->Add(" [%p]", function);
1973
1974 // Get scope information for nicer output, if possible. If code is nullptr
1975 // or doesn't contain scope info, the scope_info accessors will return 0 for
1976 // the number of parameters, stack local variables, context local variables,
1977 // stack slots, or context slots.
1978 SharedFunctionInfo* shared = function->shared();
1979 ScopeInfo* scope_info = shared->scope_info();
1980 Object* script_obj = shared->script();
1981 if (script_obj->IsScript()) {
1982 Script* script = Script::cast(script_obj);
1983 accumulator->Add(" [");
1984 accumulator->PrintName(script->name());
1985
1986 if (is_interpreted()) {
1987 const InterpretedFrame* iframe =
1988 reinterpret_cast<const InterpretedFrame*>(this);
1989 BytecodeArray* bytecodes = iframe->GetBytecodeArray();
1990 int offset = iframe->GetBytecodeOffset();
1991 int source_pos = AbstractCode::cast(bytecodes)->SourcePosition(offset);
1992 int line = script->GetLineNumber(source_pos) + 1;
1993 accumulator->Add(":%d] [bytecode=%p offset=%d]", line, bytecodes, offset);
1994 } else {
1995 int function_start_pos = shared->StartPosition();
1996 int line = script->GetLineNumber(function_start_pos) + 1;
1997 accumulator->Add(":~%d] [pc=%p]", line, reinterpret_cast<void*>(pc()));
1998 }
1999 }
2000
2001 accumulator->Add("(this=%o", receiver);
2002
2003 // Print the parameters.
2004 int parameters_count = ComputeParametersCount();
2005 for (int i = 0; i < parameters_count; i++) {
2006 accumulator->Add(",");
2007 accumulator->Add("%o", GetParameter(i));
2008 }
2009
2010 accumulator->Add(")");
2011 if (mode == OVERVIEW) {
2012 accumulator->Add("\n");
2013 return;
2014 }
2015 if (is_optimized()) {
2016 accumulator->Add(" {\n// optimized frame\n");
2017 PrintFunctionSource(accumulator, shared, code);
2018 accumulator->Add("}\n");
2019 return;
2020 }
2021 accumulator->Add(" {\n");
2022
2023 // Compute the number of locals and expression stack elements.
2024 int heap_locals_count = scope_info->ContextLocalCount();
2025 int expressions_count = ComputeExpressionsCount();
2026
2027 // Try to get hold of the context of this frame.
2028 Context* context = nullptr;
2029 if (this->context() != nullptr && this->context()->IsContext()) {
2030 context = Context::cast(this->context());
2031 while (context->IsWithContext()) {
2032 context = context->previous();
2033 DCHECK_NOT_NULL(context);
2034 }
2035 }
2036
2037 // Print heap-allocated local variables.
2038 if (heap_locals_count > 0) {
2039 accumulator->Add(" // heap-allocated locals\n");
2040 }
2041 for (int i = 0; i < heap_locals_count; i++) {
2042 accumulator->Add(" var ");
2043 accumulator->PrintName(scope_info->ContextLocalName(i));
2044 accumulator->Add(" = ");
2045 if (context != nullptr) {
2046 int index = Context::MIN_CONTEXT_SLOTS + i;
2047 if (index < context->length()) {
2048 accumulator->Add("%o", context->get(index));
2049 } else {
2050 accumulator->Add(
2051 "// warning: missing context slot - inconsistent frame?");
2052 }
2053 } else {
2054 accumulator->Add("// warning: no context found - inconsistent frame?");
2055 }
2056 accumulator->Add("\n");
2057 }
2058
2059 // Print the expression stack.
2060 if (0 < expressions_count) {
2061 accumulator->Add(" // expression stack (top to bottom)\n");
2062 }
2063 for (int i = expressions_count - 1; i >= 0; i--) {
2064 accumulator->Add(" [%02d] : %o\n", i, GetExpression(i));
2065 }
2066
2067 PrintFunctionSource(accumulator, shared, code);
2068
2069 accumulator->Add("}\n\n");
2070 }
2071
2072
2073 void ArgumentsAdaptorFrame::Print(StringStream* accumulator,
2074 PrintMode mode,
2075 int index) const {
2076 int actual = ComputeParametersCount();
2077 int expected = -1;
2078 JSFunction* function = this->function();
2079 expected = function->shared()->internal_formal_parameter_count();
2080
2081 PrintIndex(accumulator, mode, index);
2082 accumulator->Add("arguments adaptor frame: %d->%d", actual, expected);
2083 if (mode == OVERVIEW) {
2084 accumulator->Add("\n");
2085 return;
2086 }
2087 accumulator->Add(" {\n");
2088
2089 // Print actual arguments.
2090 if (actual > 0) accumulator->Add(" // actual arguments\n");
2091 for (int i = 0; i < actual; i++) {
2092 accumulator->Add(" [%02d] : %o", i, GetParameter(i));
2093 if (expected != -1 && i >= expected) {
2094 accumulator->Add(" // not passed to callee");
2095 }
2096 accumulator->Add("\n");
2097 }
2098
2099 accumulator->Add("}\n\n");
2100 }
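// Example of the output produced above (illustrative, DETAILS mode), for a
// call that passes 3 arguments to a function declared with 2 parameters:
//   arguments adaptor frame: 3->2 {
//     // actual arguments
//     [00] : <Object>
//     [01] : <Object>
//     [02] : <Object>  // not passed to callee
//   }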
2101
2102 void EntryFrame::Iterate(RootVisitor* v) const {
2103 IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
2104 }
2105
2106 void StandardFrame::IterateExpressions(RootVisitor* v) const {
2107 const int offset = StandardFrameConstants::kLastObjectOffset;
2108 Object** base = &Memory<Object*>(sp());
2109 Object** limit = &Memory<Object*>(fp() + offset) + 1;
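// Note: the "+ 1" above makes limit point one past the slot at
// kLastObjectOffset, so that slot itself is included in the visited range.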
2110 v->VisitRootPointers(Root::kTop, nullptr, base, limit);
2111 }
2112
2113 void JavaScriptFrame::Iterate(RootVisitor* v) const {
2114 IterateExpressions(v);
2115 IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
2116 }
2117
2118 void InternalFrame::Iterate(RootVisitor* v) const {
2119 Code* code = LookupCode();
2120 IteratePc(v, pc_address(), constant_pool_address(), code);
2121 // Internal frames typically do not receive any arguments, hence their stack
2122 // only contains tagged pointers.
2123 // We are misusing the has_tagged_params flag here to tell us whether
2124 // the full stack frame contains only tagged pointers or only raw values.
2125 // This is used for the WasmCompileLazy builtin, where we actually pass
2126 // untagged arguments and also store untagged values on the stack.
2127 if (code->has_tagged_params()) IterateExpressions(v);
2128 }
2129
2130 // -------------------------------------------------------------------------
2131
2132 InnerPointerToCodeCache::InnerPointerToCodeCacheEntry*
2133 InnerPointerToCodeCache::GetCacheEntry(Address inner_pointer) {
2134 isolate_->counters()->pc_to_code()->Increment();
2135 DCHECK(base::bits::IsPowerOfTwo(kInnerPointerToCodeCacheSize));
2136 uint32_t hash = ComputeIntegerHash(
2137 ObjectAddressForHashing(reinterpret_cast<void*>(inner_pointer)));
2138 uint32_t index = hash & (kInnerPointerToCodeCacheSize - 1);
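// The cache is direct-mapped: because the table size is a power of two
// (checked by the DCHECK above), masking the hash is equivalent to taking it
// modulo kInnerPointerToCodeCacheSize.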
2139 InnerPointerToCodeCacheEntry* entry = cache(index);
2140 if (entry->inner_pointer == inner_pointer) {
2141 isolate_->counters()->pc_to_code_cached()->Increment();
2142 DCHECK(entry->code ==
2143 isolate_->heap()->GcSafeFindCodeForInnerPointer(inner_pointer));
2144 } else {
2145 // Because this code may be interrupted by a profiling signal that
2146 // also queries the cache, we cannot update inner_pointer before the code
2147 // has been set. Otherwise, we risk trying to use a cache entry before
2148 // the code has been computed.
2149 entry->code =
2150 isolate_->heap()->GcSafeFindCodeForInnerPointer(inner_pointer);
2151 entry->safepoint_entry.Reset();
2152 entry->inner_pointer = inner_pointer;
2153 }
2154 return entry;
2155 }
2156 } // namespace internal
2157 } // namespace v8
2158