1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/frames.h"
6
7 #include <sstream>
8
9 #include "src/ast/ast.h"
10 #include "src/ast/scopeinfo.h"
11 #include "src/base/bits.h"
12 #include "src/deoptimizer.h"
13 #include "src/frames-inl.h"
14 #include "src/full-codegen/full-codegen.h"
15 #include "src/register-configuration.h"
16 #include "src/safepoint-table.h"
17 #include "src/string-stream.h"
18 #include "src/vm-state-inl.h"
19
20 namespace v8 {
21 namespace internal {
22
23 ReturnAddressLocationResolver
24 StackFrame::return_address_location_resolver_ = NULL;
25
26
27 // Iterator that supports traversing the stack handlers of a
28 // particular frame. Needs to know the top of the handler chain.
29 class StackHandlerIterator BASE_EMBEDDED {
30 public:
31   StackHandlerIterator(const StackFrame* frame, StackHandler* handler)
32 : limit_(frame->fp()), handler_(handler) {
33 // Make sure the handler has already been unwound to this frame.
34 DCHECK(frame->sp() <= handler->address());
35 }
36
37   StackHandler* handler() const { return handler_; }
38
39   bool done() {
40 return handler_ == NULL || handler_->address() > limit_;
41 }
42   void Advance() {
43 DCHECK(!done());
44 handler_ = handler_->next();
45 }
46
47 private:
48 const Address limit_;
49 StackHandler* handler_;
50 };
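// Illustrative sketch (not part of the original source): this iterator is
// used below in StackFrameIterator::Advance() to unwind the handlers that
// belong to the frame being left, roughly:
//
//   StackHandlerIterator it(frame_, handler_);
//   while (!it.done()) it.Advance();
//   handler_ = it.handler();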
51
52
53 // -------------------------------------------------------------------------
54
55
56 #define INITIALIZE_SINGLETON(type, field) field##_(this),
57 StackFrameIteratorBase::StackFrameIteratorBase(Isolate* isolate,
58 bool can_access_heap_objects)
59 : isolate_(isolate),
60 STACK_FRAME_TYPE_LIST(INITIALIZE_SINGLETON)
61 frame_(NULL), handler_(NULL),
62 can_access_heap_objects_(can_access_heap_objects) {
63 }
64 #undef INITIALIZE_SINGLETON
65
66
67 StackFrameIterator::StackFrameIterator(Isolate* isolate)
68 : StackFrameIteratorBase(isolate, true) {
69 Reset(isolate->thread_local_top());
70 }
71
72
73 StackFrameIterator::StackFrameIterator(Isolate* isolate, ThreadLocalTop* t)
74 : StackFrameIteratorBase(isolate, true) {
75 Reset(t);
76 }
77
78
79 void StackFrameIterator::Advance() {
80 DCHECK(!done());
81 // Compute the state of the calling frame before restoring
82 // callee-saved registers and unwinding handlers. This allows the
83 // frame code that computes the caller state to access the top
84 // handler and the value of any callee-saved register if needed.
85 StackFrame::State state;
86 StackFrame::Type type = frame_->GetCallerState(&state);
87
88 // Unwind handlers corresponding to the current frame.
89 StackHandlerIterator it(frame_, handler_);
90 while (!it.done()) it.Advance();
91 handler_ = it.handler();
92
93 // Advance to the calling frame.
94 frame_ = SingletonFor(type, &state);
95
96 // When we're done iterating over the stack frames, the handler
97 // chain must have been completely unwound.
98 DCHECK(!done() || handler_ == NULL);
99 }
100
101
102 void StackFrameIterator::Reset(ThreadLocalTop* top) {
103 StackFrame::State state;
104 StackFrame::Type type = ExitFrame::GetStateForFramePointer(
105 Isolate::c_entry_fp(top), &state);
106 handler_ = StackHandler::FromAddress(Isolate::handler(top));
107 frame_ = SingletonFor(type, &state);
108 }
109
110
111 StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type,
112 StackFrame::State* state) {
113 StackFrame* result = SingletonFor(type);
114 DCHECK((!result) == (type == StackFrame::NONE));
115 if (result) result->state_ = *state;
116 return result;
117 }
118
119
120 StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type) {
121 #define FRAME_TYPE_CASE(type, field) \
122 case StackFrame::type: \
123 return &field##_;
124
125 switch (type) {
126 case StackFrame::NONE: return NULL;
127 STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
128 default: break;
129 }
130 return NULL;
131
132 #undef FRAME_TYPE_CASE
133 }
134
135 // -------------------------------------------------------------------------
136
137 JavaScriptFrameIterator::JavaScriptFrameIterator(Isolate* isolate,
138 StackFrame::Id id)
139 : iterator_(isolate) {
140 while (!done()) {
141 Advance();
142 if (frame()->id() == id) return;
143 }
144 }
145
146
147 void JavaScriptFrameIterator::Advance() {
148 do {
149 iterator_.Advance();
150 } while (!iterator_.done() && !iterator_.frame()->is_java_script());
151 }
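// Illustrative usage sketch (mirrors JavaScriptFrame::PrintTop further down;
// not part of the original source): walking only the JavaScript frames of an
// isolate looks like
//
//   JavaScriptFrameIterator it(isolate);
//   while (!it.done()) {
//     JavaScriptFrame* frame = it.frame();
//     // ... inspect frame ...
//     it.Advance();
//   }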
152
153
154 void JavaScriptFrameIterator::AdvanceToArgumentsFrame() {
155 if (!frame()->has_adapted_arguments()) return;
156 iterator_.Advance();
157 DCHECK(iterator_.frame()->is_arguments_adaptor());
158 }
159
160
161 // -------------------------------------------------------------------------
162
163 StackTraceFrameIterator::StackTraceFrameIterator(Isolate* isolate)
164 : iterator_(isolate) {
165 if (!done() && !IsValidFrame(iterator_.frame())) Advance();
166 }
167
168
169 void StackTraceFrameIterator::Advance() {
170 do {
171 iterator_.Advance();
172 } while (!done() && !IsValidFrame(iterator_.frame()));
173 }
174
175 bool StackTraceFrameIterator::IsValidFrame(StackFrame* frame) const {
176 if (frame->is_java_script()) {
177 JavaScriptFrame* jsFrame = static_cast<JavaScriptFrame*>(frame);
178 if (!jsFrame->function()->IsJSFunction()) return false;
179 Object* script = jsFrame->function()->shared()->script();
180     // Don't show functions from native scripts to the user.
181 return (script->IsScript() &&
182 Script::TYPE_NATIVE != Script::cast(script)->type());
183 }
184   // Apart from JavaScript, only WASM frames are valid.
185 return frame->is_wasm();
186 }
187
188
189 // -------------------------------------------------------------------------
190
191
192 SafeStackFrameIterator::SafeStackFrameIterator(
193 Isolate* isolate,
194 Address fp, Address sp, Address js_entry_sp)
195 : StackFrameIteratorBase(isolate, false),
196 low_bound_(sp),
197 high_bound_(js_entry_sp),
198 top_frame_type_(StackFrame::NONE),
199 external_callback_scope_(isolate->external_callback_scope()) {
200 StackFrame::State state;
201 StackFrame::Type type;
202 ThreadLocalTop* top = isolate->thread_local_top();
203 if (IsValidTop(top)) {
204 type = ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state);
205 top_frame_type_ = type;
206 } else if (IsValidStackAddress(fp)) {
207 DCHECK(fp != NULL);
208 state.fp = fp;
209 state.sp = sp;
210 state.pc_address = StackFrame::ResolveReturnAddressLocation(
211 reinterpret_cast<Address*>(StandardFrame::ComputePCAddress(fp)));
212     // StackFrame::ComputeType will read both kContextOffset and kMarkerOffset,
213     // so we only check that kMarkerOffset is within the stack bounds and
214     // statically assert that the kContextOffset slot is pushed on the stack
215     // before kMarkerOffset.
216 STATIC_ASSERT(StandardFrameConstants::kFunctionOffset <
217 StandardFrameConstants::kContextOffset);
218 Address frame_marker = fp + StandardFrameConstants::kFunctionOffset;
219 if (IsValidStackAddress(frame_marker)) {
220 type = StackFrame::ComputeType(this, &state);
221 top_frame_type_ = type;
222 } else {
223       // Mark the frame as JAVA_SCRIPT if we cannot determine its type;
224       // the frame will be skipped anyway.
225 type = StackFrame::JAVA_SCRIPT;
226 // Top frame is incomplete so we cannot reliably determine its type.
227 top_frame_type_ = StackFrame::NONE;
228 }
229 } else {
230 return;
231 }
232 frame_ = SingletonFor(type, &state);
233 if (frame_) Advance();
234 }
235
236
237 bool SafeStackFrameIterator::IsValidTop(ThreadLocalTop* top) const {
238 Address c_entry_fp = Isolate::c_entry_fp(top);
239 if (!IsValidExitFrame(c_entry_fp)) return false;
240 // There should be at least one JS_ENTRY stack handler.
241 Address handler = Isolate::handler(top);
242 if (handler == NULL) return false;
243 // Check that there are no js frames on top of the native frames.
244 return c_entry_fp < handler;
245 }
246
247
248 void SafeStackFrameIterator::AdvanceOneFrame() {
249 DCHECK(!done());
250 StackFrame* last_frame = frame_;
251 Address last_sp = last_frame->sp(), last_fp = last_frame->fp();
252 // Before advancing to the next stack frame, perform pointer validity tests.
253 if (!IsValidFrame(last_frame) || !IsValidCaller(last_frame)) {
254 frame_ = NULL;
255 return;
256 }
257
258 // Advance to the previous frame.
259 StackFrame::State state;
260 StackFrame::Type type = frame_->GetCallerState(&state);
261 frame_ = SingletonFor(type, &state);
262 if (!frame_) return;
263
264 // Check that we have actually moved to the previous frame in the stack.
265 if (frame_->sp() < last_sp || frame_->fp() < last_fp) {
266 frame_ = NULL;
267 }
268 }
269
270
271 bool SafeStackFrameIterator::IsValidFrame(StackFrame* frame) const {
272 return IsValidStackAddress(frame->sp()) && IsValidStackAddress(frame->fp());
273 }
274
275
276 bool SafeStackFrameIterator::IsValidCaller(StackFrame* frame) {
277 StackFrame::State state;
278 if (frame->is_entry() || frame->is_entry_construct()) {
279 // See EntryFrame::GetCallerState. It computes the caller FP address
280 // and calls ExitFrame::GetStateForFramePointer on it. We need to be
281 // sure that caller FP address is valid.
282 Address caller_fp = Memory::Address_at(
283 frame->fp() + EntryFrameConstants::kCallerFPOffset);
284 if (!IsValidExitFrame(caller_fp)) return false;
285 } else if (frame->is_arguments_adaptor()) {
286     // See ArgumentsAdaptorFrame::GetCallerStackPointer. It assumes that
287     // the number of arguments is stored on the stack as a Smi. We need to
288     // check that it really is a Smi.
289 Object* number_of_args = reinterpret_cast<ArgumentsAdaptorFrame*>(frame)->
290 GetExpression(0);
291 if (!number_of_args->IsSmi()) {
292 return false;
293 }
294 }
295 frame->ComputeCallerState(&state);
296 return IsValidStackAddress(state.sp) && IsValidStackAddress(state.fp) &&
297 SingletonFor(frame->GetCallerState(&state)) != NULL;
298 }
299
300
301 bool SafeStackFrameIterator::IsValidExitFrame(Address fp) const {
302 if (!IsValidStackAddress(fp)) return false;
303 Address sp = ExitFrame::ComputeStackPointer(fp);
304 if (!IsValidStackAddress(sp)) return false;
305 StackFrame::State state;
306 ExitFrame::FillState(fp, sp, &state);
307 return *state.pc_address != NULL;
308 }
309
310
311 void SafeStackFrameIterator::Advance() {
312 while (true) {
313 AdvanceOneFrame();
314 if (done()) break;
315 ExternalCallbackScope* last_callback_scope = NULL;
316 while (external_callback_scope_ != NULL &&
317 external_callback_scope_->scope_address() < frame_->fp()) {
318 // As long as the setup of a frame is not atomic, we may happen to be
319 // in an interval where an ExternalCallbackScope is already created,
320 // but the frame is not yet entered. So we are actually observing
321 // the previous frame.
322 // Skip all the ExternalCallbackScope's that are below the current fp.
323 last_callback_scope = external_callback_scope_;
324 external_callback_scope_ = external_callback_scope_->previous();
325 }
326 if (frame_->is_java_script()) break;
327 if (frame_->is_exit()) {
328 // Some of the EXIT frames may have ExternalCallbackScope allocated on
329 // top of them. In that case the scope corresponds to the first EXIT
330 // frame beneath it. There may be other EXIT frames on top of the
331 // ExternalCallbackScope, just skip them as we cannot collect any useful
332 // information about them.
333 if (last_callback_scope) {
334 frame_->state_.pc_address =
335 last_callback_scope->callback_entrypoint_address();
336 }
337 break;
338 }
339 }
340 }
341
342
343 // -------------------------------------------------------------------------
344
345
346 Code* StackFrame::GetSafepointData(Isolate* isolate,
347 Address inner_pointer,
348 SafepointEntry* safepoint_entry,
349 unsigned* stack_slots) {
350 InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry =
351 isolate->inner_pointer_to_code_cache()->GetCacheEntry(inner_pointer);
352 if (!entry->safepoint_entry.is_valid()) {
353 entry->safepoint_entry = entry->code->GetSafepointEntry(inner_pointer);
354 DCHECK(entry->safepoint_entry.is_valid());
355 } else {
356 DCHECK(entry->safepoint_entry.Equals(
357 entry->code->GetSafepointEntry(inner_pointer)));
358 }
359
360 // Fill in the results and return the code.
361 Code* code = entry->code;
362 *safepoint_entry = entry->safepoint_entry;
363 *stack_slots = code->stack_slots();
364 return code;
365 }
366
367
368 #ifdef DEBUG
369 static bool GcSafeCodeContains(HeapObject* object, Address addr);
370 #endif
371
372
373 void StackFrame::IteratePc(ObjectVisitor* v, Address* pc_address,
374 Address* constant_pool_address, Code* holder) {
375 Address pc = *pc_address;
376 DCHECK(GcSafeCodeContains(holder, pc));
377 unsigned pc_offset = static_cast<unsigned>(pc - holder->instruction_start());
378 Object* code = holder;
379 v->VisitPointer(&code);
380 if (code != holder) {
381 holder = reinterpret_cast<Code*>(code);
382 pc = holder->instruction_start() + pc_offset;
383 *pc_address = pc;
384 if (FLAG_enable_embedded_constant_pool && constant_pool_address) {
385 *constant_pool_address = holder->constant_pool();
386 }
387 }
388 }
389
390
391 void StackFrame::SetReturnAddressLocationResolver(
392 ReturnAddressLocationResolver resolver) {
393 DCHECK(return_address_location_resolver_ == NULL);
394 return_address_location_resolver_ = resolver;
395 }
396
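// Returns true if |pc| lies inside one of the interpreter's entry/dispatch
// builtins, i.e. if the frame owning |pc| is an interpreted frame. This is
// what ComputeType() below relies on when heap access is not allowed
// (descriptive comment added for clarity).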
397 static bool IsInterpreterFramePc(Isolate* isolate, Address pc) {
398 Code* interpreter_entry_trampoline =
399 isolate->builtins()->builtin(Builtins::kInterpreterEntryTrampoline);
400 Code* interpreter_bytecode_dispatch =
401 isolate->builtins()->builtin(Builtins::kInterpreterEnterBytecodeDispatch);
402 Code* interpreter_baseline_on_return =
403 isolate->builtins()->builtin(Builtins::kInterpreterMarkBaselineOnReturn);
404
405 return (pc >= interpreter_entry_trampoline->instruction_start() &&
406 pc < interpreter_entry_trampoline->instruction_end()) ||
407 (pc >= interpreter_bytecode_dispatch->instruction_start() &&
408 pc < interpreter_bytecode_dispatch->instruction_end()) ||
409 (pc >= interpreter_baseline_on_return->instruction_start() &&
410 pc < interpreter_baseline_on_return->instruction_end());
411 }
412
413 StackFrame::Type StackFrame::ComputeType(const StackFrameIteratorBase* iterator,
414 State* state) {
415 DCHECK(state->fp != NULL);
416
417 #if defined(USE_SIMULATOR)
418 MSAN_MEMORY_IS_INITIALIZED(
419 state->fp + CommonFrameConstants::kContextOrFrameTypeOffset,
420 kPointerSize);
421 #endif
422 Object* marker = Memory::Object_at(
423 state->fp + CommonFrameConstants::kContextOrFrameTypeOffset);
424 if (!iterator->can_access_heap_objects_) {
425 // TODO(titzer): "can_access_heap_objects" is kind of bogus. It really
426 // means that we are being called from the profiler, which can interrupt
427 // the VM with a signal at any arbitrary instruction, with essentially
428 // anything on the stack. So basically none of these checks are 100%
429 // reliable.
430 #if defined(USE_SIMULATOR)
431 MSAN_MEMORY_IS_INITIALIZED(
432 state->fp + StandardFrameConstants::kFunctionOffset, kPointerSize);
433 #endif
434 Object* maybe_function =
435 Memory::Object_at(state->fp + StandardFrameConstants::kFunctionOffset);
436 if (!marker->IsSmi()) {
437 if (maybe_function->IsSmi()) {
438 return NONE;
439 } else if (FLAG_ignition && IsInterpreterFramePc(iterator->isolate(),
440 *(state->pc_address))) {
441 return INTERPRETED;
442 } else {
443 return JAVA_SCRIPT;
444 }
445 }
446 } else {
447 // Look up the code object to figure out the type of the stack frame.
448 Code* code_obj =
449 GetContainingCode(iterator->isolate(), *(state->pc_address));
450 if (code_obj != nullptr) {
451 switch (code_obj->kind()) {
452 case Code::BUILTIN:
453 if (marker->IsSmi()) break;
454 if (code_obj->is_interpreter_trampoline_builtin()) {
455 return INTERPRETED;
456 }
457 if (code_obj->is_turbofanned()) {
458 // TODO(bmeurer): We treat frames for BUILTIN Code objects as
459 // OptimizedFrame for now (all the builtins with JavaScript
460 // linkage are actually generated with TurboFan currently, so
461 // this is sound).
462 return OPTIMIZED;
463 }
464 return BUILTIN;
465 case Code::FUNCTION:
466 return JAVA_SCRIPT;
467 case Code::OPTIMIZED_FUNCTION:
468 return OPTIMIZED;
469 case Code::WASM_FUNCTION:
470 return WASM;
471 case Code::WASM_TO_JS_FUNCTION:
472 return WASM_TO_JS;
473 case Code::JS_TO_WASM_FUNCTION:
474 return JS_TO_WASM;
475 default:
476 // All other types should have an explicit marker
477 break;
478 }
479 } else {
480 return NONE;
481 }
482 }
483
484 DCHECK(marker->IsSmi());
485 StackFrame::Type candidate =
486 static_cast<StackFrame::Type>(Smi::cast(marker)->value());
487 switch (candidate) {
488 case ENTRY:
489 case ENTRY_CONSTRUCT:
490 case EXIT:
491 case STUB:
492 case STUB_FAILURE_TRAMPOLINE:
493 case INTERNAL:
494 case CONSTRUCT:
495 case ARGUMENTS_ADAPTOR:
496 case WASM_TO_JS:
497 case WASM:
498 return candidate;
499 case JS_TO_WASM:
500 case JAVA_SCRIPT:
501 case OPTIMIZED:
502 case INTERPRETED:
503 default:
504 // Unoptimized and optimized JavaScript frames, including
505 // interpreted frames, should never have a StackFrame::Type
506 // marker. If we find one, we're likely being called from the
507 // profiler in a bogus stack frame.
508 return NONE;
509 }
510 }
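// Illustrative sketch (assumption, mirroring the lookup above; not part of
// the original source): the frame-type marker of a typed frame can be read
// directly off the frame pointer:
//
//   Object* marker = Memory::Object_at(
//       fp + CommonFrameConstants::kContextOrFrameTypeOffset);
//   if (marker->IsSmi()) {
//     StackFrame::Type type =
//         static_cast<StackFrame::Type>(Smi::cast(marker)->value());
//   }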
511
512
513 #ifdef DEBUG
514 bool StackFrame::can_access_heap_objects() const {
515 return iterator_->can_access_heap_objects_;
516 }
517 #endif
518
519
520 StackFrame::Type StackFrame::GetCallerState(State* state) const {
521 ComputeCallerState(state);
522 return ComputeType(iterator_, state);
523 }
524
525
526 Address StackFrame::UnpaddedFP() const {
527 return fp();
528 }
529
530
531 Code* EntryFrame::unchecked_code() const {
532 return isolate()->heap()->js_entry_code();
533 }
534
535
536 void EntryFrame::ComputeCallerState(State* state) const {
537 GetCallerState(state);
538 }
539
540
541 void EntryFrame::SetCallerFp(Address caller_fp) {
542 const int offset = EntryFrameConstants::kCallerFPOffset;
543 Memory::Address_at(this->fp() + offset) = caller_fp;
544 }
545
546
547 StackFrame::Type EntryFrame::GetCallerState(State* state) const {
548 const int offset = EntryFrameConstants::kCallerFPOffset;
549 Address fp = Memory::Address_at(this->fp() + offset);
550 return ExitFrame::GetStateForFramePointer(fp, state);
551 }
552
553
554 Code* EntryConstructFrame::unchecked_code() const {
555 return isolate()->heap()->js_construct_entry_code();
556 }
557
558
559 Object*& ExitFrame::code_slot() const {
560 const int offset = ExitFrameConstants::kCodeOffset;
561 return Memory::Object_at(fp() + offset);
562 }
563
564
565 Code* ExitFrame::unchecked_code() const {
566 return reinterpret_cast<Code*>(code_slot());
567 }
568
569
570 void ExitFrame::ComputeCallerState(State* state) const {
571 // Set up the caller state.
572 state->sp = caller_sp();
573 state->fp = Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset);
574 state->pc_address = ResolveReturnAddressLocation(
575 reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset));
576 if (FLAG_enable_embedded_constant_pool) {
577 state->constant_pool_address = reinterpret_cast<Address*>(
578 fp() + ExitFrameConstants::kConstantPoolOffset);
579 }
580 }
581
582
583 void ExitFrame::SetCallerFp(Address caller_fp) {
584 Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset) = caller_fp;
585 }
586
587
588 void ExitFrame::Iterate(ObjectVisitor* v) const {
589 // The arguments are traversed as part of the expression stack of
590 // the calling frame.
591 IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
592 v->VisitPointer(&code_slot());
593 }
594
595
596 Address ExitFrame::GetCallerStackPointer() const {
597 return fp() + ExitFrameConstants::kCallerSPOffset;
598 }
599
600
601 StackFrame::Type ExitFrame::GetStateForFramePointer(Address fp, State* state) {
602 if (fp == 0) return NONE;
603 Address sp = ComputeStackPointer(fp);
604 FillState(fp, sp, state);
605 DCHECK(*state->pc_address != NULL);
606 return EXIT;
607 }
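// Illustrative sketch (this is how StackFrameIterator::Reset() above seeds
// the iteration; not part of the original source):
//
//   StackFrame::State state;
//   StackFrame::Type type =
//       ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state);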
608
609 Address ExitFrame::ComputeStackPointer(Address fp) {
610 #if defined(USE_SIMULATOR)
611 MSAN_MEMORY_IS_INITIALIZED(fp + ExitFrameConstants::kSPOffset, kPointerSize);
612 #endif
613 return Memory::Address_at(fp + ExitFrameConstants::kSPOffset);
614 }
615
616 void ExitFrame::FillState(Address fp, Address sp, State* state) {
617 state->sp = sp;
618 state->fp = fp;
619 state->pc_address = ResolveReturnAddressLocation(
620 reinterpret_cast<Address*>(sp - 1 * kPCOnStackSize));
621 // The constant pool recorded in the exit frame is not associated
622 // with the pc in this state (the return address into a C entry
623 // stub). ComputeCallerState will retrieve the constant pool
624 // together with the associated caller pc.
625 state->constant_pool_address = NULL;
626 }
627
628 Address StandardFrame::GetExpressionAddress(int n) const {
629 const int offset = StandardFrameConstants::kExpressionsOffset;
630 return fp() + offset - n * kPointerSize;
631 }
632
633 Address InterpretedFrame::GetExpressionAddress(int n) const {
634 const int offset = InterpreterFrameConstants::kExpressionsOffset;
635 return fp() + offset - n * kPointerSize;
636 }
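// Worked example (illustrative, not part of the original source): expression
// slots grow away from the frame pointer, so for a standard frame
// GetExpressionAddress(2) == fp() + kExpressionsOffset - 2 * kPointerSize,
// i.e. two slots below expression 0.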
637
638 int StandardFrame::ComputeExpressionsCount() const {
639 Address base = GetExpressionAddress(0);
640 Address limit = sp() - kPointerSize;
641 DCHECK(base >= limit); // stack grows downwards
642 // Include register-allocated locals in number of expressions.
643 return static_cast<int>((base - limit) / kPointerSize);
644 }
645
646
647 void StandardFrame::ComputeCallerState(State* state) const {
648 state->sp = caller_sp();
649 state->fp = caller_fp();
650 state->pc_address = ResolveReturnAddressLocation(
651 reinterpret_cast<Address*>(ComputePCAddress(fp())));
652 state->constant_pool_address =
653 reinterpret_cast<Address*>(ComputeConstantPoolAddress(fp()));
654 }
655
656
657 void StandardFrame::SetCallerFp(Address caller_fp) {
658 Memory::Address_at(fp() + StandardFrameConstants::kCallerFPOffset) =
659 caller_fp;
660 }
661
662
663 void StandardFrame::IterateCompiledFrame(ObjectVisitor* v) const {
664 // Make sure that we're not doing "safe" stack frame iteration. We cannot
665 // possibly find pointers in optimized frames in that state.
666 DCHECK(can_access_heap_objects());
667
668 // Compute the safepoint information.
669 unsigned stack_slots = 0;
670 SafepointEntry safepoint_entry;
671 Code* code = StackFrame::GetSafepointData(
672 isolate(), pc(), &safepoint_entry, &stack_slots);
673 unsigned slot_space = stack_slots * kPointerSize;
674
675 // Determine the fixed header and spill slot area size.
676 int frame_header_size = StandardFrameConstants::kFixedFrameSizeFromFp;
677 Object* marker =
678 Memory::Object_at(fp() + CommonFrameConstants::kContextOrFrameTypeOffset);
679 if (marker->IsSmi()) {
680 StackFrame::Type candidate =
681 static_cast<StackFrame::Type>(Smi::cast(marker)->value());
682 switch (candidate) {
683 case ENTRY:
684 case ENTRY_CONSTRUCT:
685 case EXIT:
686 case STUB_FAILURE_TRAMPOLINE:
687 case ARGUMENTS_ADAPTOR:
688 case STUB:
689 case INTERNAL:
690 case CONSTRUCT:
691 case JS_TO_WASM:
692 case WASM_TO_JS:
693 case WASM:
694 frame_header_size = TypedFrameConstants::kFixedFrameSizeFromFp;
695 break;
696 case JAVA_SCRIPT:
697 case OPTIMIZED:
698 case INTERPRETED:
699 case BUILTIN:
700         // These frame types have a context, but it is stored in the stack
701         // slot where one would otherwise find the frame type marker.
702 UNREACHABLE();
703 break;
704 case NONE:
705 case NUMBER_OF_TYPES:
706 case MANUAL:
707 UNREACHABLE();
708 break;
709 }
710 }
711 slot_space -=
712 (frame_header_size + StandardFrameConstants::kFixedFrameSizeAboveFp);
713
714 Object** frame_header_base = &Memory::Object_at(fp() - frame_header_size);
715 Object** frame_header_limit =
716 &Memory::Object_at(fp() - StandardFrameConstants::kCPSlotSize);
717 Object** parameters_base = &Memory::Object_at(sp());
718 Object** parameters_limit = frame_header_base - slot_space / kPointerSize;
719
720 // Visit the parameters that may be on top of the saved registers.
721 if (safepoint_entry.argument_count() > 0) {
722 v->VisitPointers(parameters_base,
723 parameters_base + safepoint_entry.argument_count());
724 parameters_base += safepoint_entry.argument_count();
725 }
726
727 // Skip saved double registers.
728 if (safepoint_entry.has_doubles()) {
729 // Number of doubles not known at snapshot time.
730 DCHECK(!isolate()->serializer_enabled());
731 parameters_base += RegisterConfiguration::Crankshaft()
732 ->num_allocatable_double_registers() *
733 kDoubleSize / kPointerSize;
734 }
735
736 // Visit the registers that contain pointers if any.
737 if (safepoint_entry.HasRegisters()) {
738     for (int i = kNumSafepointRegisters - 1; i >= 0; i--) {
739 if (safepoint_entry.HasRegisterAt(i)) {
740 int reg_stack_index = MacroAssembler::SafepointRegisterStackIndex(i);
741 v->VisitPointer(parameters_base + reg_stack_index);
742 }
743 }
744 // Skip the words containing the register values.
745 parameters_base += kNumSafepointRegisters;
746 }
747
748 // We're done dealing with the register bits.
749 uint8_t* safepoint_bits = safepoint_entry.bits();
750 safepoint_bits += kNumSafepointRegisters >> kBitsPerByteLog2;
751
752 // Visit the rest of the parameters.
753 if (!is_js_to_wasm() && !is_wasm()) {
754 // Non-WASM frames have tagged values as parameters.
755 v->VisitPointers(parameters_base, parameters_limit);
756 }
757
758 // Visit pointer spill slots and locals.
759 for (unsigned index = 0; index < stack_slots; index++) {
760 int byte_index = index >> kBitsPerByteLog2;
761 int bit_index = index & (kBitsPerByte - 1);
762 if ((safepoint_bits[byte_index] & (1U << bit_index)) != 0) {
763 v->VisitPointer(parameters_limit + index);
764 }
765 }
766
767 // Visit the return address in the callee and incoming arguments.
768 IteratePc(v, pc_address(), constant_pool_address(), code);
769
770 if (!is_wasm() && !is_wasm_to_js()) {
771 // Visit the context in stub frame and JavaScript frame.
772 // Visit the function in JavaScript frame.
773 v->VisitPointers(frame_header_base, frame_header_limit);
774 }
775 }
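// Rough sketch of the ranges visited above (illustrative; addresses grow
// from sp() towards fp()):
//   [parameters_base .. parameters_limit)     outgoing parameters and any
//                                             saved registers, visited
//                                             selectively per safepoint entry
//   [parameters_limit .. frame_header_base)   spill slots, visited only where
//                                             the safepoint bits mark a pointer
//   [frame_header_base .. frame_header_limit) fixed header (context, function)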
776
777
778 void StubFrame::Iterate(ObjectVisitor* v) const {
779 IterateCompiledFrame(v);
780 }
781
782
783 Code* StubFrame::unchecked_code() const {
784 return static_cast<Code*>(isolate()->FindCodeObject(pc()));
785 }
786
787
788 Address StubFrame::GetCallerStackPointer() const {
789 return fp() + ExitFrameConstants::kCallerSPOffset;
790 }
791
792
793 int StubFrame::GetNumberOfIncomingArguments() const {
794 return 0;
795 }
796
797
798 void OptimizedFrame::Iterate(ObjectVisitor* v) const {
799 IterateCompiledFrame(v);
800 }
801
802
803 void JavaScriptFrame::SetParameterValue(int index, Object* value) const {
804 Memory::Object_at(GetParameterSlot(index)) = value;
805 }
806
807
808 bool JavaScriptFrame::IsConstructor() const {
809 Address fp = caller_fp();
810 if (has_adapted_arguments()) {
811 // Skip the arguments adaptor frame and look at the real caller.
812 fp = Memory::Address_at(fp + StandardFrameConstants::kCallerFPOffset);
813 }
814 return IsConstructFrame(fp);
815 }
816
817
818 bool JavaScriptFrame::HasInlinedFrames() const {
819 List<JSFunction*> functions(1);
820 GetFunctions(&functions);
821 return functions.length() > 1;
822 }
823
824
825 int JavaScriptFrame::GetArgumentsLength() const {
826 // If there is an arguments adaptor frame get the arguments length from it.
827 if (has_adapted_arguments()) {
828 return ArgumentsAdaptorFrame::GetLength(caller_fp());
829 } else {
830 return GetNumberOfIncomingArguments();
831 }
832 }
833
834
835 Code* JavaScriptFrame::unchecked_code() const {
836 return function()->code();
837 }
838
839
840 int JavaScriptFrame::GetNumberOfIncomingArguments() const {
841 DCHECK(can_access_heap_objects() &&
842 isolate()->heap()->gc_state() == Heap::NOT_IN_GC);
843
844 return function()->shared()->internal_formal_parameter_count();
845 }
846
847
848 Address JavaScriptFrame::GetCallerStackPointer() const {
849 return fp() + StandardFrameConstants::kCallerSPOffset;
850 }
851
852
853 void JavaScriptFrame::GetFunctions(List<JSFunction*>* functions) const {
854 DCHECK(functions->length() == 0);
855 functions->Add(function());
856 }
857
858 void JavaScriptFrame::Summarize(List<FrameSummary>* functions,
859 FrameSummary::Mode mode) const {
860 DCHECK(functions->length() == 0);
861 Code* code = LookupCode();
862 int offset = static_cast<int>(pc() - code->instruction_start());
863 AbstractCode* abstract_code = AbstractCode::cast(code);
864 FrameSummary summary(receiver(), function(), abstract_code, offset,
865 IsConstructor(), mode);
866 functions->Add(summary);
867 }
868
869 JSFunction* JavaScriptFrame::function() const {
870 return JSFunction::cast(function_slot_object());
871 }
872
873 Object* JavaScriptFrame::receiver() const { return GetParameter(-1); }
874
875 int JavaScriptFrame::LookupExceptionHandlerInTable(
876 int* stack_depth, HandlerTable::CatchPrediction* prediction) {
877 Code* code = LookupCode();
878 DCHECK(!code->is_optimized_code());
879 HandlerTable* table = HandlerTable::cast(code->handler_table());
880 int pc_offset = static_cast<int>(pc() - code->entry());
881 return table->LookupRange(pc_offset, stack_depth, prediction);
882 }
883
884
885 void JavaScriptFrame::PrintFunctionAndOffset(JSFunction* function, Code* code,
886 Address pc, FILE* file,
887 bool print_line_number) {
888 PrintF(file, "%s", function->IsOptimized() ? "*" : "~");
889 function->PrintName(file);
890 int code_offset = static_cast<int>(pc - code->instruction_start());
891 PrintF(file, "+%d", code_offset);
892 if (print_line_number) {
893 SharedFunctionInfo* shared = function->shared();
894 int source_pos = code->SourcePosition(code_offset);
895 Object* maybe_script = shared->script();
896 if (maybe_script->IsScript()) {
897 Script* script = Script::cast(maybe_script);
898 int line = script->GetLineNumber(source_pos) + 1;
899 Object* script_name_raw = script->name();
900 if (script_name_raw->IsString()) {
901 String* script_name = String::cast(script->name());
902 base::SmartArrayPointer<char> c_script_name =
903 script_name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
904 PrintF(file, " at %s:%d", c_script_name.get(), line);
905 } else {
906 PrintF(file, " at <unknown>:%d", line);
907 }
908 } else {
909 PrintF(file, " at <unknown>:<unknown>");
910 }
911 }
912 }
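// Example output (illustrative): "*foo+42 at script.js:7" for an optimized
// function, or "~bar+13 at <unknown>:<unknown>" when no script is attached.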
913
914
915 void JavaScriptFrame::PrintTop(Isolate* isolate, FILE* file, bool print_args,
916 bool print_line_number) {
917 // constructor calls
918 DisallowHeapAllocation no_allocation;
919 JavaScriptFrameIterator it(isolate);
920 while (!it.done()) {
921 if (it.frame()->is_java_script()) {
922 JavaScriptFrame* frame = it.frame();
923 if (frame->IsConstructor()) PrintF(file, "new ");
924 PrintFunctionAndOffset(frame->function(), frame->unchecked_code(),
925 frame->pc(), file, print_line_number);
926 if (print_args) {
927 // function arguments
928 // (we are intentionally only printing the actually
929 // supplied parameters, not all parameters required)
930 PrintF(file, "(this=");
931 frame->receiver()->ShortPrint(file);
932 const int length = frame->ComputeParametersCount();
933 for (int i = 0; i < length; i++) {
934 PrintF(file, ", ");
935 frame->GetParameter(i)->ShortPrint(file);
936 }
937 PrintF(file, ")");
938 }
939 break;
940 }
941 it.Advance();
942 }
943 }
944
945
946 void JavaScriptFrame::SaveOperandStack(FixedArray* store) const {
947 int operands_count = store->length();
948 DCHECK_LE(operands_count, ComputeOperandsCount());
949 for (int i = 0; i < operands_count; i++) {
950 store->set(i, GetOperand(i));
951 }
952 }
953
954 namespace {
955
956 bool CannotDeoptFromAsmCode(Code* code, JSFunction* function) {
957 return code->is_turbofanned() && function->shared()->asm_function() &&
958 !FLAG_turbo_asm_deoptimization;
959 }
960
961 } // namespace
962
963 FrameSummary::FrameSummary(Object* receiver, JSFunction* function,
964 AbstractCode* abstract_code, int code_offset,
965 bool is_constructor, Mode mode)
966 : receiver_(receiver, function->GetIsolate()),
967 function_(function),
968 abstract_code_(abstract_code),
969 code_offset_(code_offset),
970 is_constructor_(is_constructor) {
971 DCHECK(abstract_code->IsBytecodeArray() ||
972 Code::cast(abstract_code)->kind() != Code::OPTIMIZED_FUNCTION ||
973 CannotDeoptFromAsmCode(Code::cast(abstract_code), function) ||
974 mode == kApproximateSummary);
975 }
976
977 FrameSummary FrameSummary::GetFirst(JavaScriptFrame* frame) {
978 List<FrameSummary> frames(FLAG_max_inlining_levels + 1);
979 frame->Summarize(&frames);
980 return frames.first();
981 }
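// Illustrative usage (not part of the original source): summarizing the
// top-most physical frame and dumping it for debugging could look like
//
//   FrameSummary summary = FrameSummary::GetFirst(frame);
//   summary.Print();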
982
983 void FrameSummary::Print() {
984 PrintF("receiver: ");
985 receiver_->ShortPrint();
986 PrintF("\nfunction: ");
987 function_->shared()->DebugName()->ShortPrint();
988 PrintF("\ncode: ");
989 abstract_code_->ShortPrint();
990 if (abstract_code_->IsCode()) {
991 Code* code = abstract_code_->GetCode();
992 if (code->kind() == Code::FUNCTION) PrintF(" UNOPT ");
993 if (code->kind() == Code::OPTIMIZED_FUNCTION) {
994 if (function()->shared()->asm_function()) {
995 DCHECK(CannotDeoptFromAsmCode(code, *function()));
996 PrintF(" ASM ");
997 } else {
998 PrintF(" OPT (approximate)");
999 }
1000 }
1001 } else {
1002 PrintF(" BYTECODE ");
1003 }
1004 PrintF("\npc: %d\n", code_offset_);
1005 }
1006
1007 void OptimizedFrame::Summarize(List<FrameSummary>* frames,
1008 FrameSummary::Mode mode) const {
1009 DCHECK(frames->length() == 0);
1010 DCHECK(is_optimized());
1011
1012 // Delegate to JS frame in absence of turbofan deoptimization.
1013 // TODO(turbofan): Revisit once we support deoptimization across the board.
1014 Code* code = LookupCode();
1015 if (code->kind() == Code::BUILTIN ||
1016 CannotDeoptFromAsmCode(code, function())) {
1017 return JavaScriptFrame::Summarize(frames);
1018 }
1019
1020 DisallowHeapAllocation no_gc;
1021 int deopt_index = Safepoint::kNoDeoptimizationIndex;
1022 DeoptimizationInputData* const data = GetDeoptimizationData(&deopt_index);
1023 if (deopt_index == Safepoint::kNoDeoptimizationIndex) {
1024 DCHECK(data == nullptr);
1025 if (mode == FrameSummary::kApproximateSummary) {
1026 return JavaScriptFrame::Summarize(frames, mode);
1027 }
1028 FATAL("Missing deoptimization information for OptimizedFrame::Summarize.");
1029 }
1030 FixedArray* const literal_array = data->LiteralArray();
1031
1032 TranslationIterator it(data->TranslationByteArray(),
1033 data->TranslationIndex(deopt_index)->value());
1034 Translation::Opcode frame_opcode =
1035 static_cast<Translation::Opcode>(it.Next());
1036 DCHECK_EQ(Translation::BEGIN, frame_opcode);
1037 it.Next(); // Drop frame count.
1038 int jsframe_count = it.Next();
1039
1040 // We create the summary in reverse order because the frames
1041 // in the deoptimization translation are ordered bottom-to-top.
1042 bool is_constructor = IsConstructor();
1043 while (jsframe_count != 0) {
1044 frame_opcode = static_cast<Translation::Opcode>(it.Next());
1045 if (frame_opcode == Translation::JS_FRAME ||
1046 frame_opcode == Translation::INTERPRETED_FRAME) {
1047 jsframe_count--;
1048 BailoutId const bailout_id = BailoutId(it.Next());
1049 SharedFunctionInfo* const shared_info =
1050 SharedFunctionInfo::cast(literal_array->get(it.Next()));
1051 it.Next(); // Skip height.
1052
1053 // The translation commands are ordered and the function is always
1054 // at the first position, and the receiver is next.
1055 Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
1056
1057 // Get the correct function in the optimized frame.
1058 JSFunction* function;
1059 if (opcode == Translation::LITERAL) {
1060 function = JSFunction::cast(literal_array->get(it.Next()));
1061 } else {
1062 CHECK_EQ(opcode, Translation::STACK_SLOT);
1063 function = JSFunction::cast(StackSlotAt(it.Next()));
1064 }
1065 DCHECK_EQ(shared_info, function->shared());
1066
1067 // If we are at a call, the receiver is always in a stack slot.
1068 // Otherwise we are not guaranteed to get the receiver value.
1069 opcode = static_cast<Translation::Opcode>(it.Next());
1070
1071 // Get the correct receiver in the optimized frame.
1072 Object* receiver;
1073 if (opcode == Translation::LITERAL) {
1074 receiver = literal_array->get(it.Next());
1075 } else if (opcode == Translation::STACK_SLOT) {
1076 receiver = StackSlotAt(it.Next());
1077 } else {
1078 // The receiver is not in a stack slot nor in a literal. We give up.
1079 it.Skip(Translation::NumberOfOperandsFor(opcode));
1080 // TODO(3029): Materializing a captured object (or duplicated
1081 // object) is hard, we return undefined for now. This breaks the
1082 // produced stack trace, as constructor frames aren't marked as
1083 // such anymore.
1084 receiver = isolate()->heap()->undefined_value();
1085 }
1086
1087 AbstractCode* abstract_code;
1088
1089 unsigned code_offset;
1090 if (frame_opcode == Translation::JS_FRAME) {
1091 Code* code = shared_info->code();
1092 DeoptimizationOutputData* const output_data =
1093 DeoptimizationOutputData::cast(code->deoptimization_data());
1094 unsigned const entry =
1095 Deoptimizer::GetOutputInfo(output_data, bailout_id, shared_info);
1096 code_offset = FullCodeGenerator::PcField::decode(entry);
1097 abstract_code = AbstractCode::cast(code);
1098 } else {
1099 DCHECK_EQ(frame_opcode, Translation::INTERPRETED_FRAME);
1100         // The BailoutId points to the next bytecode in the bytecode array.
1101         // Subtract 1 to get the end of the current bytecode.
1102 code_offset = bailout_id.ToInt() - 1;
1103 abstract_code = AbstractCode::cast(shared_info->bytecode_array());
1104 }
1105 FrameSummary summary(receiver, function, abstract_code, code_offset,
1106 is_constructor);
1107 frames->Add(summary);
1108 is_constructor = false;
1109 } else if (frame_opcode == Translation::CONSTRUCT_STUB_FRAME) {
1110 // The next encountered JS_FRAME will be marked as a constructor call.
1111 it.Skip(Translation::NumberOfOperandsFor(frame_opcode));
1112 DCHECK(!is_constructor);
1113 is_constructor = true;
1114 } else {
1115 // Skip over operands to advance to the next opcode.
1116 it.Skip(Translation::NumberOfOperandsFor(frame_opcode));
1117 }
1118 }
1119 DCHECK(!is_constructor);
1120 }
1121
1122
1123 int OptimizedFrame::LookupExceptionHandlerInTable(
1124 int* stack_slots, HandlerTable::CatchPrediction* prediction) {
1125 Code* code = LookupCode();
1126 HandlerTable* table = HandlerTable::cast(code->handler_table());
1127 int pc_offset = static_cast<int>(pc() - code->entry());
1128 if (stack_slots) *stack_slots = code->stack_slots();
1129 return table->LookupReturn(pc_offset, prediction);
1130 }
1131
1132
1133 DeoptimizationInputData* OptimizedFrame::GetDeoptimizationData(
1134 int* deopt_index) const {
1135 DCHECK(is_optimized());
1136
1137 JSFunction* opt_function = function();
1138 Code* code = opt_function->code();
1139
1140 // The code object may have been replaced by lazy deoptimization. Fall
1141 // back to a slow search in this case to find the original optimized
1142 // code object.
1143 if (!code->contains(pc())) {
1144 code = isolate()->inner_pointer_to_code_cache()->
1145 GcSafeFindCodeForInnerPointer(pc());
1146 }
1147 DCHECK(code != NULL);
1148 DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
1149
1150 SafepointEntry safepoint_entry = code->GetSafepointEntry(pc());
1151 *deopt_index = safepoint_entry.deoptimization_index();
1152 if (*deopt_index != Safepoint::kNoDeoptimizationIndex) {
1153 return DeoptimizationInputData::cast(code->deoptimization_data());
1154 }
1155 return nullptr;
1156 }
1157
1158
1159 void OptimizedFrame::GetFunctions(List<JSFunction*>* functions) const {
1160 DCHECK(functions->length() == 0);
1161 DCHECK(is_optimized());
1162
1163 // Delegate to JS frame in absence of turbofan deoptimization.
1164 // TODO(turbofan): Revisit once we support deoptimization across the board.
1165 Code* code = LookupCode();
1166 if (code->kind() == Code::BUILTIN ||
1167 CannotDeoptFromAsmCode(code, function())) {
1168 return JavaScriptFrame::GetFunctions(functions);
1169 }
1170
1171 DisallowHeapAllocation no_gc;
1172 int deopt_index = Safepoint::kNoDeoptimizationIndex;
1173 DeoptimizationInputData* const data = GetDeoptimizationData(&deopt_index);
1174 DCHECK_NOT_NULL(data);
1175 DCHECK_NE(Safepoint::kNoDeoptimizationIndex, deopt_index);
1176 FixedArray* const literal_array = data->LiteralArray();
1177
1178 TranslationIterator it(data->TranslationByteArray(),
1179 data->TranslationIndex(deopt_index)->value());
1180 Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
1181 DCHECK_EQ(Translation::BEGIN, opcode);
1182 it.Next(); // Skip frame count.
1183 int jsframe_count = it.Next();
1184
1185 // We insert the frames in reverse order because the frames
1186 // in the deoptimization translation are ordered bottom-to-top.
1187 while (jsframe_count != 0) {
1188 opcode = static_cast<Translation::Opcode>(it.Next());
1189 // Skip over operands to advance to the next opcode.
1190 it.Skip(Translation::NumberOfOperandsFor(opcode));
1191 if (opcode == Translation::JS_FRAME ||
1192 opcode == Translation::INTERPRETED_FRAME) {
1193 jsframe_count--;
1194
1195 // The translation commands are ordered and the function is always at the
1196 // first position.
1197 opcode = static_cast<Translation::Opcode>(it.Next());
1198
1199 // Get the correct function in the optimized frame.
1200 Object* function;
1201 if (opcode == Translation::LITERAL) {
1202 function = literal_array->get(it.Next());
1203 } else {
1204 CHECK_EQ(Translation::STACK_SLOT, opcode);
1205 function = StackSlotAt(it.Next());
1206 }
1207 functions->Add(JSFunction::cast(function));
1208 }
1209 }
1210 }
1211
1212
1213 int OptimizedFrame::StackSlotOffsetRelativeToFp(int slot_index) {
1214 return StandardFrameConstants::kCallerSPOffset -
1215 ((slot_index + 1) * kPointerSize);
1216 }
1217
1218
1219 Object* OptimizedFrame::StackSlotAt(int index) const {
1220 return Memory::Object_at(fp() + StackSlotOffsetRelativeToFp(index));
1221 }
1222
1223 int InterpretedFrame::LookupExceptionHandlerInTable(
1224 int* context_register, HandlerTable::CatchPrediction* prediction) {
1225 BytecodeArray* bytecode = function()->shared()->bytecode_array();
1226 HandlerTable* table = HandlerTable::cast(bytecode->handler_table());
1227 int pc_offset = GetBytecodeOffset() + 1; // Point after current bytecode.
1228 return table->LookupRange(pc_offset, context_register, prediction);
1229 }
1230
1231 int InterpretedFrame::GetBytecodeOffset() const {
1232 const int index = InterpreterFrameConstants::kBytecodeOffsetExpressionIndex;
1233 DCHECK_EQ(
1234 InterpreterFrameConstants::kBytecodeOffsetFromFp,
1235 InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
1236 int raw_offset = Smi::cast(GetExpression(index))->value();
1237 return raw_offset - BytecodeArray::kHeaderSize + kHeapObjectTag;
1238 }
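// Worked example (illustrative, not part of the original source): the
// expression slot stores the raw offset into the BytecodeArray object, so a
// stored Smi of BytecodeArray::kHeaderSize - kHeapObjectTag + 7 makes
// GetBytecodeOffset() return 7, and PatchBytecodeOffset(7) writes exactly
// that raw value back.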
1239
1240 void InterpretedFrame::PatchBytecodeOffset(int new_offset) {
1241 const int index = InterpreterFrameConstants::kBytecodeOffsetExpressionIndex;
1242 DCHECK_EQ(
1243 InterpreterFrameConstants::kBytecodeOffsetFromFp,
1244 InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
1245 int raw_offset = new_offset + BytecodeArray::kHeaderSize - kHeapObjectTag;
1246 SetExpression(index, Smi::FromInt(raw_offset));
1247 }
1248
1249 BytecodeArray* InterpretedFrame::GetBytecodeArray() const {
1250 const int index = InterpreterFrameConstants::kBytecodeArrayExpressionIndex;
1251 DCHECK_EQ(
1252 InterpreterFrameConstants::kBytecodeArrayFromFp,
1253 InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
1254 return BytecodeArray::cast(GetExpression(index));
1255 }
1256
1257 void InterpretedFrame::PatchBytecodeArray(BytecodeArray* bytecode_array) {
1258 const int index = InterpreterFrameConstants::kBytecodeArrayExpressionIndex;
1259 DCHECK_EQ(
1260 InterpreterFrameConstants::kBytecodeArrayFromFp,
1261 InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
1262 SetExpression(index, bytecode_array);
1263 }
1264
1265 Object* InterpretedFrame::ReadInterpreterRegister(int register_index) const {
1266 const int index = InterpreterFrameConstants::kRegisterFileExpressionIndex;
1267 DCHECK_EQ(
1268 InterpreterFrameConstants::kRegisterFileFromFp,
1269 InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
1270 return GetExpression(index + register_index);
1271 }
1272
1273 void InterpretedFrame::WriteInterpreterRegister(int register_index,
1274 Object* value) {
1275 const int index = InterpreterFrameConstants::kRegisterFileExpressionIndex;
1276 DCHECK_EQ(
1277 InterpreterFrameConstants::kRegisterFileFromFp,
1278 InterpreterFrameConstants::kExpressionsOffset - index * kPointerSize);
1279 return SetExpression(index + register_index, value);
1280 }
1281
1282 void InterpretedFrame::Summarize(List<FrameSummary>* functions,
1283 FrameSummary::Mode mode) const {
1284 DCHECK(functions->length() == 0);
1285 AbstractCode* abstract_code =
1286 AbstractCode::cast(function()->shared()->bytecode_array());
1287 FrameSummary summary(receiver(), function(), abstract_code,
1288 GetBytecodeOffset(), IsConstructor());
1289 functions->Add(summary);
1290 }
1291
1292 int ArgumentsAdaptorFrame::GetNumberOfIncomingArguments() const {
1293 return Smi::cast(GetExpression(0))->value();
1294 }
1295
1296 int ArgumentsAdaptorFrame::GetLength(Address fp) {
1297 const int offset = ArgumentsAdaptorFrameConstants::kLengthOffset;
1298 return Smi::cast(Memory::Object_at(fp + offset))->value();
1299 }
1300
1301 Code* ArgumentsAdaptorFrame::unchecked_code() const {
1302 return isolate()->builtins()->builtin(
1303 Builtins::kArgumentsAdaptorTrampoline);
1304 }
1305
1306 void BuiltinFrame::Print(StringStream* accumulator, PrintMode mode,
1307 int index) const {
1308 // TODO(bmeurer)
1309 }
1310
1311 int BuiltinFrame::GetNumberOfIncomingArguments() const {
1312 return Smi::cast(GetExpression(0))->value();
1313 }
1314
1315 Address InternalFrame::GetCallerStackPointer() const {
1316 // Internal frames have no arguments. The stack pointer of the
1317 // caller is at a fixed offset from the frame pointer.
1318 return fp() + StandardFrameConstants::kCallerSPOffset;
1319 }
1320
1321 Code* InternalFrame::unchecked_code() const {
1322 const int offset = InternalFrameConstants::kCodeOffset;
1323 Object* code = Memory::Object_at(fp() + offset);
1324 DCHECK(code != NULL);
1325 return reinterpret_cast<Code*>(code);
1326 }
1327
1328
1329 void StackFrame::PrintIndex(StringStream* accumulator,
1330 PrintMode mode,
1331 int index) {
1332 accumulator->Add((mode == OVERVIEW) ? "%5d: " : "[%d]: ", index);
1333 }
1334
1335 void WasmFrame::Print(StringStream* accumulator, PrintMode mode,
1336 int index) const {
1337 accumulator->Add("wasm frame");
1338 }
1339
1340 Code* WasmFrame::unchecked_code() const {
1341 return static_cast<Code*>(isolate()->FindCodeObject(pc()));
1342 }
1343
1344 void WasmFrame::Iterate(ObjectVisitor* v) const { IterateCompiledFrame(v); }
1345
1346 Address WasmFrame::GetCallerStackPointer() const {
1347 return fp() + ExitFrameConstants::kCallerSPOffset;
1348 }
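// For WASM frames the code object's deoptimization data is a two-element
// FixedArray: slot 0 holds the wasm object and slot 1 the function index
// (or undefined), as read by the two accessors below (descriptive comment
// added for clarity).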
1349
1350 Object* WasmFrame::wasm_obj() {
1351 FixedArray* deopt_data = LookupCode()->deoptimization_data();
1352 DCHECK(deopt_data->length() == 2);
1353 return deopt_data->get(0);
1354 }
1355
1356 uint32_t WasmFrame::function_index() {
1357 FixedArray* deopt_data = LookupCode()->deoptimization_data();
1358 DCHECK(deopt_data->length() == 2);
1359 Object* func_index_obj = deopt_data->get(1);
1360 if (func_index_obj->IsUndefined(isolate())) return static_cast<uint32_t>(-1);
1361 if (func_index_obj->IsSmi()) return Smi::cast(func_index_obj)->value();
1362 DCHECK(func_index_obj->IsHeapNumber());
1363 uint32_t val = static_cast<uint32_t>(-1);
1364 func_index_obj->ToUint32(&val);
1365 DCHECK(val != static_cast<uint32_t>(-1));
1366 return val;
1367 }
1368
1369 namespace {
1370
1371
1372 void PrintFunctionSource(StringStream* accumulator, SharedFunctionInfo* shared,
1373 Code* code) {
1374 if (FLAG_max_stack_trace_source_length != 0 && code != NULL) {
1375 std::ostringstream os;
1376 os << "--------- s o u r c e c o d e ---------\n"
1377 << SourceCodeOf(shared, FLAG_max_stack_trace_source_length)
1378 << "\n-----------------------------------------\n";
1379 accumulator->Add(os.str().c_str());
1380 }
1381 }
1382
1383
1384 } // namespace
1385
1386
1387 void JavaScriptFrame::Print(StringStream* accumulator,
1388 PrintMode mode,
1389 int index) const {
1390 DisallowHeapAllocation no_gc;
1391 Object* receiver = this->receiver();
1392 JSFunction* function = this->function();
1393
1394 accumulator->PrintSecurityTokenIfChanged(function);
1395 PrintIndex(accumulator, mode, index);
1396 Code* code = NULL;
1397 if (IsConstructor()) accumulator->Add("new ");
1398 accumulator->PrintFunction(function, receiver, &code);
1399
1400 // Get scope information for nicer output, if possible. If code is NULL, or
1401 // doesn't contain scope info, scope_info will return 0 for the number of
1402 // parameters, stack local variables, context local variables, stack slots,
1403 // or context slots.
1404 SharedFunctionInfo* shared = function->shared();
1405 ScopeInfo* scope_info = shared->scope_info();
1406 Object* script_obj = shared->script();
1407 if (script_obj->IsScript()) {
1408 Script* script = Script::cast(script_obj);
1409 accumulator->Add(" [");
1410 accumulator->PrintName(script->name());
1411
1412 Address pc = this->pc();
1413 if (code != NULL && code->kind() == Code::FUNCTION &&
1414 pc >= code->instruction_start() && pc < code->instruction_end()) {
1415 int offset = static_cast<int>(pc - code->instruction_start());
1416 int source_pos = code->SourcePosition(offset);
1417 int line = script->GetLineNumber(source_pos) + 1;
1418 accumulator->Add(":%d] [pc=%p]", line, pc);
1419 } else if (is_interpreted()) {
1420 const InterpretedFrame* iframe =
1421 reinterpret_cast<const InterpretedFrame*>(this);
1422 BytecodeArray* bytecodes = iframe->GetBytecodeArray();
1423 int offset = iframe->GetBytecodeOffset();
1424 int source_pos = bytecodes->SourcePosition(offset);
1425 int line = script->GetLineNumber(source_pos) + 1;
1426 accumulator->Add(":%d] [bytecode=%p offset=%d]", line, bytecodes, offset);
1427 } else {
1428 int function_start_pos = shared->start_position();
1429 int line = script->GetLineNumber(function_start_pos) + 1;
1430 accumulator->Add(":~%d] [pc=%p]", line, pc);
1431 }
1432 }
1433
1434 accumulator->Add("(this=%o", receiver);
1435
1436 // Print the parameters.
1437 int parameters_count = ComputeParametersCount();
1438 for (int i = 0; i < parameters_count; i++) {
1439 accumulator->Add(",");
1440 // If we have a name for the parameter we print it. Nameless
1441 // parameters are either because we have more actual parameters
1442 // than formal parameters or because we have no scope information.
1443 if (i < scope_info->ParameterCount()) {
1444 accumulator->PrintName(scope_info->ParameterName(i));
1445 accumulator->Add("=");
1446 }
1447 accumulator->Add("%o", GetParameter(i));
1448 }
1449
1450 accumulator->Add(")");
1451 if (mode == OVERVIEW) {
1452 accumulator->Add("\n");
1453 return;
1454 }
1455 if (is_optimized()) {
1456 accumulator->Add(" {\n// optimized frame\n");
1457 PrintFunctionSource(accumulator, shared, code);
1458 accumulator->Add("}\n");
1459 return;
1460 }
1461 accumulator->Add(" {\n");
1462
1463 // Compute the number of locals and expression stack elements.
1464 int stack_locals_count = scope_info->StackLocalCount();
1465 int heap_locals_count = scope_info->ContextLocalCount();
1466 int expressions_count = ComputeExpressionsCount();
1467
1468 // Print stack-allocated local variables.
1469 if (stack_locals_count > 0) {
1470 accumulator->Add(" // stack-allocated locals\n");
1471 }
1472 for (int i = 0; i < stack_locals_count; i++) {
1473 accumulator->Add(" var ");
1474 accumulator->PrintName(scope_info->StackLocalName(i));
1475 accumulator->Add(" = ");
1476 if (i < expressions_count) {
1477 accumulator->Add("%o", GetExpression(i));
1478 } else {
1479 accumulator->Add("// no expression found - inconsistent frame?");
1480 }
1481 accumulator->Add("\n");
1482 }
1483
1484 // Try to get hold of the context of this frame.
1485 Context* context = NULL;
1486 if (this->context() != NULL && this->context()->IsContext()) {
1487 context = Context::cast(this->context());
1488 }
1489 while (context->IsWithContext()) {
1490 context = context->previous();
1491 DCHECK(context != NULL);
1492 }

  // Print heap-allocated local variables.
  if (heap_locals_count > 0) {
    accumulator->Add(" // heap-allocated locals\n");
  }
  for (int i = 0; i < heap_locals_count; i++) {
    accumulator->Add(" var ");
    accumulator->PrintName(scope_info->ContextLocalName(i));
    accumulator->Add(" = ");
    if (context != NULL) {
      int index = Context::MIN_CONTEXT_SLOTS + i;
      if (index < context->length()) {
        accumulator->Add("%o", context->get(index));
      } else {
        accumulator->Add(
            "// warning: missing context slot - inconsistent frame?");
      }
    } else {
      accumulator->Add("// warning: no context found - inconsistent frame?");
    }
    accumulator->Add("\n");
  }

  // Print the expression stack.
  int expressions_start = stack_locals_count;
  if (expressions_start < expressions_count) {
    accumulator->Add(" // expression stack (top to bottom)\n");
  }
  for (int i = expressions_count - 1; i >= expressions_start; i--) {
    accumulator->Add(" [%02d] : %o\n", i, GetExpression(i));
  }

  PrintFunctionSource(accumulator, shared, code);

  accumulator->Add("}\n\n");
}
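// In DETAILS mode the code above produces output roughly of the shape below
// (illustrative only; object and pointer rendering depends on the %o and %p
// formatting of StringStream):
//
//   ... [myscript.js:42] [pc=0x...](this=<receiver>,x=1,y=2) {
//     // stack-allocated locals
//     var tmp = 3
//     // expression stack (top to bottom)
//     [02] : <some object>
//     <source printed by PrintFunctionSource>
//   }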


void ArgumentsAdaptorFrame::Print(StringStream* accumulator,
                                  PrintMode mode,
                                  int index) const {
  int actual = ComputeParametersCount();
  JSFunction* function = this->function();
  int expected = function->shared()->internal_formal_parameter_count();

  PrintIndex(accumulator, mode, index);
  accumulator->Add("arguments adaptor frame: %d->%d", actual, expected);
  if (mode == OVERVIEW) {
    accumulator->Add("\n");
    return;
  }
  accumulator->Add(" {\n");

  // Print actual arguments.
  if (actual > 0) accumulator->Add(" // actual arguments\n");
  for (int i = 0; i < actual; i++) {
    accumulator->Add(" [%02d] : %o", i, GetParameter(i));
    if (i >= expected) {
      accumulator->Add(" // not passed to callee");
    }
    accumulator->Add("\n");
  }

  accumulator->Add("}\n\n");
}
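// For example, in DETAILS mode a call that passes three arguments to a
// function declaring two formal parameters is printed roughly as:
//
//   arguments adaptor frame: 3->2 {
//     // actual arguments
//     [00] : <arg0>
//     [01] : <arg1>
//     [02] : <arg2> // not passed to callee
//   }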


void EntryFrame::Iterate(ObjectVisitor* v) const {
  IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
}


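// Visits every tagged slot from the stack pointer up to and including the
// slot at fp() + kLastObjectOffset: the expression stack, the stack-allocated
// locals and the tagged slots of the fixed frame header. The "+ 1" on the
// limit is needed because VisitPointers treats its second argument as an
// exclusive bound.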
void StandardFrame::IterateExpressions(ObjectVisitor* v) const {
  const int offset = StandardFrameConstants::kLastObjectOffset;
  Object** base = &Memory::Object_at(sp());
  Object** limit = &Memory::Object_at(fp() + offset) + 1;
  v->VisitPointers(base, limit);
}


void JavaScriptFrame::Iterate(ObjectVisitor* v) const {
  IterateExpressions(v);
  IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
}

void InternalFrame::Iterate(ObjectVisitor* v) const {
  // Internal frames only have object pointers on the expression stack
  // as they never have any arguments.
  IterateExpressions(v);
  IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
}


void StubFailureTrampolineFrame::Iterate(ObjectVisitor* v) const {
  Object** base = &Memory::Object_at(sp());
  Object** limit = &Memory::Object_at(
      fp() + StubFailureTrampolineFrameConstants::kFixedHeaderBottomOffset);
  v->VisitPointers(base, limit);
  base = &Memory::Object_at(fp() + StandardFrameConstants::kFunctionOffset);
  const int offset = StandardFrameConstants::kLastObjectOffset;
  limit = &Memory::Object_at(fp() + offset) + 1;
  v->VisitPointers(base, limit);
  IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
}


Address StubFailureTrampolineFrame::GetCallerStackPointer() const {
  return fp() + StandardFrameConstants::kCallerSPOffset;
}


Code* StubFailureTrampolineFrame::unchecked_code() const {
  // Only dereference |trampoline| if the stub was actually found in the code
  // stub cache; otherwise the pointer is left uninitialized.
  Code* trampoline;
  if (StubFailureTrampolineStub(isolate(), NOT_JS_FUNCTION_STUB_MODE)
          .FindCodeInCache(&trampoline) &&
      trampoline->contains(pc())) {
    return trampoline;
  }

  if (StubFailureTrampolineStub(isolate(), JS_FUNCTION_STUB_MODE)
          .FindCodeInCache(&trampoline) &&
      trampoline->contains(pc())) {
    return trampoline;
  }

  UNREACHABLE();
  return NULL;
}


// -------------------------------------------------------------------------


JavaScriptFrame* StackFrameLocator::FindJavaScriptFrame(int n) {
  DCHECK(n >= 0);
  for (int i = 0; i <= n; i++) {
    while (!iterator_.frame()->is_java_script()) iterator_.Advance();
    if (i == n) return JavaScriptFrame::cast(iterator_.frame());
    iterator_.Advance();
  }
  UNREACHABLE();
  return NULL;
}


// -------------------------------------------------------------------------


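// The GcSafe* helpers below may run while a GC is relocating objects, at
// which point a code object's map word can already hold a forwarding pointer.
// They therefore read the map (and from it the object's size) through the
// forwarding address instead of assuming a plain map pointer.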
static Map* GcSafeMapOfCodeSpaceObject(HeapObject* object) {
  MapWord map_word = object->map_word();
  return map_word.IsForwardingAddress() ?
      map_word.ToForwardingAddress()->map() : map_word.ToMap();
}


static int GcSafeSizeOfCodeSpaceObject(HeapObject* object) {
  return object->SizeFromMap(GcSafeMapOfCodeSpaceObject(object));
}


#ifdef DEBUG
static bool GcSafeCodeContains(HeapObject* code, Address addr) {
  Map* map = GcSafeMapOfCodeSpaceObject(code);
  DCHECK(map == code->GetHeap()->code_map());
  Address start = code->address();
  Address end = code->address() + code->SizeFromMap(map);
  return start <= addr && addr < end;
}
#endif


Code* InnerPointerToCodeCache::GcSafeCastToCode(HeapObject* object,
                                                Address inner_pointer) {
  Code* code = reinterpret_cast<Code*>(object);
  DCHECK(code != NULL && GcSafeCodeContains(code, inner_pointer));
  return code;
}


Code* InnerPointerToCodeCache::GcSafeFindCodeForInnerPointer(
    Address inner_pointer) {
  Heap* heap = isolate_->heap();

  // Check if the inner pointer points into a large object chunk.
  LargePage* large_page = heap->lo_space()->FindPage(inner_pointer);
  if (large_page != NULL) {
    return GcSafeCastToCode(large_page->GetObject(), inner_pointer);
  }

  if (!heap->code_space()->Contains(inner_pointer)) {
    return nullptr;
  }

  // Iterate through the page until we reach the end or find an object starting
  // after the inner pointer.
  Page* page = Page::FromAddress(inner_pointer);

  DCHECK_EQ(page->owner(), heap->code_space());
  heap->mark_compact_collector()->sweeper().SweepOrWaitUntilSweepingCompleted(
      page);

  Address addr = page->skip_list()->StartFor(inner_pointer);

  Address top = heap->code_space()->top();
  Address limit = heap->code_space()->limit();

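  // Scan the page object by object, using GC-safe sizes, until the object
  // that spans |inner_pointer| is found. The skip list above yields a start
  // address at or before |inner_pointer|; the top/limit check below merely
  // jumps over the unused part of the current linear allocation area, which
  // contains no objects.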
  while (true) {
    if (addr == top && addr != limit) {
      addr = limit;
      continue;
    }

    HeapObject* obj = HeapObject::FromAddress(addr);
    int obj_size = GcSafeSizeOfCodeSpaceObject(obj);
    Address next_addr = addr + obj_size;
    if (next_addr > inner_pointer) return GcSafeCastToCode(obj, inner_pointer);
    addr = next_addr;
  }
}


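// Maps an inner pointer (typically a return address found on the stack) back
// to the Code object containing it. Results are memoized in a small
// direct-mapped cache indexed by a hash of the pointer, so repeated lookups
// for the same pc avoid the page scan done by GcSafeFindCodeForInnerPointer.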
InnerPointerToCodeCache::InnerPointerToCodeCacheEntry*
    InnerPointerToCodeCache::GetCacheEntry(Address inner_pointer) {
  isolate_->counters()->pc_to_code()->Increment();
  DCHECK(base::bits::IsPowerOfTwo32(kInnerPointerToCodeCacheSize));
  uint32_t hash = ComputeIntegerHash(ObjectAddressForHashing(inner_pointer),
                                     v8::internal::kZeroHashSeed);
  uint32_t index = hash & (kInnerPointerToCodeCacheSize - 1);
  InnerPointerToCodeCacheEntry* entry = cache(index);
  if (entry->inner_pointer == inner_pointer) {
    isolate_->counters()->pc_to_code_cached()->Increment();
    DCHECK(entry->code == GcSafeFindCodeForInnerPointer(inner_pointer));
  } else {
    // Because this code may be interrupted by a profiling signal that
    // also queries the cache, we cannot update inner_pointer before the code
    // has been set. Otherwise, we risk trying to use a cache entry before
    // the code has been computed.
    entry->code = GcSafeFindCodeForInnerPointer(inner_pointer);
    entry->safepoint_entry.Reset();
    entry->inner_pointer = inner_pointer;
  }
  return entry;
}


// -------------------------------------------------------------------------


int NumRegs(RegList reglist) { return base::bits::CountPopulation(reglist); }


struct JSCallerSavedCodeData {
  int reg_code[kNumJSCallerSaved];
};

JSCallerSavedCodeData caller_saved_code_data;

void SetUpJSCallerSavedCodeData() {
  int i = 0;
  for (int r = 0; r < kNumRegs; r++)
    if ((kJSCallerSaved & (1 << r)) != 0)
      caller_saved_code_data.reg_code[i++] = r;

  DCHECK(i == kNumJSCallerSaved);
}
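// Illustrative example (the actual register assignment is architecture
// specific): if kJSCallerSaved had bits 0, 2 and 3 set, the loop above would
// fill reg_code with {0, 2, 3}, and JSCallerSavedCode(1) would return 2.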


int JSCallerSavedCode(int n) {
  DCHECK(0 <= n && n < kNumJSCallerSaved);
  return caller_saved_code_data.reg_code[n];
}


#define DEFINE_WRAPPER(type, field)                              \
class field##_Wrapper : public ZoneObject {                      \
 public: /* NOLINT */                                            \
  field##_Wrapper(const field& original) : frame_(original) {    \
  }                                                              \
  field frame_;                                                  \
};
STACK_FRAME_TYPE_LIST(DEFINE_WRAPPER)
#undef DEFINE_WRAPPER
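// The wrapper classes generated above exist only so that a stack frame, which
// normally lives in one of the iterator-owned singletons, can be copied by
// value into a zone. AllocateFrameCopy below switches on the dynamic frame
// type to instantiate the matching wrapper.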

static StackFrame* AllocateFrameCopy(StackFrame* frame, Zone* zone) {
#define FRAME_TYPE_CASE(type, field)                                   \
  case StackFrame::type: {                                             \
    field##_Wrapper* wrapper =                                         \
        new(zone) field##_Wrapper(*(reinterpret_cast<field*>(frame))); \
    return &wrapper->frame_;                                           \
  }

  switch (frame->type()) {
    STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
    default: UNREACHABLE();
  }
#undef FRAME_TYPE_CASE
  return NULL;
}


Vector<StackFrame*> CreateStackMap(Isolate* isolate, Zone* zone) {
  ZoneList<StackFrame*> list(10, zone);
  for (StackFrameIterator it(isolate); !it.done(); it.Advance()) {
    StackFrame* frame = AllocateFrameCopy(it.frame(), zone);
    list.Add(frame, zone);
  }
  return list.ToVector();
}
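// Illustrative use of CreateStackMap (a sketch only; the Zone construction
// and the string accumulator setup vary between V8 versions):
//
//   Zone zone(isolate->allocator());
//   Vector<StackFrame*> frames = CreateStackMap(isolate, &zone);
//   HeapStringAllocator string_allocator;
//   StringStream accumulator(&string_allocator);
//   for (int i = 0; i < frames.length(); i++) {
//     frames[i]->Print(&accumulator, StackFrame::OVERVIEW, i);
//   }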


}  // namespace internal
}  // namespace v8