1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/frames.h"
6 
7 #include <sstream>
8 
9 #include "src/ast/ast.h"
10 #include "src/ast/scopeinfo.h"
11 #include "src/base/bits.h"
12 #include "src/deoptimizer.h"
13 #include "src/frames-inl.h"
14 #include "src/full-codegen/full-codegen.h"
15 #include "src/register-configuration.h"
16 #include "src/safepoint-table.h"
17 #include "src/string-stream.h"
18 #include "src/vm-state-inl.h"
19 
20 namespace v8 {
21 namespace internal {
22 
23 ReturnAddressLocationResolver
24     StackFrame::return_address_location_resolver_ = NULL;
25 
26 
27 // Iterator that supports traversing the stack handlers of a
28 // particular frame. Needs to know the top of the handler chain.
29 class StackHandlerIterator BASE_EMBEDDED {
30  public:
31   StackHandlerIterator(const StackFrame* frame, StackHandler* handler)
32       : limit_(frame->fp()), handler_(handler) {
33     // Make sure the handler has already been unwound to this frame.
34     DCHECK(frame->sp() <= handler->address());
35   }
36 
37   StackHandler* handler() const { return handler_; }
38 
39   bool done() {
40     return handler_ == NULL || handler_->address() > limit_;
41   }
42   void Advance() {
43     DCHECK(!done());
44     handler_ = handler_->next();
45   }
46 
47  private:
48   const Address limit_;
49   StackHandler* handler_;
50 };
51 
52 
53 // -------------------------------------------------------------------------
54 
55 
56 #define INITIALIZE_SINGLETON(type, field) field##_(this),
57 StackFrameIteratorBase::StackFrameIteratorBase(Isolate* isolate,
58                                                bool can_access_heap_objects)
59     : isolate_(isolate),
60       STACK_FRAME_TYPE_LIST(INITIALIZE_SINGLETON)
61       frame_(NULL), handler_(NULL),
62       can_access_heap_objects_(can_access_heap_objects) {
63 }
64 #undef INITIALIZE_SINGLETON
65 
66 
67 StackFrameIterator::StackFrameIterator(Isolate* isolate)
68     : StackFrameIteratorBase(isolate, true) {
69   Reset(isolate->thread_local_top());
70 }
71 
72 
73 StackFrameIterator::StackFrameIterator(Isolate* isolate, ThreadLocalTop* t)
74     : StackFrameIteratorBase(isolate, true) {
75   Reset(t);
76 }
77 
78 
79 void StackFrameIterator::Advance() {
80   DCHECK(!done());
81   // Compute the state of the calling frame before restoring
82   // callee-saved registers and unwinding handlers. This allows the
83   // frame code that computes the caller state to access the top
84   // handler and the value of any callee-saved register if needed.
85   StackFrame::State state;
86   StackFrame::Type type = frame_->GetCallerState(&state);
87 
88   // Unwind handlers corresponding to the current frame.
89   StackHandlerIterator it(frame_, handler_);
90   while (!it.done()) it.Advance();
91   handler_ = it.handler();
92 
93   // Advance to the calling frame.
94   frame_ = SingletonFor(type, &state);
95 
96   // When we're done iterating over the stack frames, the handler
97   // chain must have been completely unwound.
98   DCHECK(!done() || handler_ == NULL);
99 }
100 
101 
102 void StackFrameIterator::Reset(ThreadLocalTop* top) {
103   StackFrame::State state;
104   StackFrame::Type type = ExitFrame::GetStateForFramePointer(
105       Isolate::c_entry_fp(top), &state);
106   handler_ = StackHandler::FromAddress(Isolate::handler(top));
107   if (SingletonFor(type) == NULL) return;
108   frame_ = SingletonFor(type, &state);
109 }
110 
111 
112 StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type,
113                                              StackFrame::State* state) {
114   if (type == StackFrame::NONE) return NULL;
115   StackFrame* result = SingletonFor(type);
116   DCHECK(result != NULL);
117   result->state_ = *state;
118   return result;
119 }
120 
121 
122 StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type) {
123 #define FRAME_TYPE_CASE(type, field) \
124   case StackFrame::type: result = &field##_; break;
125 
126   StackFrame* result = NULL;
127   switch (type) {
128     case StackFrame::NONE: return NULL;
129     STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
130     default: break;
131   }
132   return result;
133 
134 #undef FRAME_TYPE_CASE
135 }
136 
137 
138 // -------------------------------------------------------------------------
139 
140 
141 JavaScriptFrameIterator::JavaScriptFrameIterator(
142     Isolate* isolate, StackFrame::Id id)
143     : iterator_(isolate) {
144   while (!done()) {
145     Advance();
146     if (frame()->id() == id) return;
147   }
148 }
149 
150 
151 void JavaScriptFrameIterator::Advance() {
152   do {
153     iterator_.Advance();
154   } while (!iterator_.done() && !iterator_.frame()->is_java_script());
155 }
156 
157 
158 void JavaScriptFrameIterator::AdvanceToArgumentsFrame() {
159   if (!frame()->has_adapted_arguments()) return;
160   iterator_.Advance();
161   DCHECK(iterator_.frame()->is_arguments_adaptor());
162 }
163 
164 
165 // -------------------------------------------------------------------------
166 
167 
168 StackTraceFrameIterator::StackTraceFrameIterator(Isolate* isolate)
169     : JavaScriptFrameIterator(isolate) {
170   if (!done() && !IsValidFrame()) Advance();
171 }
172 
173 
174 void StackTraceFrameIterator::Advance() {
175   while (true) {
176     JavaScriptFrameIterator::Advance();
177     if (done()) return;
178     if (IsValidFrame()) return;
179   }
180 }
181 
182 
183 bool StackTraceFrameIterator::IsValidFrame() {
184     if (!frame()->function()->IsJSFunction()) return false;
185     Object* script = frame()->function()->shared()->script();
186     // Don't show functions from native scripts to the user.
187     return (script->IsScript() &&
188             Script::TYPE_NATIVE != Script::cast(script)->type());
189 }
190 
191 
192 // -------------------------------------------------------------------------
193 
194 
195 SafeStackFrameIterator::SafeStackFrameIterator(
196     Isolate* isolate,
197     Address fp, Address sp, Address js_entry_sp)
198     : StackFrameIteratorBase(isolate, false),
199       low_bound_(sp),
200       high_bound_(js_entry_sp),
201       top_frame_type_(StackFrame::NONE),
202       external_callback_scope_(isolate->external_callback_scope()) {
203   StackFrame::State state;
204   StackFrame::Type type;
205   ThreadLocalTop* top = isolate->thread_local_top();
206   if (IsValidTop(top)) {
207     type = ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state);
208     top_frame_type_ = type;
209   } else if (IsValidStackAddress(fp)) {
210     DCHECK(fp != NULL);
211     state.fp = fp;
212     state.sp = sp;
213     state.pc_address = StackFrame::ResolveReturnAddressLocation(
214         reinterpret_cast<Address*>(StandardFrame::ComputePCAddress(fp)));
215     // StackFrame::ComputeType will read both kContextOffset and kMarkerOffset,
216     // so we check only that kMarkerOffset is within the stack bounds and do a
217     // compile-time check that the kContextOffset slot is pushed on the stack
218     // before kMarkerOffset.
219     STATIC_ASSERT(StandardFrameConstants::kMarkerOffset <
220                   StandardFrameConstants::kContextOffset);
221     Address frame_marker = fp + StandardFrameConstants::kMarkerOffset;
222     if (IsValidStackAddress(frame_marker)) {
223       type = StackFrame::ComputeType(this, &state);
224       top_frame_type_ = type;
225     } else {
226       // Mark the frame as JAVA_SCRIPT if we cannot determine its type.
227       // The frame will be skipped anyway.
228       type = StackFrame::JAVA_SCRIPT;
229       // Top frame is incomplete so we cannot reliably determine its type.
230       top_frame_type_ = StackFrame::NONE;
231     }
232   } else {
233     return;
234   }
235   if (SingletonFor(type) == NULL) return;
236   frame_ = SingletonFor(type, &state);
237   if (frame_ == NULL) return;
238 
239   Advance();
240 
241   if (frame_ != NULL && !frame_->is_exit() &&
242       external_callback_scope_ != NULL &&
243       external_callback_scope_->scope_address() < frame_->fp()) {
244     // Skip top ExternalCallbackScope if we already advanced to a JS frame
245     // under it. The sampler will take this top external callback anyway.
246     external_callback_scope_ = external_callback_scope_->previous();
247   }
248 }
249 
250 
251 bool SafeStackFrameIterator::IsValidTop(ThreadLocalTop* top) const {
252   Address c_entry_fp = Isolate::c_entry_fp(top);
253   if (!IsValidExitFrame(c_entry_fp)) return false;
254   // There should be at least one JS_ENTRY stack handler.
255   Address handler = Isolate::handler(top);
256   if (handler == NULL) return false;
257   // Check that there are no js frames on top of the native frames.
258   return c_entry_fp < handler;
259 }
260 
261 
262 void SafeStackFrameIterator::AdvanceOneFrame() {
263   DCHECK(!done());
264   StackFrame* last_frame = frame_;
265   Address last_sp = last_frame->sp(), last_fp = last_frame->fp();
266   // Before advancing to the next stack frame, perform pointer validity tests.
267   if (!IsValidFrame(last_frame) || !IsValidCaller(last_frame)) {
268     frame_ = NULL;
269     return;
270   }
271 
272   // Advance to the previous frame.
273   StackFrame::State state;
274   StackFrame::Type type = frame_->GetCallerState(&state);
275   frame_ = SingletonFor(type, &state);
276   if (frame_ == NULL) return;
277 
278   // Check that we have actually moved to the previous frame in the stack.
279   if (frame_->sp() < last_sp || frame_->fp() < last_fp) {
280     frame_ = NULL;
281   }
282 }
283 
284 
285 bool SafeStackFrameIterator::IsValidFrame(StackFrame* frame) const {
286   return IsValidStackAddress(frame->sp()) && IsValidStackAddress(frame->fp());
287 }
288 
289 
290 bool SafeStackFrameIterator::IsValidCaller(StackFrame* frame) {
291   StackFrame::State state;
292   if (frame->is_entry() || frame->is_entry_construct()) {
293     // See EntryFrame::GetCallerState. It computes the caller FP address
294     // and calls ExitFrame::GetStateForFramePointer on it. We need to be
295     // sure that caller FP address is valid.
296     Address caller_fp = Memory::Address_at(
297         frame->fp() + EntryFrameConstants::kCallerFPOffset);
298     if (!IsValidExitFrame(caller_fp)) return false;
299   } else if (frame->is_arguments_adaptor()) {
300     // See ArgumentsAdaptorFrame::GetCallerStackPointer. It assumes that
301     // the number of arguments is stored on the stack as a Smi. We need to
302     // check that it really is a Smi.
303     Object* number_of_args = reinterpret_cast<ArgumentsAdaptorFrame*>(frame)->
304         GetExpression(0);
305     if (!number_of_args->IsSmi()) {
306       return false;
307     }
308   }
309   frame->ComputeCallerState(&state);
310   return IsValidStackAddress(state.sp) && IsValidStackAddress(state.fp) &&
311       SingletonFor(frame->GetCallerState(&state)) != NULL;
312 }
313 
314 
315 bool SafeStackFrameIterator::IsValidExitFrame(Address fp) const {
316   if (!IsValidStackAddress(fp)) return false;
317   Address sp = ExitFrame::ComputeStackPointer(fp);
318   if (!IsValidStackAddress(sp)) return false;
319   StackFrame::State state;
320   ExitFrame::FillState(fp, sp, &state);
321   return *state.pc_address != NULL;
322 }
323 
324 
325 void SafeStackFrameIterator::Advance() {
326   while (true) {
327     AdvanceOneFrame();
328     if (done()) return;
329     if (frame_->is_java_script()) return;
330     if (frame_->is_exit() && external_callback_scope_) {
331       // Some of the EXIT frames may have ExternalCallbackScope allocated on
332       // top of them. In that case the scope corresponds to the first EXIT
333       // frame beneath it. There may be other EXIT frames on top of the
334       // ExternalCallbackScope, just skip them as we cannot collect any useful
335       // information about them.
336       if (external_callback_scope_->scope_address() < frame_->fp()) {
337         frame_->state_.pc_address =
338             external_callback_scope_->callback_entrypoint_address();
339         external_callback_scope_ = external_callback_scope_->previous();
340         DCHECK(external_callback_scope_ == NULL ||
341                external_callback_scope_->scope_address() > frame_->fp());
342         return;
343       }
344     }
345   }
346 }
347 
348 
349 // -------------------------------------------------------------------------
350 
351 
352 Code* StackFrame::GetSafepointData(Isolate* isolate,
353                                    Address inner_pointer,
354                                    SafepointEntry* safepoint_entry,
355                                    unsigned* stack_slots) {
356   InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry =
357       isolate->inner_pointer_to_code_cache()->GetCacheEntry(inner_pointer);
358   if (!entry->safepoint_entry.is_valid()) {
359     entry->safepoint_entry = entry->code->GetSafepointEntry(inner_pointer);
360     DCHECK(entry->safepoint_entry.is_valid());
361   } else {
362     DCHECK(entry->safepoint_entry.Equals(
363         entry->code->GetSafepointEntry(inner_pointer)));
364   }
365 
366   // Fill in the results and return the code.
367   Code* code = entry->code;
368   *safepoint_entry = entry->safepoint_entry;
369   *stack_slots = code->stack_slots();
370   return code;
371 }
372 
373 
374 #ifdef DEBUG
375 static bool GcSafeCodeContains(HeapObject* object, Address addr);
376 #endif
377 
378 
379 void StackFrame::IteratePc(ObjectVisitor* v, Address* pc_address,
380                            Address* constant_pool_address, Code* holder) {
381   Address pc = *pc_address;
382   DCHECK(GcSafeCodeContains(holder, pc));
383   unsigned pc_offset = static_cast<unsigned>(pc - holder->instruction_start());
384   Object* code = holder;
385   v->VisitPointer(&code);
386   if (code != holder) {
387     holder = reinterpret_cast<Code*>(code);
388     pc = holder->instruction_start() + pc_offset;
389     *pc_address = pc;
390     if (FLAG_enable_embedded_constant_pool && constant_pool_address) {
391       *constant_pool_address = holder->constant_pool();
392     }
393   }
394 }
395 
396 
397 void StackFrame::SetReturnAddressLocationResolver(
398     ReturnAddressLocationResolver resolver) {
399   DCHECK(return_address_location_resolver_ == NULL);
400   return_address_location_resolver_ = resolver;
401 }
402 
403 
404 StackFrame::Type StackFrame::ComputeType(const StackFrameIteratorBase* iterator,
405                                          State* state) {
406   DCHECK(state->fp != NULL);
407 
408   if (!iterator->can_access_heap_objects_) {
409     // TODO(titzer): "can_access_heap_objects" is kind of bogus. It really
410     // means that we are being called from the profiler, which can interrupt
411     // the VM with a signal at any arbitrary instruction, with essentially
412     // anything on the stack. So basically none of these checks are 100%
413     // reliable.
414     if (StandardFrame::IsArgumentsAdaptorFrame(state->fp)) {
415       // An adapter frame has a special SMI constant for the context and
416       // is not distinguished through the marker.
417       return ARGUMENTS_ADAPTOR;
418     }
419     Object* marker =
420         Memory::Object_at(state->fp + StandardFrameConstants::kMarkerOffset);
421     if (marker->IsSmi()) {
422       return static_cast<StackFrame::Type>(Smi::cast(marker)->value());
423     } else {
424       return JAVA_SCRIPT;
425     }
426   }
427 
428   // Look up the code object to figure out the type of the stack frame.
429   Code* code_obj = GetContainingCode(iterator->isolate(), *(state->pc_address));
430 
431   Object* marker =
432       Memory::Object_at(state->fp + StandardFrameConstants::kMarkerOffset);
433   if (code_obj != nullptr) {
434     switch (code_obj->kind()) {
435       case Code::FUNCTION:
436         return JAVA_SCRIPT;
437       case Code::OPTIMIZED_FUNCTION:
438         return OPTIMIZED;
439       case Code::WASM_FUNCTION:
440         return STUB;
441       case Code::BUILTIN:
442         if (!marker->IsSmi()) {
443           if (StandardFrame::IsArgumentsAdaptorFrame(state->fp)) {
444             // An adapter frame has a special SMI constant for the context and
445             // is not distinguished through the marker.
446             return ARGUMENTS_ADAPTOR;
447           } else {
448             // The interpreter entry trampoline has a non-SMI marker.
449             DCHECK(code_obj->is_interpreter_entry_trampoline());
450             return INTERPRETED;
451           }
452         }
453         break;  // Marker encodes the frame type.
454       case Code::HANDLER:
455         if (!marker->IsSmi()) {
456           // Only hydrogen code stub handlers can have a non-SMI marker.
457           DCHECK(code_obj->is_hydrogen_stub());
458           return OPTIMIZED;
459         }
460         break;  // Marker encodes the frame type.
461       default:
462         break;  // Marker encodes the frame type.
463     }
464   }
465 
466   // Didn't find a code object, or the code kind wasn't specific enough.
467   // The marker should encode the frame type.
468   return static_cast<StackFrame::Type>(Smi::cast(marker)->value());
469 }
470 
471 
472 #ifdef DEBUG
473 bool StackFrame::can_access_heap_objects() const {
474   return iterator_->can_access_heap_objects_;
475 }
476 #endif
477 
478 
479 StackFrame::Type StackFrame::GetCallerState(State* state) const {
480   ComputeCallerState(state);
481   return ComputeType(iterator_, state);
482 }
483 
484 
485 Address StackFrame::UnpaddedFP() const {
486 #if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87
487   if (!is_optimized()) return fp();
488   int32_t alignment_state = Memory::int32_at(
489     fp() + JavaScriptFrameConstants::kDynamicAlignmentStateOffset);
490 
491   return (alignment_state == kAlignmentPaddingPushed) ?
492     (fp() + kPointerSize) : fp();
493 #else
494   return fp();
495 #endif
496 }
497 
498 
499 Code* EntryFrame::unchecked_code() const {
500   return isolate()->heap()->js_entry_code();
501 }
502 
503 
504 void EntryFrame::ComputeCallerState(State* state) const {
505   GetCallerState(state);
506 }
507 
508 
509 void EntryFrame::SetCallerFp(Address caller_fp) {
510   const int offset = EntryFrameConstants::kCallerFPOffset;
511   Memory::Address_at(this->fp() + offset) = caller_fp;
512 }
513 
514 
515 StackFrame::Type EntryFrame::GetCallerState(State* state) const {
516   const int offset = EntryFrameConstants::kCallerFPOffset;
517   Address fp = Memory::Address_at(this->fp() + offset);
518   return ExitFrame::GetStateForFramePointer(fp, state);
519 }
520 
521 
522 Code* EntryConstructFrame::unchecked_code() const {
523   return isolate()->heap()->js_construct_entry_code();
524 }
525 
526 
527 Object*& ExitFrame::code_slot() const {
528   const int offset = ExitFrameConstants::kCodeOffset;
529   return Memory::Object_at(fp() + offset);
530 }
531 
532 
533 Code* ExitFrame::unchecked_code() const {
534   return reinterpret_cast<Code*>(code_slot());
535 }
536 
537 
538 void ExitFrame::ComputeCallerState(State* state) const {
539   // Set up the caller state.
540   state->sp = caller_sp();
541   state->fp = Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset);
542   state->pc_address = ResolveReturnAddressLocation(
543       reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset));
544   if (FLAG_enable_embedded_constant_pool) {
545     state->constant_pool_address = reinterpret_cast<Address*>(
546         fp() + ExitFrameConstants::kConstantPoolOffset);
547   }
548 }
549 
550 
551 void ExitFrame::SetCallerFp(Address caller_fp) {
552   Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset) = caller_fp;
553 }
554 
555 
556 void ExitFrame::Iterate(ObjectVisitor* v) const {
557   // The arguments are traversed as part of the expression stack of
558   // the calling frame.
559   IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
560   v->VisitPointer(&code_slot());
561 }
562 
563 
564 Address ExitFrame::GetCallerStackPointer() const {
565   return fp() + ExitFrameConstants::kCallerSPDisplacement;
566 }
567 
568 
569 StackFrame::Type ExitFrame::GetStateForFramePointer(Address fp, State* state) {
570   if (fp == 0) return NONE;
571   Address sp = ComputeStackPointer(fp);
572   FillState(fp, sp, state);
573   DCHECK(*state->pc_address != NULL);
574   return EXIT;
575 }
576 
577 
578 Address ExitFrame::ComputeStackPointer(Address fp) {
579   return Memory::Address_at(fp + ExitFrameConstants::kSPOffset);
580 }
581 
582 
583 void ExitFrame::FillState(Address fp, Address sp, State* state) {
584   state->sp = sp;
585   state->fp = fp;
586   state->pc_address = ResolveReturnAddressLocation(
587       reinterpret_cast<Address*>(sp - 1 * kPCOnStackSize));
588   // The constant pool recorded in the exit frame is not associated
589   // with the pc in this state (the return address into a C entry
590   // stub).  ComputeCallerState will retrieve the constant pool
591   // together with the associated caller pc.
592   state->constant_pool_address = NULL;
593 }
594 
595 
596 Address StandardFrame::GetExpressionAddress(int n) const {
597   const int offset = StandardFrameConstants::kExpressionsOffset;
598   return fp() + offset - n * kPointerSize;
599 }
600 
601 
602 Object* StandardFrame::GetExpression(Address fp, int index) {
603   return Memory::Object_at(GetExpressionAddress(fp, index));
604 }
605 
606 
607 Address StandardFrame::GetExpressionAddress(Address fp, int n) {
608   const int offset = StandardFrameConstants::kExpressionsOffset;
609   return fp + offset - n * kPointerSize;
610 }
611 
612 
613 int StandardFrame::ComputeExpressionsCount() const {
614   const int offset =
615       StandardFrameConstants::kExpressionsOffset + kPointerSize;
616   Address base = fp() + offset;
617   Address limit = sp();
618   DCHECK(base >= limit);  // stack grows downwards
619   // Include register-allocated locals in number of expressions.
620   return static_cast<int>((base - limit) / kPointerSize);
621 }
622 
623 
624 void StandardFrame::ComputeCallerState(State* state) const {
625   state->sp = caller_sp();
626   state->fp = caller_fp();
627   state->pc_address = ResolveReturnAddressLocation(
628       reinterpret_cast<Address*>(ComputePCAddress(fp())));
629   state->constant_pool_address =
630       reinterpret_cast<Address*>(ComputeConstantPoolAddress(fp()));
631 }
632 
633 
634 void StandardFrame::SetCallerFp(Address caller_fp) {
635   Memory::Address_at(fp() + StandardFrameConstants::kCallerFPOffset) =
636       caller_fp;
637 }
638 
639 
640 void StandardFrame::IterateCompiledFrame(ObjectVisitor* v) const {
641   // Make sure that we're not doing "safe" stack frame iteration. We cannot
642   // possibly find pointers in optimized frames in that state.
643   DCHECK(can_access_heap_objects());
644 
645   // Compute the safepoint information.
646   unsigned stack_slots = 0;
647   SafepointEntry safepoint_entry;
648   Code* code = StackFrame::GetSafepointData(
649       isolate(), pc(), &safepoint_entry, &stack_slots);
650   unsigned slot_space = stack_slots * kPointerSize;
651 
652   // Visit the outgoing parameters.
653   Object** parameters_base = &Memory::Object_at(sp());
654   Object** parameters_limit = &Memory::Object_at(
655       fp() + JavaScriptFrameConstants::kFunctionOffset - slot_space);
656 
657   // Visit the parameters that may be on top of the saved registers.
658   if (safepoint_entry.argument_count() > 0) {
659     v->VisitPointers(parameters_base,
660                      parameters_base + safepoint_entry.argument_count());
661     parameters_base += safepoint_entry.argument_count();
662   }
663 
664   // Skip saved double registers.
665   if (safepoint_entry.has_doubles()) {
666     // Number of doubles not known at snapshot time.
667     DCHECK(!isolate()->serializer_enabled());
668     parameters_base +=
669         RegisterConfiguration::ArchDefault(RegisterConfiguration::CRANKSHAFT)
670             ->num_allocatable_double_registers() *
671         kDoubleSize / kPointerSize;
672   }
673 
674   // Visit the registers that contain pointers if any.
675   if (safepoint_entry.HasRegisters()) {
676     for (int i = kNumSafepointRegisters - 1; i >=0; i--) {
677       if (safepoint_entry.HasRegisterAt(i)) {
678         int reg_stack_index = MacroAssembler::SafepointRegisterStackIndex(i);
679         v->VisitPointer(parameters_base + reg_stack_index);
680       }
681     }
682     // Skip the words containing the register values.
683     parameters_base += kNumSafepointRegisters;
684   }
685 
686   // We're done dealing with the register bits.
687   uint8_t* safepoint_bits = safepoint_entry.bits();
688   safepoint_bits += kNumSafepointRegisters >> kBitsPerByteLog2;
689 
690   // Visit the rest of the parameters.
691   v->VisitPointers(parameters_base, parameters_limit);
692 
693   // Visit pointer spill slots and locals.
694   for (unsigned index = 0; index < stack_slots; index++) {
695     int byte_index = index >> kBitsPerByteLog2;
696     int bit_index = index & (kBitsPerByte - 1);
697     if ((safepoint_bits[byte_index] & (1U << bit_index)) != 0) {
698       v->VisitPointer(parameters_limit + index);
699     }
700   }
701 
702   // Visit the return address in the callee and incoming arguments.
703   IteratePc(v, pc_address(), constant_pool_address(), code);
704 
705   // Visit the context in stub frame and JavaScript frame.
706   // Visit the function in JavaScript frame.
707   Object** fixed_base = &Memory::Object_at(
708       fp() + StandardFrameConstants::kMarkerOffset);
709   Object** fixed_limit = &Memory::Object_at(fp());
710   v->VisitPointers(fixed_base, fixed_limit);
711 }
712 
713 
714 void StubFrame::Iterate(ObjectVisitor* v) const {
715   IterateCompiledFrame(v);
716 }
717 
718 
719 Code* StubFrame::unchecked_code() const {
720   return static_cast<Code*>(isolate()->FindCodeObject(pc()));
721 }
722 
723 
724 Address StubFrame::GetCallerStackPointer() const {
725   return fp() + ExitFrameConstants::kCallerSPDisplacement;
726 }
727 
728 
729 int StubFrame::GetNumberOfIncomingArguments() const {
730   return 0;
731 }
732 
733 
734 void OptimizedFrame::Iterate(ObjectVisitor* v) const {
735   IterateCompiledFrame(v);
736 }
737 
738 
739 void JavaScriptFrame::SetParameterValue(int index, Object* value) const {
740   Memory::Object_at(GetParameterSlot(index)) = value;
741 }
742 
743 
744 bool JavaScriptFrame::IsConstructor() const {
745   Address fp = caller_fp();
746   if (has_adapted_arguments()) {
747     // Skip the arguments adaptor frame and look at the real caller.
748     fp = Memory::Address_at(fp + StandardFrameConstants::kCallerFPOffset);
749   }
750   return IsConstructFrame(fp);
751 }
752 
753 
754 bool JavaScriptFrame::HasInlinedFrames() const {
755   List<JSFunction*> functions(1);
756   GetFunctions(&functions);
757   return functions.length() > 1;
758 }
759 
760 
761 int JavaScriptFrame::GetArgumentsLength() const {
762   // If there is an arguments adaptor frame get the arguments length from it.
763   if (has_adapted_arguments()) {
764     STATIC_ASSERT(ArgumentsAdaptorFrameConstants::kLengthOffset ==
765                   StandardFrameConstants::kExpressionsOffset);
766     return Smi::cast(GetExpression(caller_fp(), 0))->value();
767   } else {
768     return GetNumberOfIncomingArguments();
769   }
770 }
771 
772 
773 Code* JavaScriptFrame::unchecked_code() const {
774   return function()->code();
775 }
776 
777 
778 int JavaScriptFrame::GetNumberOfIncomingArguments() const {
779   DCHECK(can_access_heap_objects() &&
780          isolate()->heap()->gc_state() == Heap::NOT_IN_GC);
781 
782   return function()->shared()->internal_formal_parameter_count();
783 }
784 
785 
786 Address JavaScriptFrame::GetCallerStackPointer() const {
787   return fp() + StandardFrameConstants::kCallerSPOffset;
788 }
789 
790 
791 void JavaScriptFrame::GetFunctions(List<JSFunction*>* functions) const {
792   DCHECK(functions->length() == 0);
793   functions->Add(function());
794 }
795 
796 
797 void JavaScriptFrame::Summarize(List<FrameSummary>* functions) {
798   DCHECK(functions->length() == 0);
799   Code* code_pointer = LookupCode();
800   int offset = static_cast<int>(pc() - code_pointer->address());
801   FrameSummary summary(receiver(),
802                        function(),
803                        code_pointer,
804                        offset,
805                        IsConstructor());
806   functions->Add(summary);
807 }
808 
809 
810 int JavaScriptFrame::LookupExceptionHandlerInTable(
811     int* stack_slots, HandlerTable::CatchPrediction* prediction) {
812   Code* code = LookupCode();
813   DCHECK(!code->is_optimized_code());
814   HandlerTable* table = HandlerTable::cast(code->handler_table());
815   int pc_offset = static_cast<int>(pc() - code->entry());
816   return table->LookupRange(pc_offset, stack_slots, prediction);
817 }
818 
819 
820 void JavaScriptFrame::PrintFunctionAndOffset(JSFunction* function, Code* code,
821                                              Address pc, FILE* file,
822                                              bool print_line_number) {
823   PrintF(file, "%s", function->IsOptimized() ? "*" : "~");
824   function->PrintName(file);
825   int code_offset = static_cast<int>(pc - code->instruction_start());
826   PrintF(file, "+%d", code_offset);
827   if (print_line_number) {
828     SharedFunctionInfo* shared = function->shared();
829     int source_pos = code->SourcePosition(pc);
830     Object* maybe_script = shared->script();
831     if (maybe_script->IsScript()) {
832       Script* script = Script::cast(maybe_script);
833       int line = script->GetLineNumber(source_pos) + 1;
834       Object* script_name_raw = script->name();
835       if (script_name_raw->IsString()) {
836         String* script_name = String::cast(script->name());
837         base::SmartArrayPointer<char> c_script_name =
838             script_name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
839         PrintF(file, " at %s:%d", c_script_name.get(), line);
840       } else {
841         PrintF(file, " at <unknown>:%d", line);
842       }
843     } else {
844       PrintF(file, " at <unknown>:<unknown>");
845     }
846   }
847 }
848 
849 
850 void JavaScriptFrame::PrintTop(Isolate* isolate, FILE* file, bool print_args,
851                                bool print_line_number) {
852   // constructor calls
853   DisallowHeapAllocation no_allocation;
854   JavaScriptFrameIterator it(isolate);
855   while (!it.done()) {
856     if (it.frame()->is_java_script()) {
857       JavaScriptFrame* frame = it.frame();
858       if (frame->IsConstructor()) PrintF(file, "new ");
859       PrintFunctionAndOffset(frame->function(), frame->unchecked_code(),
860                              frame->pc(), file, print_line_number);
861       if (print_args) {
862         // function arguments
863         // (we intentionally print only the actually supplied
864         // parameters, not all the formal parameters)
865         PrintF(file, "(this=");
866         frame->receiver()->ShortPrint(file);
867         const int length = frame->ComputeParametersCount();
868         for (int i = 0; i < length; i++) {
869           PrintF(file, ", ");
870           frame->GetParameter(i)->ShortPrint(file);
871         }
872         PrintF(file, ")");
873       }
874       break;
875     }
876     it.Advance();
877   }
878 }
879 
880 
881 void JavaScriptFrame::SaveOperandStack(FixedArray* store) const {
882   int operands_count = store->length();
883   DCHECK_LE(operands_count, ComputeOperandsCount());
884   for (int i = 0; i < operands_count; i++) {
885     store->set(i, GetOperand(i));
886   }
887 }
888 
889 
890 void JavaScriptFrame::RestoreOperandStack(FixedArray* store) {
891   int operands_count = store->length();
892   DCHECK_LE(operands_count, ComputeOperandsCount());
893   for (int i = 0; i < operands_count; i++) {
894     DCHECK_EQ(GetOperand(i), isolate()->heap()->the_hole_value());
895     Memory::Object_at(GetOperandSlot(i)) = store->get(i);
896   }
897 }
898 
899 
900 FrameSummary::FrameSummary(Object* receiver, JSFunction* function, Code* code,
901                            int offset, bool is_constructor)
902     : receiver_(receiver, function->GetIsolate()),
903       function_(function),
904       code_(code),
905       offset_(offset),
906       is_constructor_(is_constructor) {}
907 
908 
909 void FrameSummary::Print() {
910   PrintF("receiver: ");
911   receiver_->ShortPrint();
912   PrintF("\nfunction: ");
913   function_->shared()->DebugName()->ShortPrint();
914   PrintF("\ncode: ");
915   code_->ShortPrint();
916   if (code_->kind() == Code::FUNCTION) PrintF(" NON-OPT");
917   if (code_->kind() == Code::OPTIMIZED_FUNCTION) PrintF(" OPT");
918   PrintF("\npc: %d\n", offset_);
919 }
920 
921 
922 void OptimizedFrame::Summarize(List<FrameSummary>* frames) {
923   DCHECK(frames->length() == 0);
924   DCHECK(is_optimized());
925 
926   // Delegate to JS frame in absence of turbofan deoptimization.
927   // TODO(turbofan): Revisit once we support deoptimization across the board.
928   if (LookupCode()->is_turbofanned() && function()->shared()->asm_function() &&
929       !FLAG_turbo_asm_deoptimization) {
930     return JavaScriptFrame::Summarize(frames);
931   }
932 
933   DisallowHeapAllocation no_gc;
934   int deopt_index = Safepoint::kNoDeoptimizationIndex;
935   DeoptimizationInputData* const data = GetDeoptimizationData(&deopt_index);
936   FixedArray* const literal_array = data->LiteralArray();
937 
938   TranslationIterator it(data->TranslationByteArray(),
939                          data->TranslationIndex(deopt_index)->value());
940   Translation::Opcode frame_opcode =
941       static_cast<Translation::Opcode>(it.Next());
942   DCHECK_EQ(Translation::BEGIN, frame_opcode);
943   it.Next();  // Drop frame count.
944   int jsframe_count = it.Next();
945 
946   // We create the summary in reverse order because the frames
947   // in the deoptimization translation are ordered bottom-to-top.
948   bool is_constructor = IsConstructor();
949   while (jsframe_count != 0) {
950     frame_opcode = static_cast<Translation::Opcode>(it.Next());
951     if (frame_opcode == Translation::JS_FRAME ||
952         frame_opcode == Translation::INTERPRETED_FRAME) {
953       jsframe_count--;
954       BailoutId const ast_id = BailoutId(it.Next());
955       SharedFunctionInfo* const shared_info =
956           SharedFunctionInfo::cast(literal_array->get(it.Next()));
957       it.Next();  // Skip height.
958 
959       // The translation commands are ordered and the function is always
960       // at the first position, and the receiver is next.
961       Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
962 
963       // Get the correct function in the optimized frame.
964       JSFunction* function;
965       if (opcode == Translation::LITERAL) {
966         function = JSFunction::cast(literal_array->get(it.Next()));
967       } else if (opcode == Translation::STACK_SLOT) {
968         function = JSFunction::cast(StackSlotAt(it.Next()));
969       } else {
970         CHECK_EQ(Translation::JS_FRAME_FUNCTION, opcode);
971         function = this->function();
972       }
973       DCHECK_EQ(shared_info, function->shared());
974 
975       // If we are at a call, the receiver is always in a stack slot.
976       // Otherwise we are not guaranteed to get the receiver value.
977       opcode = static_cast<Translation::Opcode>(it.Next());
978 
979       // Get the correct receiver in the optimized frame.
980       Object* receiver;
981       if (opcode == Translation::LITERAL) {
982         receiver = literal_array->get(it.Next());
983       } else if (opcode == Translation::STACK_SLOT) {
984         receiver = StackSlotAt(it.Next());
985       } else if (opcode == Translation::JS_FRAME_FUNCTION) {
986         receiver = this->function();
987       } else {
988         // The receiver is not in a stack slot nor in a literal.  We give up.
989         it.Skip(Translation::NumberOfOperandsFor(opcode));
990         // TODO(3029): Materializing a captured object (or duplicated
991         // object) is hard, we return undefined for now. This breaks the
992         // produced stack trace, as constructor frames aren't marked as
993         // such anymore.
994         receiver = isolate()->heap()->undefined_value();
995       }
996 
997       Code* const code = shared_info->code();
998 
999       unsigned pc_offset;
1000       if (frame_opcode == Translation::JS_FRAME) {
1001         DeoptimizationOutputData* const output_data =
1002             DeoptimizationOutputData::cast(code->deoptimization_data());
1003         unsigned const entry =
1004             Deoptimizer::GetOutputInfo(output_data, ast_id, shared_info);
1005         pc_offset =
1006             FullCodeGenerator::PcField::decode(entry) + Code::kHeaderSize;
1007         DCHECK_NE(0U, pc_offset);
1008       } else {
1009         // TODO(rmcilroy): Modify FrameSummary to enable us to summarize
1010         // based on the BytecodeArray and bytecode offset.
1011         DCHECK_EQ(frame_opcode, Translation::INTERPRETED_FRAME);
1012         pc_offset = 0;
1013       }
1014       FrameSummary summary(receiver, function, code, pc_offset, is_constructor);
1015       frames->Add(summary);
1016       is_constructor = false;
1017     } else if (frame_opcode == Translation::CONSTRUCT_STUB_FRAME) {
1018       // The next encountered JS_FRAME will be marked as a constructor call.
1019       it.Skip(Translation::NumberOfOperandsFor(frame_opcode));
1020       DCHECK(!is_constructor);
1021       is_constructor = true;
1022     } else {
1023       // Skip over operands to advance to the next opcode.
1024       it.Skip(Translation::NumberOfOperandsFor(frame_opcode));
1025     }
1026   }
1027   DCHECK(!is_constructor);
1028 }
1029 
1030 
1031 int OptimizedFrame::LookupExceptionHandlerInTable(
1032     int* stack_slots, HandlerTable::CatchPrediction* prediction) {
1033   Code* code = LookupCode();
1034   DCHECK(code->is_optimized_code());
1035   HandlerTable* table = HandlerTable::cast(code->handler_table());
1036   int pc_offset = static_cast<int>(pc() - code->entry());
1037   *stack_slots = code->stack_slots();
1038   return table->LookupReturn(pc_offset, prediction);
1039 }
1040 
1041 
1042 DeoptimizationInputData* OptimizedFrame::GetDeoptimizationData(
1043     int* deopt_index) const {
1044   DCHECK(is_optimized());
1045 
1046   JSFunction* opt_function = function();
1047   Code* code = opt_function->code();
1048 
1049   // The code object may have been replaced by lazy deoptimization. Fall
1050   // back to a slow search in this case to find the original optimized
1051   // code object.
1052   if (!code->contains(pc())) {
1053     code = isolate()->inner_pointer_to_code_cache()->
1054         GcSafeFindCodeForInnerPointer(pc());
1055   }
1056   DCHECK(code != NULL);
1057   DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
1058 
1059   SafepointEntry safepoint_entry = code->GetSafepointEntry(pc());
1060   *deopt_index = safepoint_entry.deoptimization_index();
1061   DCHECK(*deopt_index != Safepoint::kNoDeoptimizationIndex);
1062 
1063   return DeoptimizationInputData::cast(code->deoptimization_data());
1064 }
1065 
1066 
1067 void OptimizedFrame::GetFunctions(List<JSFunction*>* functions) const {
1068   DCHECK(functions->length() == 0);
1069   DCHECK(is_optimized());
1070 
1071   // Delegate to JS frame in absence of turbofan deoptimization.
1072   // TODO(turbofan): Revisit once we support deoptimization across the board.
1073   if (LookupCode()->is_turbofanned() && function()->shared()->asm_function() &&
1074       !FLAG_turbo_asm_deoptimization) {
1075     return JavaScriptFrame::GetFunctions(functions);
1076   }
1077 
1078   DisallowHeapAllocation no_gc;
1079   int deopt_index = Safepoint::kNoDeoptimizationIndex;
1080   DeoptimizationInputData* const data = GetDeoptimizationData(&deopt_index);
1081   FixedArray* const literal_array = data->LiteralArray();
1082 
1083   TranslationIterator it(data->TranslationByteArray(),
1084                          data->TranslationIndex(deopt_index)->value());
1085   Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
1086   DCHECK_EQ(Translation::BEGIN, opcode);
1087   it.Next();  // Skip frame count.
1088   int jsframe_count = it.Next();
1089 
1090   // We insert the frames in reverse order because the frames
1091   // in the deoptimization translation are ordered bottom-to-top.
1092   while (jsframe_count != 0) {
1093     opcode = static_cast<Translation::Opcode>(it.Next());
1094     // Skip over operands to advance to the next opcode.
1095     it.Skip(Translation::NumberOfOperandsFor(opcode));
1096     if (opcode == Translation::JS_FRAME ||
1097         opcode == Translation::INTERPRETED_FRAME) {
1098       jsframe_count--;
1099 
1100       // The translation commands are ordered and the function is always at the
1101       // first position.
1102       opcode = static_cast<Translation::Opcode>(it.Next());
1103 
1104       // Get the correct function in the optimized frame.
1105       Object* function;
1106       if (opcode == Translation::LITERAL) {
1107         function = literal_array->get(it.Next());
1108       } else if (opcode == Translation::STACK_SLOT) {
1109         function = StackSlotAt(it.Next());
1110       } else {
1111         CHECK_EQ(Translation::JS_FRAME_FUNCTION, opcode);
1112         function = this->function();
1113       }
1114       functions->Add(JSFunction::cast(function));
1115     }
1116   }
1117 }
1118 
1119 
1120 int OptimizedFrame::StackSlotOffsetRelativeToFp(int slot_index) {
1121   return StandardFrameConstants::kCallerSPOffset -
1122          ((slot_index + 1) * kPointerSize);
1123 }
1124 
1125 
1126 Object* OptimizedFrame::StackSlotAt(int index) const {
1127   return Memory::Object_at(fp() + StackSlotOffsetRelativeToFp(index));
1128 }
1129 
1130 
1131 int ArgumentsAdaptorFrame::GetNumberOfIncomingArguments() const {
1132   return Smi::cast(GetExpression(0))->value();
1133 }
1134 
1135 
1136 Address ArgumentsAdaptorFrame::GetCallerStackPointer() const {
1137   return fp() + StandardFrameConstants::kCallerSPOffset;
1138 }
1139 
1140 
1141 Address InternalFrame::GetCallerStackPointer() const {
1142   // Internal frames have no arguments. The stack pointer of the
1143   // caller is at a fixed offset from the frame pointer.
1144   return fp() + StandardFrameConstants::kCallerSPOffset;
1145 }
1146 
1147 
1148 Code* ArgumentsAdaptorFrame::unchecked_code() const {
1149   return isolate()->builtins()->builtin(
1150       Builtins::kArgumentsAdaptorTrampoline);
1151 }
1152 
1153 
1154 Code* InternalFrame::unchecked_code() const {
1155   const int offset = InternalFrameConstants::kCodeOffset;
1156   Object* code = Memory::Object_at(fp() + offset);
1157   DCHECK(code != NULL);
1158   return reinterpret_cast<Code*>(code);
1159 }
1160 
1161 
1162 void StackFrame::PrintIndex(StringStream* accumulator,
1163                             PrintMode mode,
1164                             int index) {
1165   accumulator->Add((mode == OVERVIEW) ? "%5d: " : "[%d]: ", index);
1166 }
1167 
1168 
1169 namespace {
1170 
1171 
1172 void PrintFunctionSource(StringStream* accumulator, SharedFunctionInfo* shared,
1173                          Code* code) {
1174   if (FLAG_max_stack_trace_source_length != 0 && code != NULL) {
1175     std::ostringstream os;
1176     os << "--------- s o u r c e   c o d e ---------\n"
1177        << SourceCodeOf(shared, FLAG_max_stack_trace_source_length)
1178        << "\n-----------------------------------------\n";
1179     accumulator->Add(os.str().c_str());
1180   }
1181 }
1182 
1183 
1184 }  // namespace
1185 
1186 
1187 void JavaScriptFrame::Print(StringStream* accumulator,
1188                             PrintMode mode,
1189                             int index) const {
1190   DisallowHeapAllocation no_gc;
1191   Object* receiver = this->receiver();
1192   JSFunction* function = this->function();
1193 
1194   accumulator->PrintSecurityTokenIfChanged(function);
1195   PrintIndex(accumulator, mode, index);
1196   Code* code = NULL;
1197   if (IsConstructor()) accumulator->Add("new ");
1198   accumulator->PrintFunction(function, receiver, &code);
1199 
1200   // Get scope information for nicer output, if possible. If code is NULL, or
1201   // doesn't contain scope info, scope_info will return 0 for the number of
1202   // parameters, stack local variables, context local variables, stack slots,
1203   // or context slots.
1204   SharedFunctionInfo* shared = function->shared();
1205   ScopeInfo* scope_info = shared->scope_info();
1206   Object* script_obj = shared->script();
1207   if (script_obj->IsScript()) {
1208     Script* script = Script::cast(script_obj);
1209     accumulator->Add(" [");
1210     accumulator->PrintName(script->name());
1211 
1212     Address pc = this->pc();
1213     if (code != NULL && code->kind() == Code::FUNCTION &&
1214         pc >= code->instruction_start() && pc < code->instruction_end()) {
1215       int source_pos = code->SourcePosition(pc);
1216       int line = script->GetLineNumber(source_pos) + 1;
1217       accumulator->Add(":%d", line);
1218     } else {
1219       int function_start_pos = shared->start_position();
1220       int line = script->GetLineNumber(function_start_pos) + 1;
1221       accumulator->Add(":~%d", line);
1222     }
1223 
1224     accumulator->Add("] [pc=%p] ", pc);
1225   }
1226 
1227   accumulator->Add("(this=%o", receiver);
1228 
1229   // Print the parameters.
1230   int parameters_count = ComputeParametersCount();
1231   for (int i = 0; i < parameters_count; i++) {
1232     accumulator->Add(",");
1233     // If we have a name for the parameter we print it. Nameless
1234     // parameters are either because we have more actual parameters
1235     // than formal parameters or because we have no scope information.
1236     if (i < scope_info->ParameterCount()) {
1237       accumulator->PrintName(scope_info->ParameterName(i));
1238       accumulator->Add("=");
1239     }
1240     accumulator->Add("%o", GetParameter(i));
1241   }
1242 
1243   accumulator->Add(")");
1244   if (mode == OVERVIEW) {
1245     accumulator->Add("\n");
1246     return;
1247   }
1248   if (is_optimized()) {
1249     accumulator->Add(" {\n// optimized frame\n");
1250     PrintFunctionSource(accumulator, shared, code);
1251     accumulator->Add("}\n");
1252     return;
1253   }
1254   accumulator->Add(" {\n");
1255 
1256   // Compute the number of locals and expression stack elements.
1257   int stack_locals_count = scope_info->StackLocalCount();
1258   int heap_locals_count = scope_info->ContextLocalCount();
1259   int expressions_count = ComputeExpressionsCount();
1260 
1261   // Print stack-allocated local variables.
1262   if (stack_locals_count > 0) {
1263     accumulator->Add("  // stack-allocated locals\n");
1264   }
1265   for (int i = 0; i < stack_locals_count; i++) {
1266     accumulator->Add("  var ");
1267     accumulator->PrintName(scope_info->StackLocalName(i));
1268     accumulator->Add(" = ");
1269     if (i < expressions_count) {
1270       accumulator->Add("%o", GetExpression(i));
1271     } else {
1272       accumulator->Add("// no expression found - inconsistent frame?");
1273     }
1274     accumulator->Add("\n");
1275   }
1276 
1277   // Try to get hold of the context of this frame.
1278   Context* context = NULL;
1279   if (this->context() != NULL && this->context()->IsContext()) {
1280     context = Context::cast(this->context());
1281   }
1282   while (context->IsWithContext()) {
1283     context = context->previous();
1284     DCHECK(context != NULL);
1285   }
1286 
1287   // Print heap-allocated local variables.
1288   if (heap_locals_count > 0) {
1289     accumulator->Add("  // heap-allocated locals\n");
1290   }
1291   for (int i = 0; i < heap_locals_count; i++) {
1292     accumulator->Add("  var ");
1293     accumulator->PrintName(scope_info->ContextLocalName(i));
1294     accumulator->Add(" = ");
1295     if (context != NULL) {
1296       int index = Context::MIN_CONTEXT_SLOTS + i;
1297       if (index < context->length()) {
1298         accumulator->Add("%o", context->get(index));
1299       } else {
1300         accumulator->Add(
1301             "// warning: missing context slot - inconsistent frame?");
1302       }
1303     } else {
1304       accumulator->Add("// warning: no context found - inconsistent frame?");
1305     }
1306     accumulator->Add("\n");
1307   }
1308 
1309   // Print the expression stack.
1310   int expressions_start = stack_locals_count;
1311   if (expressions_start < expressions_count) {
1312     accumulator->Add("  // expression stack (top to bottom)\n");
1313   }
1314   for (int i = expressions_count - 1; i >= expressions_start; i--) {
1315     accumulator->Add("  [%02d] : %o\n", i, GetExpression(i));
1316   }
1317 
1318   PrintFunctionSource(accumulator, shared, code);
1319 
1320   accumulator->Add("}\n\n");
1321 }
1322 
1323 
1324 void ArgumentsAdaptorFrame::Print(StringStream* accumulator,
1325                                   PrintMode mode,
1326                                   int index) const {
1327   int actual = ComputeParametersCount();
1328   int expected = -1;
1329   JSFunction* function = this->function();
1330   expected = function->shared()->internal_formal_parameter_count();
1331 
1332   PrintIndex(accumulator, mode, index);
1333   accumulator->Add("arguments adaptor frame: %d->%d", actual, expected);
1334   if (mode == OVERVIEW) {
1335     accumulator->Add("\n");
1336     return;
1337   }
1338   accumulator->Add(" {\n");
1339 
1340   // Print actual arguments.
1341   if (actual > 0) accumulator->Add("  // actual arguments\n");
1342   for (int i = 0; i < actual; i++) {
1343     accumulator->Add("  [%02d] : %o", i, GetParameter(i));
1344     if (expected != -1 && i >= expected) {
1345       accumulator->Add("  // not passed to callee");
1346     }
1347     accumulator->Add("\n");
1348   }
1349 
1350   accumulator->Add("}\n\n");
1351 }
1352 
1353 
1354 void EntryFrame::Iterate(ObjectVisitor* v) const {
1355   IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
1356 }
1357 
1358 
1359 void StandardFrame::IterateExpressions(ObjectVisitor* v) const {
1360   const int offset = StandardFrameConstants::kLastObjectOffset;
1361   Object** base = &Memory::Object_at(sp());
1362   Object** limit = &Memory::Object_at(fp() + offset) + 1;
1363   v->VisitPointers(base, limit);
1364 }
1365 
1366 
1367 void JavaScriptFrame::Iterate(ObjectVisitor* v) const {
1368   IterateExpressions(v);
1369   IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
1370 }
1371 
1372 
1373 void InternalFrame::Iterate(ObjectVisitor* v) const {
1374   // Internal frames only have object pointers on the expression stack
1375   // as they never have any arguments.
1376   IterateExpressions(v);
1377   IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
1378 }
1379 
1380 
1381 void StubFailureTrampolineFrame::Iterate(ObjectVisitor* v) const {
1382   Object** base = &Memory::Object_at(sp());
1383   Object** limit = &Memory::Object_at(fp() +
1384                                       kFirstRegisterParameterFrameOffset);
1385   v->VisitPointers(base, limit);
1386   base = &Memory::Object_at(fp() + StandardFrameConstants::kMarkerOffset);
1387   const int offset = StandardFrameConstants::kLastObjectOffset;
1388   limit = &Memory::Object_at(fp() + offset) + 1;
1389   v->VisitPointers(base, limit);
1390   IteratePc(v, pc_address(), constant_pool_address(), LookupCode());
1391 }
1392 
1393 
1394 Address StubFailureTrampolineFrame::GetCallerStackPointer() const {
1395   return fp() + StandardFrameConstants::kCallerSPOffset;
1396 }
1397 
1398 
1399 Code* StubFailureTrampolineFrame::unchecked_code() const {
1400   Code* trampoline;
1401   StubFailureTrampolineStub(isolate(), NOT_JS_FUNCTION_STUB_MODE).
1402       FindCodeInCache(&trampoline);
1403   if (trampoline->contains(pc())) {
1404     return trampoline;
1405   }
1406 
1407   StubFailureTrampolineStub(isolate(), JS_FUNCTION_STUB_MODE).
1408       FindCodeInCache(&trampoline);
1409   if (trampoline->contains(pc())) {
1410     return trampoline;
1411   }
1412 
1413   UNREACHABLE();
1414   return NULL;
1415 }
1416 
1417 
1418 // -------------------------------------------------------------------------
1419 
1420 
1421 JavaScriptFrame* StackFrameLocator::FindJavaScriptFrame(int n) {
1422   DCHECK(n >= 0);
1423   for (int i = 0; i <= n; i++) {
1424     while (!iterator_.frame()->is_java_script()) iterator_.Advance();
1425     if (i == n) return JavaScriptFrame::cast(iterator_.frame());
1426     iterator_.Advance();
1427   }
1428   UNREACHABLE();
1429   return NULL;
1430 }
1431 
1432 
1433 // -------------------------------------------------------------------------
1434 
1435 
1436 static Map* GcSafeMapOfCodeSpaceObject(HeapObject* object) {
1437   MapWord map_word = object->map_word();
1438   return map_word.IsForwardingAddress() ?
1439       map_word.ToForwardingAddress()->map() : map_word.ToMap();
1440 }
1441 
1442 
1443 static int GcSafeSizeOfCodeSpaceObject(HeapObject* object) {
1444   return object->SizeFromMap(GcSafeMapOfCodeSpaceObject(object));
1445 }
1446 
1447 
1448 #ifdef DEBUG
1449 static bool GcSafeCodeContains(HeapObject* code, Address addr) {
1450   Map* map = GcSafeMapOfCodeSpaceObject(code);
1451   DCHECK(map == code->GetHeap()->code_map());
1452   Address start = code->address();
1453   Address end = code->address() + code->SizeFromMap(map);
1454   return start <= addr && addr < end;
1455 }
1456 #endif
1457 
1458 
1459 Code* InnerPointerToCodeCache::GcSafeCastToCode(HeapObject* object,
1460                                                 Address inner_pointer) {
1461   Code* code = reinterpret_cast<Code*>(object);
1462   DCHECK(code != NULL && GcSafeCodeContains(code, inner_pointer));
1463   return code;
1464 }
1465 
1466 
1467 Code* InnerPointerToCodeCache::GcSafeFindCodeForInnerPointer(
1468     Address inner_pointer) {
1469   Heap* heap = isolate_->heap();
1470   if (!heap->code_space()->Contains(inner_pointer) &&
1471       !heap->lo_space()->Contains(inner_pointer)) {
1472     return nullptr;
1473   }
1474 
1475   // Check if the inner pointer points into a large object chunk.
1476   LargePage* large_page = heap->lo_space()->FindPage(inner_pointer);
1477   if (large_page != NULL) {
1478     return GcSafeCastToCode(large_page->GetObject(), inner_pointer);
1479   }
1480 
1481   // Iterate through the page until we reach the end or find an object starting
1482   // after the inner pointer.
1483   Page* page = Page::FromAddress(inner_pointer);
1484 
1485   DCHECK_EQ(page->owner(), heap->code_space());
1486   heap->mark_compact_collector()->SweepOrWaitUntilSweepingCompleted(page);
1487 
1488   Address addr = page->skip_list()->StartFor(inner_pointer);
1489 
1490   Address top = heap->code_space()->top();
1491   Address limit = heap->code_space()->limit();
1492 
1493   while (true) {
1494     if (addr == top && addr != limit) {
1495       addr = limit;
1496       continue;
1497     }
1498 
1499     HeapObject* obj = HeapObject::FromAddress(addr);
1500     int obj_size = GcSafeSizeOfCodeSpaceObject(obj);
1501     Address next_addr = addr + obj_size;
1502     if (next_addr > inner_pointer) return GcSafeCastToCode(obj, inner_pointer);
1503     addr = next_addr;
1504   }
1505 }
1506 
1507 
1508 InnerPointerToCodeCache::InnerPointerToCodeCacheEntry*
1509     InnerPointerToCodeCache::GetCacheEntry(Address inner_pointer) {
1510   isolate_->counters()->pc_to_code()->Increment();
1511   DCHECK(base::bits::IsPowerOfTwo32(kInnerPointerToCodeCacheSize));
1512   uint32_t hash = ComputeIntegerHash(ObjectAddressForHashing(inner_pointer),
1513                                      v8::internal::kZeroHashSeed);
1514   uint32_t index = hash & (kInnerPointerToCodeCacheSize - 1);
1515   InnerPointerToCodeCacheEntry* entry = cache(index);
1516   if (entry->inner_pointer == inner_pointer) {
1517     isolate_->counters()->pc_to_code_cached()->Increment();
1518     DCHECK(entry->code == GcSafeFindCodeForInnerPointer(inner_pointer));
1519   } else {
1520     // Because this code may be interrupted by a profiling signal that
1521     // also queries the cache, we cannot update inner_pointer before the code
1522     // has been set. Otherwise, we risk trying to use a cache entry before
1523     // the code has been computed.
1524     entry->code = GcSafeFindCodeForInnerPointer(inner_pointer);
1525     entry->safepoint_entry.Reset();
1526     entry->inner_pointer = inner_pointer;
1527   }
1528   return entry;
1529 }
1530 
1531 
1532 // -------------------------------------------------------------------------
1533 
1534 
1535 int NumRegs(RegList reglist) { return base::bits::CountPopulation(reglist); }
1536 
1537 
1538 struct JSCallerSavedCodeData {
1539   int reg_code[kNumJSCallerSaved];
1540 };
1541 
1542 JSCallerSavedCodeData caller_saved_code_data;
1543 
1544 void SetUpJSCallerSavedCodeData() {
1545   int i = 0;
1546   for (int r = 0; r < kNumRegs; r++)
1547     if ((kJSCallerSaved & (1 << r)) != 0)
1548       caller_saved_code_data.reg_code[i++] = r;
1549 
1550   DCHECK(i == kNumJSCallerSaved);
1551 }
1552 
1553 
1554 int JSCallerSavedCode(int n) {
1555   DCHECK(0 <= n && n < kNumJSCallerSaved);
1556   return caller_saved_code_data.reg_code[n];
1557 }
1558 
1559 
1560 #define DEFINE_WRAPPER(type, field)                              \
1561 class field##_Wrapper : public ZoneObject {                      \
1562  public:  /* NOLINT */                                           \
1563   field##_Wrapper(const field& original) : frame_(original) {    \
1564   }                                                              \
1565   field frame_;                                                  \
1566 };
1567 STACK_FRAME_TYPE_LIST(DEFINE_WRAPPER)
1568 #undef DEFINE_WRAPPER
1569 
1570 static StackFrame* AllocateFrameCopy(StackFrame* frame, Zone* zone) {
1571 #define FRAME_TYPE_CASE(type, field) \
1572   case StackFrame::type: { \
1573     field##_Wrapper* wrapper = \
1574         new(zone) field##_Wrapper(*(reinterpret_cast<field*>(frame))); \
1575     return &wrapper->frame_; \
1576   }
1577 
1578   switch (frame->type()) {
1579     STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
1580     default: UNREACHABLE();
1581   }
1582 #undef FRAME_TYPE_CASE
1583   return NULL;
1584 }
1585 
1586 
1587 Vector<StackFrame*> CreateStackMap(Isolate* isolate, Zone* zone) {
1588   ZoneList<StackFrame*> list(10, zone);
1589   for (StackFrameIterator it(isolate); !it.done(); it.Advance()) {
1590     StackFrame* frame = AllocateFrameCopy(it.frame(), zone);
1591     list.Add(frame, zone);
1592   }
1593   return list.ToVector();
1594 }
1595 
1596 
1597 }  // namespace internal
1598 }  // namespace v8
1599