// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/ast.h"
#include "src/base/bits.h"
#include "src/deoptimizer.h"
#include "src/frames-inl.h"
#include "src/full-codegen.h"
#include "src/heap/mark-compact.h"
#include "src/safepoint-table.h"
#include "src/scopeinfo.h"
#include "src/string-stream.h"
#include "src/vm-state-inl.h"

namespace v8 {
namespace internal {


ReturnAddressLocationResolver
    StackFrame::return_address_location_resolver_ = NULL;


// Iterator that supports traversing the stack handlers of a
// particular frame. Needs to know the top of the handler chain.
class StackHandlerIterator BASE_EMBEDDED {
 public:
  StackHandlerIterator(const StackFrame* frame, StackHandler* handler)
      : limit_(frame->fp()), handler_(handler) {
    // Make sure the handler has already been unwound to this frame.
    DCHECK(frame->sp() <= handler->address());
  }

  StackHandler* handler() const { return handler_; }

  bool done() {
    return handler_ == NULL || handler_->address() > limit_;
  }
  void Advance() {
    DCHECK(!done());
    handler_ = handler_->next();
  }

 private:
  const Address limit_;
  StackHandler* handler_;
};


// -------------------------------------------------------------------------


#define INITIALIZE_SINGLETON(type, field) field##_(this),
StackFrameIteratorBase::StackFrameIteratorBase(Isolate* isolate,
                                               bool can_access_heap_objects)
    : isolate_(isolate),
      STACK_FRAME_TYPE_LIST(INITIALIZE_SINGLETON)
      frame_(NULL), handler_(NULL),
      can_access_heap_objects_(can_access_heap_objects) {
}
#undef INITIALIZE_SINGLETON


StackFrameIterator::StackFrameIterator(Isolate* isolate)
    : StackFrameIteratorBase(isolate, true) {
  Reset(isolate->thread_local_top());
}


StackFrameIterator::StackFrameIterator(Isolate* isolate, ThreadLocalTop* t)
    : StackFrameIteratorBase(isolate, true) {
  Reset(t);
}


void StackFrameIterator::Advance() {
  DCHECK(!done());
  // Compute the state of the calling frame before restoring
  // callee-saved registers and unwinding handlers. This allows the
  // frame code that computes the caller state to access the top
  // handler and the value of any callee-saved register if needed.
  StackFrame::State state;
  StackFrame::Type type = frame_->GetCallerState(&state);

  // Unwind handlers corresponding to the current frame.
  StackHandlerIterator it(frame_, handler_);
  while (!it.done()) it.Advance();
  handler_ = it.handler();

  // Advance to the calling frame.
  frame_ = SingletonFor(type, &state);

  // When we're done iterating over the stack frames, the handler
  // chain must have been completely unwound.
  DCHECK(!done() || handler_ == NULL);
}


void StackFrameIterator::Reset(ThreadLocalTop* top) {
  StackFrame::State state;
  StackFrame::Type type = ExitFrame::GetStateForFramePointer(
      Isolate::c_entry_fp(top), &state);
  handler_ = StackHandler::FromAddress(Isolate::handler(top));
  if (SingletonFor(type) == NULL) return;
  frame_ = SingletonFor(type, &state);
}


StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type,
                                                 StackFrame::State* state) {
  if (type == StackFrame::NONE) return NULL;
  StackFrame* result = SingletonFor(type);
  DCHECK(result != NULL);
  result->state_ = *state;
  return result;
}


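// Maps a frame type to the corresponding per-iterator singleton frame object
// set up by STACK_FRAME_TYPE_LIST in the iterator's constructor.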
StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type) {
#define FRAME_TYPE_CASE(type, field) \
  case StackFrame::type: result = &field##_; break;

  StackFrame* result = NULL;
  switch (type) {
    case StackFrame::NONE: return NULL;
    STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
    default: break;
  }
  return result;

#undef FRAME_TYPE_CASE
}


// -------------------------------------------------------------------------


JavaScriptFrameIterator::JavaScriptFrameIterator(
    Isolate* isolate, StackFrame::Id id)
    : iterator_(isolate) {
  while (!done()) {
    Advance();
    if (frame()->id() == id) return;
  }
}


void JavaScriptFrameIterator::Advance() {
  do {
    iterator_.Advance();
  } while (!iterator_.done() && !iterator_.frame()->is_java_script());
}


void JavaScriptFrameIterator::AdvanceToArgumentsFrame() {
  if (!frame()->has_adapted_arguments()) return;
  iterator_.Advance();
  DCHECK(iterator_.frame()->is_arguments_adaptor());
}


// -------------------------------------------------------------------------


StackTraceFrameIterator::StackTraceFrameIterator(Isolate* isolate)
    : JavaScriptFrameIterator(isolate) {
  if (!done() && !IsValidFrame()) Advance();
}


void StackTraceFrameIterator::Advance() {
  while (true) {
    JavaScriptFrameIterator::Advance();
    if (done()) return;
    if (IsValidFrame()) return;
  }
}


bool StackTraceFrameIterator::IsValidFrame() {
  if (!frame()->function()->IsJSFunction()) return false;
  Object* script = frame()->function()->shared()->script();
  // Don't show functions from native scripts to user.
  return (script->IsScript() &&
          Script::TYPE_NATIVE != Script::cast(script)->type()->value());
}


// -------------------------------------------------------------------------


SafeStackFrameIterator::SafeStackFrameIterator(
    Isolate* isolate,
    Address fp, Address sp, Address js_entry_sp)
    : StackFrameIteratorBase(isolate, false),
      low_bound_(sp),
      high_bound_(js_entry_sp),
      top_frame_type_(StackFrame::NONE),
      external_callback_scope_(isolate->external_callback_scope()) {
  StackFrame::State state;
  StackFrame::Type type;
  ThreadLocalTop* top = isolate->thread_local_top();
  if (IsValidTop(top)) {
    type = ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state);
    top_frame_type_ = type;
  } else if (IsValidStackAddress(fp)) {
    DCHECK(fp != NULL);
    state.fp = fp;
    state.sp = sp;
    state.pc_address = StackFrame::ResolveReturnAddressLocation(
        reinterpret_cast<Address*>(StandardFrame::ComputePCAddress(fp)));
    // StackFrame::ComputeType will read both kContextOffset and kMarkerOffset,
    // so we check only that kMarkerOffset is within the stack bounds and
    // statically assert that the kContextOffset slot is pushed on the stack
    // before kMarkerOffset.
    STATIC_ASSERT(StandardFrameConstants::kMarkerOffset <
                  StandardFrameConstants::kContextOffset);
    Address frame_marker = fp + StandardFrameConstants::kMarkerOffset;
    if (IsValidStackAddress(frame_marker)) {
      type = StackFrame::ComputeType(this, &state);
      top_frame_type_ = type;
    } else {
      // Mark the frame as JAVA_SCRIPT if we cannot determine its type.
      // The frame will be skipped anyway.
      type = StackFrame::JAVA_SCRIPT;
      // Top frame is incomplete so we cannot reliably determine its type.
      top_frame_type_ = StackFrame::NONE;
    }
  } else {
    return;
  }
  if (SingletonFor(type) == NULL) return;
  frame_ = SingletonFor(type, &state);
  if (frame_ == NULL) return;

  Advance();

  if (frame_ != NULL && !frame_->is_exit() &&
      external_callback_scope_ != NULL &&
      external_callback_scope_->scope_address() < frame_->fp()) {
    // Skip the top ExternalCallbackScope if we already advanced to a JS frame
    // under it. The sampler will pick up this top external callback anyway.
    external_callback_scope_ = external_callback_scope_->previous();
  }
}


bool SafeStackFrameIterator::IsValidTop(ThreadLocalTop* top) const {
  Address c_entry_fp = Isolate::c_entry_fp(top);
  if (!IsValidExitFrame(c_entry_fp)) return false;
  // There should be at least one JS_ENTRY stack handler.
  Address handler = Isolate::handler(top);
  if (handler == NULL) return false;
  // Check that there are no js frames on top of the native frames.
  return c_entry_fp < handler;
}


void SafeStackFrameIterator::AdvanceOneFrame() {
  DCHECK(!done());
  StackFrame* last_frame = frame_;
  Address last_sp = last_frame->sp(), last_fp = last_frame->fp();
  // Before advancing to the next stack frame, perform pointer validity tests.
  if (!IsValidFrame(last_frame) || !IsValidCaller(last_frame)) {
    frame_ = NULL;
    return;
  }

  // Advance to the previous frame.
  StackFrame::State state;
  StackFrame::Type type = frame_->GetCallerState(&state);
  frame_ = SingletonFor(type, &state);
  if (frame_ == NULL) return;

  // Check that we have actually moved to the previous frame in the stack.
  if (frame_->sp() < last_sp || frame_->fp() < last_fp) {
    frame_ = NULL;
  }
}


bool SafeStackFrameIterator::IsValidFrame(StackFrame* frame) const {
  return IsValidStackAddress(frame->sp()) && IsValidStackAddress(frame->fp());
}


bool SafeStackFrameIterator::IsValidCaller(StackFrame* frame) {
  StackFrame::State state;
  if (frame->is_entry() || frame->is_entry_construct()) {
    // See EntryFrame::GetCallerState. It computes the caller FP address
    // and calls ExitFrame::GetStateForFramePointer on it. We need to be
    // sure that the caller FP address is valid.
    Address caller_fp = Memory::Address_at(
        frame->fp() + EntryFrameConstants::kCallerFPOffset);
    if (!IsValidExitFrame(caller_fp)) return false;
  } else if (frame->is_arguments_adaptor()) {
    // See ArgumentsAdaptorFrame::GetCallerStackPointer. It assumes that
    // the number of arguments is stored on the stack as a Smi. We need to
    // check that it really is a Smi.
    Object* number_of_args = reinterpret_cast<ArgumentsAdaptorFrame*>(frame)->
        GetExpression(0);
    if (!number_of_args->IsSmi()) {
      return false;
    }
  }
  frame->ComputeCallerState(&state);
  return IsValidStackAddress(state.sp) && IsValidStackAddress(state.fp) &&
      SingletonFor(frame->GetCallerState(&state)) != NULL;
}


bool SafeStackFrameIterator::IsValidExitFrame(Address fp) const {
  if (!IsValidStackAddress(fp)) return false;
  Address sp = ExitFrame::ComputeStackPointer(fp);
  if (!IsValidStackAddress(sp)) return false;
  StackFrame::State state;
  ExitFrame::FillState(fp, sp, &state);
  if (!IsValidStackAddress(reinterpret_cast<Address>(state.pc_address))) {
    return false;
  }
  return *state.pc_address != NULL;
}


void SafeStackFrameIterator::Advance() {
  while (true) {
    AdvanceOneFrame();
    if (done()) return;
    if (frame_->is_java_script()) return;
    if (frame_->is_exit() && external_callback_scope_) {
      // Some of the EXIT frames may have ExternalCallbackScope allocated on
      // top of them. In that case the scope corresponds to the first EXIT
      // frame beneath it. There may be other EXIT frames on top of the
      // ExternalCallbackScope; just skip them, as we cannot collect any useful
      // information about them.
      if (external_callback_scope_->scope_address() < frame_->fp()) {
        Address* callback_address =
            external_callback_scope_->callback_address();
        if (*callback_address != NULL) {
          frame_->state_.pc_address = callback_address;
        }
        external_callback_scope_ = external_callback_scope_->previous();
        DCHECK(external_callback_scope_ == NULL ||
               external_callback_scope_->scope_address() > frame_->fp());
        return;
      }
    }
  }
}


// -------------------------------------------------------------------------


Code* StackFrame::GetSafepointData(Isolate* isolate,
                                   Address inner_pointer,
                                   SafepointEntry* safepoint_entry,
                                   unsigned* stack_slots) {
  InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry =
      isolate->inner_pointer_to_code_cache()->GetCacheEntry(inner_pointer);
  if (!entry->safepoint_entry.is_valid()) {
    entry->safepoint_entry = entry->code->GetSafepointEntry(inner_pointer);
    DCHECK(entry->safepoint_entry.is_valid());
  } else {
    DCHECK(entry->safepoint_entry.Equals(
        entry->code->GetSafepointEntry(inner_pointer)));
  }

  // Fill in the results and return the code.
  Code* code = entry->code;
  *safepoint_entry = entry->safepoint_entry;
  *stack_slots = code->stack_slots();
  return code;
}


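// Returns true if at least one stack handler has been pushed within this
// frame (i.e. between the frame's sp and fp).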
bool StackFrame::HasHandler() const {
  StackHandlerIterator it(this, top_handler());
  return !it.done();
}


#ifdef DEBUG
static bool GcSafeCodeContains(HeapObject* object, Address addr);
#endif


void StackFrame::IteratePc(ObjectVisitor* v,
                           Address* pc_address,
                           Code* holder) {
  Address pc = *pc_address;
  DCHECK(GcSafeCodeContains(holder, pc));
  unsigned pc_offset = static_cast<unsigned>(pc - holder->instruction_start());
  Object* code = holder;
  v->VisitPointer(&code);
  if (code != holder) {
    holder = reinterpret_cast<Code*>(code);
    pc = holder->instruction_start() + pc_offset;
    *pc_address = pc;
  }
}


void StackFrame::SetReturnAddressLocationResolver(
    ReturnAddressLocationResolver resolver) {
  DCHECK(return_address_location_resolver_ == NULL);
  return_address_location_resolver_ = resolver;
}


StackFrame::Type StackFrame::ComputeType(const StackFrameIteratorBase* iterator,
                                         State* state) {
  DCHECK(state->fp != NULL);
  if (StandardFrame::IsArgumentsAdaptorFrame(state->fp)) {
    return ARGUMENTS_ADAPTOR;
  }
  // The marker and function offsets overlap. If the marker isn't a
  // smi then the frame is a JavaScript frame -- and the marker is
  // really the function.
  const int offset = StandardFrameConstants::kMarkerOffset;
  Object* marker = Memory::Object_at(state->fp + offset);
  if (!marker->IsSmi()) {
    // If we're using a "safe" stack iterator, we treat optimized
    // frames as normal JavaScript frames to avoid having to look
    // into the heap to determine the state. This is safe as long
    // as nobody tries to GC...
    if (!iterator->can_access_heap_objects_) return JAVA_SCRIPT;
    Code::Kind kind = GetContainingCode(iterator->isolate(),
                                        *(state->pc_address))->kind();
    DCHECK(kind == Code::FUNCTION || kind == Code::OPTIMIZED_FUNCTION);
    return (kind == Code::OPTIMIZED_FUNCTION) ? OPTIMIZED : JAVA_SCRIPT;
  }
  return static_cast<StackFrame::Type>(Smi::cast(marker)->value());
}


#ifdef DEBUG
bool StackFrame::can_access_heap_objects() const {
  return iterator_->can_access_heap_objects_;
}
#endif


StackFrame::Type StackFrame::GetCallerState(State* state) const {
  ComputeCallerState(state);
  return ComputeType(iterator_, state);
}


Address StackFrame::UnpaddedFP() const {
#if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87
  if (!is_optimized()) return fp();
  int32_t alignment_state = Memory::int32_at(
      fp() + JavaScriptFrameConstants::kDynamicAlignmentStateOffset);

  return (alignment_state == kAlignmentPaddingPushed) ?
      (fp() + kPointerSize) : fp();
#else
  return fp();
#endif
}


Code* EntryFrame::unchecked_code() const {
  return isolate()->heap()->js_entry_code();
}


void EntryFrame::ComputeCallerState(State* state) const {
  GetCallerState(state);
}


void EntryFrame::SetCallerFp(Address caller_fp) {
  const int offset = EntryFrameConstants::kCallerFPOffset;
  Memory::Address_at(this->fp() + offset) = caller_fp;
}


StackFrame::Type EntryFrame::GetCallerState(State* state) const {
  const int offset = EntryFrameConstants::kCallerFPOffset;
  Address fp = Memory::Address_at(this->fp() + offset);
  return ExitFrame::GetStateForFramePointer(fp, state);
}


Code* EntryConstructFrame::unchecked_code() const {
  return isolate()->heap()->js_construct_entry_code();
}


Object*& ExitFrame::code_slot() const {
  const int offset = ExitFrameConstants::kCodeOffset;
  return Memory::Object_at(fp() + offset);
}


Code* ExitFrame::unchecked_code() const {
  return reinterpret_cast<Code*>(code_slot());
}


void ExitFrame::ComputeCallerState(State* state) const {
  // Set up the caller state.
  state->sp = caller_sp();
  state->fp = Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset);
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset));
  if (FLAG_enable_ool_constant_pool) {
    state->constant_pool_address = reinterpret_cast<Address*>(
        fp() + ExitFrameConstants::kConstantPoolOffset);
  }
}


void ExitFrame::SetCallerFp(Address caller_fp) {
  Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset) = caller_fp;
}


void ExitFrame::Iterate(ObjectVisitor* v) const {
  // The arguments are traversed as part of the expression stack of
  // the calling frame.
  IteratePc(v, pc_address(), LookupCode());
  v->VisitPointer(&code_slot());
  if (FLAG_enable_ool_constant_pool) {
    v->VisitPointer(&constant_pool_slot());
  }
}


Address ExitFrame::GetCallerStackPointer() const {
  return fp() + ExitFrameConstants::kCallerSPDisplacement;
}


StackFrame::Type ExitFrame::GetStateForFramePointer(Address fp, State* state) {
  if (fp == 0) return NONE;
  Address sp = ComputeStackPointer(fp);
  FillState(fp, sp, state);
  DCHECK(*state->pc_address != NULL);
  return EXIT;
}


Address ExitFrame::ComputeStackPointer(Address fp) {
  return Memory::Address_at(fp + ExitFrameConstants::kSPOffset);
}


void ExitFrame::FillState(Address fp, Address sp, State* state) {
  state->sp = sp;
  state->fp = fp;
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(sp - 1 * kPCOnStackSize));
  state->constant_pool_address =
      reinterpret_cast<Address*>(fp + ExitFrameConstants::kConstantPoolOffset);
}


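// Expression stack slots are addressed fp-relative: slot n lives at
// kExpressionsOffset minus n words, since the expression stack grows towards
// lower addresses.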
Address StandardFrame::GetExpressionAddress(int n) const {
  const int offset = StandardFrameConstants::kExpressionsOffset;
  return fp() + offset - n * kPointerSize;
}


Object* StandardFrame::GetExpression(Address fp, int index) {
  return Memory::Object_at(GetExpressionAddress(fp, index));
}


Address StandardFrame::GetExpressionAddress(Address fp, int n) {
  const int offset = StandardFrameConstants::kExpressionsOffset;
  return fp + offset - n * kPointerSize;
}


int StandardFrame::ComputeExpressionsCount() const {
  const int offset =
      StandardFrameConstants::kExpressionsOffset + kPointerSize;
  Address base = fp() + offset;
  Address limit = sp();
  DCHECK(base >= limit);  // stack grows downwards
  // Include register-allocated locals in number of expressions.
  return static_cast<int>((base - limit) / kPointerSize);
}


void StandardFrame::ComputeCallerState(State* state) const {
  state->sp = caller_sp();
  state->fp = caller_fp();
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(ComputePCAddress(fp())));
  state->constant_pool_address =
      reinterpret_cast<Address*>(ComputeConstantPoolAddress(fp()));
}


void StandardFrame::SetCallerFp(Address caller_fp) {
  Memory::Address_at(fp() + StandardFrameConstants::kCallerFPOffset) =
      caller_fp;
}


bool StandardFrame::IsExpressionInsideHandler(int n) const {
  Address address = GetExpressionAddress(n);
  for (StackHandlerIterator it(this, top_handler()); !it.done(); it.Advance()) {
    if (it.handler()->includes(address)) return true;
  }
  return false;
}


void StandardFrame::IterateCompiledFrame(ObjectVisitor* v) const {
  // Make sure that we're not doing "safe" stack frame iteration. We cannot
  // possibly find pointers in optimized frames in that state.
  DCHECK(can_access_heap_objects());

  // Compute the safepoint information.
  unsigned stack_slots = 0;
  SafepointEntry safepoint_entry;
  Code* code = StackFrame::GetSafepointData(
      isolate(), pc(), &safepoint_entry, &stack_slots);
  unsigned slot_space = stack_slots * kPointerSize;

  // Visit the outgoing parameters.
  Object** parameters_base = &Memory::Object_at(sp());
  Object** parameters_limit = &Memory::Object_at(
      fp() + JavaScriptFrameConstants::kFunctionOffset - slot_space);

  // Visit the parameters that may be on top of the saved registers.
  if (safepoint_entry.argument_count() > 0) {
    v->VisitPointers(parameters_base,
                     parameters_base + safepoint_entry.argument_count());
    parameters_base += safepoint_entry.argument_count();
  }

  // Skip saved double registers.
  if (safepoint_entry.has_doubles()) {
    // Number of doubles not known at snapshot time.
    DCHECK(!isolate()->serializer_enabled());
    parameters_base += DoubleRegister::NumAllocatableRegisters() *
        kDoubleSize / kPointerSize;
  }

  // Visit the registers that contain pointers if any.
  if (safepoint_entry.HasRegisters()) {
    for (int i = kNumSafepointRegisters - 1; i >= 0; i--) {
      if (safepoint_entry.HasRegisterAt(i)) {
        int reg_stack_index = MacroAssembler::SafepointRegisterStackIndex(i);
        v->VisitPointer(parameters_base + reg_stack_index);
      }
    }
    // Skip the words containing the register values.
    parameters_base += kNumSafepointRegisters;
  }

  // We're done dealing with the register bits.
  uint8_t* safepoint_bits = safepoint_entry.bits();
  safepoint_bits += kNumSafepointRegisters >> kBitsPerByteLog2;

  // Visit the rest of the parameters.
  v->VisitPointers(parameters_base, parameters_limit);

  // Visit pointer spill slots and locals.
  for (unsigned index = 0; index < stack_slots; index++) {
    int byte_index = index >> kBitsPerByteLog2;
    int bit_index = index & (kBitsPerByte - 1);
    if ((safepoint_bits[byte_index] & (1U << bit_index)) != 0) {
      v->VisitPointer(parameters_limit + index);
    }
  }

  // Visit the return address in the callee and incoming arguments.
  IteratePc(v, pc_address(), code);

  // Visit the context in stub frame and JavaScript frame.
  // Visit the function in JavaScript frame.
  Object** fixed_base = &Memory::Object_at(
      fp() + StandardFrameConstants::kMarkerOffset);
  Object** fixed_limit = &Memory::Object_at(fp());
  v->VisitPointers(fixed_base, fixed_limit);
}


void StubFrame::Iterate(ObjectVisitor* v) const {
  IterateCompiledFrame(v);
}


Code* StubFrame::unchecked_code() const {
  return static_cast<Code*>(isolate()->FindCodeObject(pc()));
}


Address StubFrame::GetCallerStackPointer() const {
  return fp() + ExitFrameConstants::kCallerSPDisplacement;
}


int StubFrame::GetNumberOfIncomingArguments() const {
  return 0;
}


void OptimizedFrame::Iterate(ObjectVisitor* v) const {
#ifdef DEBUG
  // Make sure that optimized frames do not contain any stack handlers.
  StackHandlerIterator it(this, top_handler());
  DCHECK(it.done());
#endif

  IterateCompiledFrame(v);
}


void JavaScriptFrame::SetParameterValue(int index, Object* value) const {
  Memory::Object_at(GetParameterSlot(index)) = value;
}


bool JavaScriptFrame::IsConstructor() const {
  Address fp = caller_fp();
  if (has_adapted_arguments()) {
    // Skip the arguments adaptor frame and look at the real caller.
    fp = Memory::Address_at(fp + StandardFrameConstants::kCallerFPOffset);
  }
  return IsConstructFrame(fp);
}


int JavaScriptFrame::GetArgumentsLength() const {
  // If there is an arguments adaptor frame get the arguments length from it.
  if (has_adapted_arguments()) {
    return Smi::cast(GetExpression(caller_fp(), 0))->value();
  } else {
    return GetNumberOfIncomingArguments();
  }
}


Code* JavaScriptFrame::unchecked_code() const {
  return function()->code();
}


int JavaScriptFrame::GetNumberOfIncomingArguments() const {
  DCHECK(can_access_heap_objects() &&
         isolate()->heap()->gc_state() == Heap::NOT_IN_GC);

  return function()->shared()->formal_parameter_count();
}


Address JavaScriptFrame::GetCallerStackPointer() const {
  return fp() + StandardFrameConstants::kCallerSPOffset;
}


void JavaScriptFrame::GetFunctions(List<JSFunction*>* functions) {
  DCHECK(functions->length() == 0);
  functions->Add(function());
}


void JavaScriptFrame::Summarize(List<FrameSummary>* functions) {
  DCHECK(functions->length() == 0);
  Code* code_pointer = LookupCode();
  int offset = static_cast<int>(pc() - code_pointer->address());
  FrameSummary summary(receiver(),
                       function(),
                       code_pointer,
                       offset,
                       IsConstructor());
  functions->Add(summary);
}


void JavaScriptFrame::PrintFunctionAndOffset(JSFunction* function, Code* code,
                                             Address pc, FILE* file,
                                             bool print_line_number) {
  PrintF(file, "%s", function->IsOptimized() ? "*" : "~");
  function->PrintName(file);
  int code_offset = static_cast<int>(pc - code->instruction_start());
  PrintF(file, "+%d", code_offset);
  if (print_line_number) {
    SharedFunctionInfo* shared = function->shared();
    int source_pos = code->SourcePosition(pc);
    Object* maybe_script = shared->script();
    if (maybe_script->IsScript()) {
      Script* script = Script::cast(maybe_script);
      int line = script->GetLineNumber(source_pos) + 1;
      Object* script_name_raw = script->name();
      if (script_name_raw->IsString()) {
        String* script_name = String::cast(script->name());
        SmartArrayPointer<char> c_script_name =
            script_name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
        PrintF(file, " at %s:%d", c_script_name.get(), line);
      } else {
        PrintF(file, " at <unknown>:%d", line);
      }
    } else {
      PrintF(file, " at <unknown>:<unknown>");
    }
  }
}


void JavaScriptFrame::PrintTop(Isolate* isolate, FILE* file, bool print_args,
                               bool print_line_number) {
  // constructor calls
  DisallowHeapAllocation no_allocation;
  JavaScriptFrameIterator it(isolate);
  while (!it.done()) {
    if (it.frame()->is_java_script()) {
      JavaScriptFrame* frame = it.frame();
      if (frame->IsConstructor()) PrintF(file, "new ");
      PrintFunctionAndOffset(frame->function(), frame->unchecked_code(),
                             frame->pc(), file, print_line_number);
      if (print_args) {
        // function arguments
        // (we are intentionally only printing the actually
        // supplied parameters, not all parameters required)
        PrintF(file, "(this=");
        frame->receiver()->ShortPrint(file);
        const int length = frame->ComputeParametersCount();
        for (int i = 0; i < length; i++) {
          PrintF(file, ", ");
          frame->GetParameter(i)->ShortPrint(file);
        }
        PrintF(file, ")");
      }
      break;
    }
    it.Advance();
  }
}


void JavaScriptFrame::SaveOperandStack(FixedArray* store,
                                       int* stack_handler_index) const {
  int operands_count = store->length();
  DCHECK_LE(operands_count, ComputeOperandsCount());

  // Visit the stack in LIFO order, saving operands and stack handlers into the
  // array. The saved stack handlers store a link to the next stack handler,
  // which will allow RestoreOperandStack to rewind the handlers.
  StackHandlerIterator it(this, top_handler());
  int i = operands_count - 1;
  *stack_handler_index = -1;
  for (; !it.done(); it.Advance()) {
    StackHandler* handler = it.handler();
    // Save operands pushed after the handler was pushed.
    for (; GetOperandSlot(i) < handler->address(); i--) {
      store->set(i, GetOperand(i));
    }
    DCHECK_GE(i + 1, StackHandlerConstants::kSlotCount);
    DCHECK_EQ(handler->address(), GetOperandSlot(i));
    int next_stack_handler_index = i + 1 - StackHandlerConstants::kSlotCount;
    handler->Unwind(isolate(), store, next_stack_handler_index,
                    *stack_handler_index);
    *stack_handler_index = next_stack_handler_index;
    i -= StackHandlerConstants::kSlotCount;
  }

  // Save any remaining operands.
  for (; i >= 0; i--) {
    store->set(i, GetOperand(i));
  }
}


void JavaScriptFrame::RestoreOperandStack(FixedArray* store,
                                          int stack_handler_index) {
  int operands_count = store->length();
  DCHECK_LE(operands_count, ComputeOperandsCount());
  int i = 0;
  while (i <= stack_handler_index) {
    if (i < stack_handler_index) {
      // An operand.
      DCHECK_EQ(GetOperand(i), isolate()->heap()->the_hole_value());
      Memory::Object_at(GetOperandSlot(i)) = store->get(i);
      i++;
    } else {
      // A stack handler.
      DCHECK_EQ(i, stack_handler_index);
      // The FixedArray store grows up. The stack grows down. So the operand
      // slot for i actually points to the bottom of the top word in the
      // handler. The base of the StackHandler* is the address of the bottom
      // word, which will be the last slot that is in the handler.
      int handler_slot_index = i + StackHandlerConstants::kSlotCount - 1;
      StackHandler* handler =
          StackHandler::FromAddress(GetOperandSlot(handler_slot_index));
      stack_handler_index = handler->Rewind(isolate(), store, i, fp());
      i += StackHandlerConstants::kSlotCount;
    }
  }

  for (; i < operands_count; i++) {
    DCHECK_EQ(GetOperand(i), isolate()->heap()->the_hole_value());
    Memory::Object_at(GetOperandSlot(i)) = store->get(i);
  }
}


void FrameSummary::Print() {
  PrintF("receiver: ");
  receiver_->ShortPrint();
  PrintF("\nfunction: ");
  function_->shared()->DebugName()->ShortPrint();
  PrintF("\ncode: ");
  code_->ShortPrint();
  if (code_->kind() == Code::FUNCTION) PrintF(" NON-OPT");
  if (code_->kind() == Code::OPTIMIZED_FUNCTION) PrintF(" OPT");
  PrintF("\npc: %d\n", offset_);
}


JSFunction* OptimizedFrame::LiteralAt(FixedArray* literal_array,
                                      int literal_id) {
  if (literal_id == Translation::kSelfLiteralId) {
    return function();
  }

  return JSFunction::cast(literal_array->get(literal_id));
}


void OptimizedFrame::Summarize(List<FrameSummary>* frames) {
  DCHECK(frames->length() == 0);
  DCHECK(is_optimized());

  // Delegate to JS frame in absence of turbofan deoptimization.
  // TODO(turbofan): Revisit once we support deoptimization across the board.
  if (LookupCode()->is_turbofanned() && !FLAG_turbo_deoptimization) {
    return JavaScriptFrame::Summarize(frames);
  }

  int deopt_index = Safepoint::kNoDeoptimizationIndex;
  DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index);
  FixedArray* literal_array = data->LiteralArray();

  // BUG(3243555): Since we don't have a lazy-deopt registered at
  // throw-statements, we can't use the translation at the call-site of
  // throw. An entry with no deoptimization index indicates a call-site
  // without a lazy-deopt. As a consequence we are not allowed to inline
  // functions containing throw.
  DCHECK(deopt_index != Safepoint::kNoDeoptimizationIndex);

  TranslationIterator it(data->TranslationByteArray(),
                         data->TranslationIndex(deopt_index)->value());
  Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
  DCHECK(opcode == Translation::BEGIN);
  it.Next();  // Drop frame count.
  int jsframe_count = it.Next();

  // We create the summary in reverse order because the frames
  // in the deoptimization translation are ordered bottom-to-top.
  bool is_constructor = IsConstructor();
  int i = jsframe_count;
  while (i > 0) {
    opcode = static_cast<Translation::Opcode>(it.Next());
    if (opcode == Translation::JS_FRAME) {
      i--;
      BailoutId ast_id = BailoutId(it.Next());
      JSFunction* function = LiteralAt(literal_array, it.Next());
      it.Next();  // Skip height.

      // The translation commands are ordered and the receiver is always
      // at the first position.
      // If we are at a call, the receiver is always in a stack slot.
      // Otherwise we are not guaranteed to get the receiver value.
      opcode = static_cast<Translation::Opcode>(it.Next());
      int index = it.Next();

      // Get the correct receiver in the optimized frame.
      Object* receiver = NULL;
      if (opcode == Translation::LITERAL) {
        receiver = data->LiteralArray()->get(index);
      } else if (opcode == Translation::STACK_SLOT) {
        // Positive index means the value is spilled to the locals
        // area. Negative means it is stored in the incoming parameter
        // area.
        if (index >= 0) {
          receiver = GetExpression(index);
        } else {
          // Index -1 overlaps with the last parameter, -n with the first
          // parameter, and (-n - 1) with the receiver, where n is the number
          // of parameters of the outermost optimized frame.
          int parameter_count = ComputeParametersCount();
          int parameter_index = index + parameter_count;
          receiver = (parameter_index == -1)
              ? this->receiver()
              : this->GetParameter(parameter_index);
        }
      } else {
        // The receiver is not in a stack slot nor in a literal. We give up.
        // TODO(3029): Materializing a captured object (or duplicated
        // object) is hard, we return undefined for now. This breaks the
        // produced stack trace, as constructor frames aren't marked as
        // such anymore.
        receiver = isolate()->heap()->undefined_value();
      }

      Code* code = function->shared()->code();
      DeoptimizationOutputData* output_data =
          DeoptimizationOutputData::cast(code->deoptimization_data());
      unsigned entry = Deoptimizer::GetOutputInfo(output_data,
                                                  ast_id,
                                                  function->shared());
      unsigned pc_offset =
          FullCodeGenerator::PcField::decode(entry) + Code::kHeaderSize;
      DCHECK(pc_offset > 0);

      FrameSummary summary(receiver, function, code, pc_offset, is_constructor);
      frames->Add(summary);
      is_constructor = false;
    } else if (opcode == Translation::CONSTRUCT_STUB_FRAME) {
      // The next encountered JS_FRAME will be marked as a constructor call.
      it.Skip(Translation::NumberOfOperandsFor(opcode));
      DCHECK(!is_constructor);
      is_constructor = true;
    } else {
      // Skip over operands to advance to the next opcode.
      it.Skip(Translation::NumberOfOperandsFor(opcode));
    }
  }
  DCHECK(!is_constructor);
}


DeoptimizationInputData* OptimizedFrame::GetDeoptimizationData(
    int* deopt_index) {
  DCHECK(is_optimized());

  JSFunction* opt_function = function();
  Code* code = opt_function->code();

  // The code object may have been replaced by lazy deoptimization. Fall
  // back to a slow search in this case to find the original optimized
  // code object.
  if (!code->contains(pc())) {
    code = isolate()->inner_pointer_to_code_cache()->
        GcSafeFindCodeForInnerPointer(pc());
  }
  DCHECK(code != NULL);
  DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);

  SafepointEntry safepoint_entry = code->GetSafepointEntry(pc());
  *deopt_index = safepoint_entry.deoptimization_index();
  DCHECK(*deopt_index != Safepoint::kNoDeoptimizationIndex);

  return DeoptimizationInputData::cast(code->deoptimization_data());
}


int OptimizedFrame::GetInlineCount() {
  DCHECK(is_optimized());

  // Delegate to JS frame in absence of turbofan deoptimization.
  // TODO(turbofan): Revisit once we support deoptimization across the board.
  if (LookupCode()->is_turbofanned() && !FLAG_turbo_deoptimization) {
    return JavaScriptFrame::GetInlineCount();
  }

  int deopt_index = Safepoint::kNoDeoptimizationIndex;
  DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index);

  TranslationIterator it(data->TranslationByteArray(),
                         data->TranslationIndex(deopt_index)->value());
  Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
  DCHECK(opcode == Translation::BEGIN);
  USE(opcode);
  it.Next();  // Drop frame count.
  int jsframe_count = it.Next();
  return jsframe_count;
}


void OptimizedFrame::GetFunctions(List<JSFunction*>* functions) {
  DCHECK(functions->length() == 0);
  DCHECK(is_optimized());

  // Delegate to JS frame in absence of turbofan deoptimization.
  // TODO(turbofan): Revisit once we support deoptimization across the board.
  if (LookupCode()->is_turbofanned() && !FLAG_turbo_deoptimization) {
    return JavaScriptFrame::GetFunctions(functions);
  }

  int deopt_index = Safepoint::kNoDeoptimizationIndex;
  DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index);
  FixedArray* literal_array = data->LiteralArray();

  TranslationIterator it(data->TranslationByteArray(),
                         data->TranslationIndex(deopt_index)->value());
  Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
  DCHECK(opcode == Translation::BEGIN);
  it.Next();  // Drop frame count.
  int jsframe_count = it.Next();

  // We insert the frames in reverse order because the frames
  // in the deoptimization translation are ordered bottom-to-top.
  while (jsframe_count > 0) {
    opcode = static_cast<Translation::Opcode>(it.Next());
    if (opcode == Translation::JS_FRAME) {
      jsframe_count--;
      it.Next();  // Skip ast id.
      JSFunction* function = LiteralAt(literal_array, it.Next());
      it.Next();  // Skip height.
      functions->Add(function);
    } else {
      // Skip over operands to advance to the next opcode.
      it.Skip(Translation::NumberOfOperandsFor(opcode));
    }
  }
}


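// The adaptor frame stores the actual argument count as a Smi in its first
// expression slot.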
int ArgumentsAdaptorFrame::GetNumberOfIncomingArguments() const {
  return Smi::cast(GetExpression(0))->value();
}


Address ArgumentsAdaptorFrame::GetCallerStackPointer() const {
  return fp() + StandardFrameConstants::kCallerSPOffset;
}


Address InternalFrame::GetCallerStackPointer() const {
  // Internal frames have no arguments. The stack pointer of the
  // caller is at a fixed offset from the frame pointer.
  return fp() + StandardFrameConstants::kCallerSPOffset;
}


Code* ArgumentsAdaptorFrame::unchecked_code() const {
  return isolate()->builtins()->builtin(
      Builtins::kArgumentsAdaptorTrampoline);
}


Code* InternalFrame::unchecked_code() const {
  const int offset = InternalFrameConstants::kCodeOffset;
  Object* code = Memory::Object_at(fp() + offset);
  DCHECK(code != NULL);
  return reinterpret_cast<Code*>(code);
}


void StackFrame::PrintIndex(StringStream* accumulator,
                            PrintMode mode,
                            int index) {
  accumulator->Add((mode == OVERVIEW) ? "%5d: " : "[%d]: ", index);
}


void JavaScriptFrame::Print(StringStream* accumulator,
                            PrintMode mode,
                            int index) const {
  DisallowHeapAllocation no_gc;
  Object* receiver = this->receiver();
  JSFunction* function = this->function();

  accumulator->PrintSecurityTokenIfChanged(function);
  PrintIndex(accumulator, mode, index);
  Code* code = NULL;
  if (IsConstructor()) accumulator->Add("new ");
  accumulator->PrintFunction(function, receiver, &code);

  // Get scope information for nicer output, if possible. If code is NULL, or
  // doesn't contain scope info, scope_info will return 0 for the number of
  // parameters, stack local variables, context local variables, stack slots,
  // or context slots.
  SharedFunctionInfo* shared = function->shared();
  ScopeInfo* scope_info = shared->scope_info();
  Object* script_obj = shared->script();
  if (script_obj->IsScript()) {
    Script* script = Script::cast(script_obj);
    accumulator->Add(" [");
    accumulator->PrintName(script->name());

    Address pc = this->pc();
    if (code != NULL && code->kind() == Code::FUNCTION &&
        pc >= code->instruction_start() && pc < code->instruction_end()) {
      int source_pos = code->SourcePosition(pc);
      int line = script->GetLineNumber(source_pos) + 1;
      accumulator->Add(":%d", line);
    } else {
      int function_start_pos = shared->start_position();
      int line = script->GetLineNumber(function_start_pos) + 1;
      accumulator->Add(":~%d", line);
    }

    accumulator->Add("] ");
  }

  accumulator->Add("(this=%o", receiver);

  // Print the parameters.
  int parameters_count = ComputeParametersCount();
  for (int i = 0; i < parameters_count; i++) {
    accumulator->Add(",");
    // If we have a name for the parameter we print it. Nameless
    // parameters are either because we have more actual parameters
    // than formal parameters or because we have no scope information.
    if (i < scope_info->ParameterCount()) {
      accumulator->PrintName(scope_info->ParameterName(i));
      accumulator->Add("=");
    }
    accumulator->Add("%o", GetParameter(i));
  }

  accumulator->Add(")");
  if (mode == OVERVIEW) {
    accumulator->Add("\n");
    return;
  }
  if (is_optimized()) {
    accumulator->Add(" {\n// optimized frame\n}\n");
    return;
  }
  accumulator->Add(" {\n");

  // Compute the number of locals and expression stack elements.
  int stack_locals_count = scope_info->StackLocalCount();
  int heap_locals_count = scope_info->ContextLocalCount();
  int expressions_count = ComputeExpressionsCount();

  // Print stack-allocated local variables.
  if (stack_locals_count > 0) {
    accumulator->Add(" // stack-allocated locals\n");
  }
  for (int i = 0; i < stack_locals_count; i++) {
    accumulator->Add(" var ");
    accumulator->PrintName(scope_info->StackLocalName(i));
    accumulator->Add(" = ");
    if (i < expressions_count) {
      accumulator->Add("%o", GetExpression(i));
    } else {
      accumulator->Add("// no expression found - inconsistent frame?");
    }
    accumulator->Add("\n");
  }

  // Try to get hold of the context of this frame.
  Context* context = NULL;
  if (this->context() != NULL && this->context()->IsContext()) {
    context = Context::cast(this->context());
  }
  while (context->IsWithContext()) {
    context = context->previous();
    DCHECK(context != NULL);
  }

  // Print heap-allocated local variables.
  if (heap_locals_count > 0) {
    accumulator->Add(" // heap-allocated locals\n");
  }
  for (int i = 0; i < heap_locals_count; i++) {
    accumulator->Add(" var ");
    accumulator->PrintName(scope_info->ContextLocalName(i));
    accumulator->Add(" = ");
    if (context != NULL) {
      int index = Context::MIN_CONTEXT_SLOTS + i;
      if (index < context->length()) {
        accumulator->Add("%o", context->get(index));
      } else {
        accumulator->Add(
            "// warning: missing context slot - inconsistent frame?");
      }
    } else {
      accumulator->Add("// warning: no context found - inconsistent frame?");
    }
    accumulator->Add("\n");
  }

  // Print the expression stack.
  int expressions_start = stack_locals_count;
  if (expressions_start < expressions_count) {
    accumulator->Add(" // expression stack (top to bottom)\n");
  }
  for (int i = expressions_count - 1; i >= expressions_start; i--) {
    if (IsExpressionInsideHandler(i)) continue;
    accumulator->Add(" [%02d] : %o\n", i, GetExpression(i));
  }

  // Print details about the function.
  if (FLAG_max_stack_trace_source_length != 0 && code != NULL) {
    OStringStream os;
    SharedFunctionInfo* shared = function->shared();
    os << "--------- s o u r c e c o d e ---------\n"
       << SourceCodeOf(shared, FLAG_max_stack_trace_source_length)
       << "\n-----------------------------------------\n";
    accumulator->Add(os.c_str());
  }

  accumulator->Add("}\n\n");
}


void ArgumentsAdaptorFrame::Print(StringStream* accumulator,
                                  PrintMode mode,
                                  int index) const {
  int actual = ComputeParametersCount();
  int expected = -1;
  JSFunction* function = this->function();
  expected = function->shared()->formal_parameter_count();

  PrintIndex(accumulator, mode, index);
  accumulator->Add("arguments adaptor frame: %d->%d", actual, expected);
  if (mode == OVERVIEW) {
    accumulator->Add("\n");
    return;
  }
  accumulator->Add(" {\n");

  // Print actual arguments.
  if (actual > 0) accumulator->Add(" // actual arguments\n");
  for (int i = 0; i < actual; i++) {
    accumulator->Add(" [%02d] : %o", i, GetParameter(i));
    if (expected != -1 && i >= expected) {
      accumulator->Add(" // not passed to callee");
    }
    accumulator->Add("\n");
  }

  accumulator->Add("}\n\n");
}


void EntryFrame::Iterate(ObjectVisitor* v) const {
  StackHandlerIterator it(this, top_handler());
  DCHECK(!it.done());
  StackHandler* handler = it.handler();
  DCHECK(handler->is_js_entry());
  handler->Iterate(v, LookupCode());
#ifdef DEBUG
  // Make sure that the entry frame does not contain more than one
  // stack handler.
  it.Advance();
  DCHECK(it.done());
#endif
  IteratePc(v, pc_address(), LookupCode());
}


void StandardFrame::IterateExpressions(ObjectVisitor* v) const {
  const int offset = StandardFrameConstants::kLastObjectOffset;
  Object** base = &Memory::Object_at(sp());
  Object** limit = &Memory::Object_at(fp() + offset) + 1;
  for (StackHandlerIterator it(this, top_handler()); !it.done(); it.Advance()) {
    StackHandler* handler = it.handler();
    // Traverse pointers down to - but not including - the next
    // handler in the handler chain. Update the base to skip the
    // handler and allow the handler to traverse its own pointers.
    const Address address = handler->address();
    v->VisitPointers(base, reinterpret_cast<Object**>(address));
    base = reinterpret_cast<Object**>(address + StackHandlerConstants::kSize);
    // Traverse the pointers in the handler itself.
    handler->Iterate(v, LookupCode());
  }
  v->VisitPointers(base, limit);
}


void JavaScriptFrame::Iterate(ObjectVisitor* v) const {
  IterateExpressions(v);
  IteratePc(v, pc_address(), LookupCode());
}


void InternalFrame::Iterate(ObjectVisitor* v) const {
  // Internal frames only have object pointers on the expression stack
  // as they never have any arguments.
  IterateExpressions(v);
  IteratePc(v, pc_address(), LookupCode());
}


void StubFailureTrampolineFrame::Iterate(ObjectVisitor* v) const {
  Object** base = &Memory::Object_at(sp());
  Object** limit = &Memory::Object_at(fp() +
                                      kFirstRegisterParameterFrameOffset);
  v->VisitPointers(base, limit);
  base = &Memory::Object_at(fp() + StandardFrameConstants::kMarkerOffset);
  const int offset = StandardFrameConstants::kLastObjectOffset;
  limit = &Memory::Object_at(fp() + offset) + 1;
  v->VisitPointers(base, limit);
  IteratePc(v, pc_address(), LookupCode());
}


Address StubFailureTrampolineFrame::GetCallerStackPointer() const {
  return fp() + StandardFrameConstants::kCallerSPOffset;
}


Code* StubFailureTrampolineFrame::unchecked_code() const {
  Code* trampoline;
  StubFailureTrampolineStub(isolate(), NOT_JS_FUNCTION_STUB_MODE).
      FindCodeInCache(&trampoline);
  if (trampoline->contains(pc())) {
    return trampoline;
  }

  StubFailureTrampolineStub(isolate(), JS_FUNCTION_STUB_MODE).
      FindCodeInCache(&trampoline);
  if (trampoline->contains(pc())) {
    return trampoline;
  }

  UNREACHABLE();
  return NULL;
}


// -------------------------------------------------------------------------


JavaScriptFrame* StackFrameLocator::FindJavaScriptFrame(int n) {
  DCHECK(n >= 0);
  for (int i = 0; i <= n; i++) {
    while (!iterator_.frame()->is_java_script()) iterator_.Advance();
    if (i == n) return JavaScriptFrame::cast(iterator_.frame());
    iterator_.Advance();
  }
  UNREACHABLE();
  return NULL;
}


// -------------------------------------------------------------------------


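// During mark-compact the map word of a code-space object may hold a
// forwarding pointer; follow it so the map (and thus the object size) can be
// read safely.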
static Map* GcSafeMapOfCodeSpaceObject(HeapObject* object) {
  MapWord map_word = object->map_word();
  return map_word.IsForwardingAddress() ?
      map_word.ToForwardingAddress()->map() : map_word.ToMap();
}


static int GcSafeSizeOfCodeSpaceObject(HeapObject* object) {
  return object->SizeFromMap(GcSafeMapOfCodeSpaceObject(object));
}


#ifdef DEBUG
static bool GcSafeCodeContains(HeapObject* code, Address addr) {
  Map* map = GcSafeMapOfCodeSpaceObject(code);
  DCHECK(map == code->GetHeap()->code_map());
  Address start = code->address();
  Address end = code->address() + code->SizeFromMap(map);
  return start <= addr && addr < end;
}
#endif


Code* InnerPointerToCodeCache::GcSafeCastToCode(HeapObject* object,
                                                Address inner_pointer) {
  Code* code = reinterpret_cast<Code*>(object);
  DCHECK(code != NULL && GcSafeCodeContains(code, inner_pointer));
  return code;
}


Code* InnerPointerToCodeCache::GcSafeFindCodeForInnerPointer(
    Address inner_pointer) {
  Heap* heap = isolate_->heap();
  // Check if the inner pointer points into a large object chunk.
  LargePage* large_page = heap->lo_space()->FindPage(inner_pointer);
  if (large_page != NULL) {
    return GcSafeCastToCode(large_page->GetObject(), inner_pointer);
  }

  // Iterate through the page until we reach the end or find an object starting
  // after the inner pointer.
  Page* page = Page::FromAddress(inner_pointer);

  Address addr = page->skip_list()->StartFor(inner_pointer);

  Address top = heap->code_space()->top();
  Address limit = heap->code_space()->limit();

  while (true) {
    if (addr == top && addr != limit) {
      addr = limit;
      continue;
    }

    HeapObject* obj = HeapObject::FromAddress(addr);
    int obj_size = GcSafeSizeOfCodeSpaceObject(obj);
    Address next_addr = addr + obj_size;
    if (next_addr > inner_pointer) return GcSafeCastToCode(obj, inner_pointer);
    addr = next_addr;
  }
}


InnerPointerToCodeCache::InnerPointerToCodeCacheEntry*
    InnerPointerToCodeCache::GetCacheEntry(Address inner_pointer) {
  isolate_->counters()->pc_to_code()->Increment();
  DCHECK(base::bits::IsPowerOfTwo32(kInnerPointerToCodeCacheSize));
  uint32_t hash = ComputeIntegerHash(
      static_cast<uint32_t>(reinterpret_cast<uintptr_t>(inner_pointer)),
      v8::internal::kZeroHashSeed);
  uint32_t index = hash & (kInnerPointerToCodeCacheSize - 1);
  InnerPointerToCodeCacheEntry* entry = cache(index);
  if (entry->inner_pointer == inner_pointer) {
    isolate_->counters()->pc_to_code_cached()->Increment();
    DCHECK(entry->code == GcSafeFindCodeForInnerPointer(inner_pointer));
  } else {
    // Because this code may be interrupted by a profiling signal that
    // also queries the cache, we cannot update inner_pointer before the code
    // has been set. Otherwise, we risk trying to use a cache entry before
    // the code has been computed.
    entry->code = GcSafeFindCodeForInnerPointer(inner_pointer);
    entry->safepoint_entry.Reset();
    entry->inner_pointer = inner_pointer;
  }
  return entry;
}


// -------------------------------------------------------------------------


void StackHandler::Unwind(Isolate* isolate,
                          FixedArray* array,
                          int offset,
                          int previous_handler_offset) const {
  STATIC_ASSERT(StackHandlerConstants::kSlotCount >= 5);
  DCHECK_LE(0, offset);
  DCHECK_GE(array->length(), offset + StackHandlerConstants::kSlotCount);
  // Unwinding a stack handler into an array chains it in the opposite
  // direction, re-using the "next" slot as a "previous" link, so that stack
  // handlers can be later re-wound in the correct order. Decode the "state"
  // slot into "index" and "kind" and store them separately, using the fp slot.
  array->set(offset, Smi::FromInt(previous_handler_offset));        // next
  array->set(offset + 1, *code_address());                          // code
  array->set(offset + 2, Smi::FromInt(static_cast<int>(index())));  // state
  array->set(offset + 3, *context_address());                       // context
  array->set(offset + 4, Smi::FromInt(static_cast<int>(kind())));   // fp

  *isolate->handler_address() = next()->address();
}


int StackHandler::Rewind(Isolate* isolate,
                         FixedArray* array,
                         int offset,
                         Address fp) {
  STATIC_ASSERT(StackHandlerConstants::kSlotCount >= 5);
  DCHECK_LE(0, offset);
  DCHECK_GE(array->length(), offset + StackHandlerConstants::kSlotCount);
  Smi* prev_handler_offset = Smi::cast(array->get(offset));
  Code* code = Code::cast(array->get(offset + 1));
  Smi* smi_index = Smi::cast(array->get(offset + 2));
  Object* context = array->get(offset + 3);
  Smi* smi_kind = Smi::cast(array->get(offset + 4));

  unsigned state = KindField::encode(static_cast<Kind>(smi_kind->value())) |
      IndexField::encode(static_cast<unsigned>(smi_index->value()));

  Memory::Address_at(address() + StackHandlerConstants::kNextOffset) =
      *isolate->handler_address();
  Memory::Object_at(address() + StackHandlerConstants::kCodeOffset) = code;
  Memory::uintptr_at(address() + StackHandlerConstants::kStateOffset) = state;
  Memory::Object_at(address() + StackHandlerConstants::kContextOffset) =
      context;
  SetFp(address() + StackHandlerConstants::kFPOffset, fp);

  *isolate->handler_address() = address();

  return prev_handler_offset->value();
}


// -------------------------------------------------------------------------

int NumRegs(RegList reglist) { return base::bits::CountPopulation32(reglist); }


struct JSCallerSavedCodeData {
  int reg_code[kNumJSCallerSaved];
};

JSCallerSavedCodeData caller_saved_code_data;

void SetUpJSCallerSavedCodeData() {
  int i = 0;
  for (int r = 0; r < kNumRegs; r++)
    if ((kJSCallerSaved & (1 << r)) != 0)
      caller_saved_code_data.reg_code[i++] = r;

  DCHECK(i == kNumJSCallerSaved);
}


int JSCallerSavedCode(int n) {
  DCHECK(0 <= n && n < kNumJSCallerSaved);
  return caller_saved_code_data.reg_code[n];
}


#define DEFINE_WRAPPER(type, field)                           \
class field##_Wrapper : public ZoneObject {                   \
 public:  /* NOLINT */                                        \
  field##_Wrapper(const field& original) : frame_(original) { \
  }                                                           \
  field frame_;                                               \
};
STACK_FRAME_TYPE_LIST(DEFINE_WRAPPER)
#undef DEFINE_WRAPPER

static StackFrame* AllocateFrameCopy(StackFrame* frame, Zone* zone) {
#define FRAME_TYPE_CASE(type, field)                                  \
  case StackFrame::type: {                                            \
    field##_Wrapper* wrapper =                                        \
        new(zone) field##_Wrapper(*(reinterpret_cast<field*>(frame))); \
    return &wrapper->frame_;                                          \
  }

  switch (frame->type()) {
    STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
    default: UNREACHABLE();
  }
#undef FRAME_TYPE_CASE
  return NULL;
}


Vector<StackFrame*> CreateStackMap(Isolate* isolate, Zone* zone) {
  ZoneList<StackFrame*> list(10, zone);
  for (StackFrameIterator it(isolate); !it.done(); it.Advance()) {
    StackFrame* frame = AllocateFrameCopy(it.frame(), zone);
    list.Add(frame, zone);
  }
  return list.ToVector();
}


} }  // namespace v8::internal