// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/ast.h"
#include "src/deoptimizer.h"
#include "src/frames-inl.h"
#include "src/full-codegen.h"
#include "src/mark-compact.h"
#include "src/safepoint-table.h"
#include "src/scopeinfo.h"
#include "src/string-stream.h"
#include "src/vm-state-inl.h"

namespace v8 {
namespace internal {


ReturnAddressLocationResolver
    StackFrame::return_address_location_resolver_ = NULL;


// Iterator that supports traversing the stack handlers of a
// particular frame. Needs to know the top of the handler chain.
class StackHandlerIterator BASE_EMBEDDED {
 public:
  StackHandlerIterator(const StackFrame* frame, StackHandler* handler)
      : limit_(frame->fp()), handler_(handler) {
    // Make sure the handler has already been unwound to this frame.
    ASSERT(frame->sp() <= handler->address());
  }

  StackHandler* handler() const { return handler_; }

  bool done() {
    return handler_ == NULL || handler_->address() > limit_;
  }
  void Advance() {
    ASSERT(!done());
    handler_ = handler_->next();
  }

 private:
  const Address limit_;
  StackHandler* handler_;
};


// -------------------------------------------------------------------------


#define INITIALIZE_SINGLETON(type, field) field##_(this),
StackFrameIteratorBase::StackFrameIteratorBase(Isolate* isolate,
                                               bool can_access_heap_objects)
    : isolate_(isolate),
      STACK_FRAME_TYPE_LIST(INITIALIZE_SINGLETON)
      frame_(NULL), handler_(NULL),
      can_access_heap_objects_(can_access_heap_objects) {
}
#undef INITIALIZE_SINGLETON


StackFrameIterator::StackFrameIterator(Isolate* isolate)
    : StackFrameIteratorBase(isolate, true) {
  Reset(isolate->thread_local_top());
}


StackFrameIterator::StackFrameIterator(Isolate* isolate, ThreadLocalTop* t)
    : StackFrameIteratorBase(isolate, true) {
  Reset(t);
}


void StackFrameIterator::Advance() {
  ASSERT(!done());
  // Compute the state of the calling frame before restoring
  // callee-saved registers and unwinding handlers. This allows the
  // frame code that computes the caller state to access the top
  // handler and the value of any callee-saved register if needed.
  StackFrame::State state;
  StackFrame::Type type = frame_->GetCallerState(&state);

  // Unwind handlers corresponding to the current frame.
  StackHandlerIterator it(frame_, handler_);
  while (!it.done()) it.Advance();
  handler_ = it.handler();

  // Advance to the calling frame.
  frame_ = SingletonFor(type, &state);

  // When we're done iterating over the stack frames, the handler
  // chain must have been completely unwound.
  ASSERT(!done() || handler_ == NULL);
}


void StackFrameIterator::Reset(ThreadLocalTop* top) {
  StackFrame::State state;
  StackFrame::Type type = ExitFrame::GetStateForFramePointer(
      Isolate::c_entry_fp(top), &state);
  handler_ = StackHandler::FromAddress(Isolate::handler(top));
  if (SingletonFor(type) == NULL) return;
  frame_ = SingletonFor(type, &state);
}


StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type,
                                                 StackFrame::State* state) {
  if (type == StackFrame::NONE) return NULL;
  StackFrame* result = SingletonFor(type);
  ASSERT(result != NULL);
  result->state_ = *state;
  return result;
}


StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type) {
#define FRAME_TYPE_CASE(type, field) \
  case StackFrame::type: result = &field##_; break;

  StackFrame* result = NULL;
  switch (type) {
    case StackFrame::NONE: return NULL;
    STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
    default: break;
  }
  return result;

#undef FRAME_TYPE_CASE
}


// -------------------------------------------------------------------------


JavaScriptFrameIterator::JavaScriptFrameIterator(
    Isolate* isolate, StackFrame::Id id)
    : iterator_(isolate) {
  while (!done()) {
    Advance();
    if (frame()->id() == id) return;
  }
}


void JavaScriptFrameIterator::Advance() {
  do {
    iterator_.Advance();
  } while (!iterator_.done() && !iterator_.frame()->is_java_script());
}


void JavaScriptFrameIterator::AdvanceToArgumentsFrame() {
  if (!frame()->has_adapted_arguments()) return;
  iterator_.Advance();
  ASSERT(iterator_.frame()->is_arguments_adaptor());
}


// -------------------------------------------------------------------------


StackTraceFrameIterator::StackTraceFrameIterator(Isolate* isolate)
    : JavaScriptFrameIterator(isolate) {
  if (!done() && !IsValidFrame()) Advance();
}


void StackTraceFrameIterator::Advance() {
  while (true) {
    JavaScriptFrameIterator::Advance();
    if (done()) return;
    if (IsValidFrame()) return;
  }
}


bool StackTraceFrameIterator::IsValidFrame() {
  if (!frame()->function()->IsJSFunction()) return false;
  Object* script = frame()->function()->shared()->script();
  // Don't show functions from native scripts to the user.
  return (script->IsScript() &&
          Script::TYPE_NATIVE != Script::cast(script)->type()->value());
}


// -------------------------------------------------------------------------


SafeStackFrameIterator::SafeStackFrameIterator(
    Isolate* isolate,
    Address fp, Address sp, Address js_entry_sp)
    : StackFrameIteratorBase(isolate, false),
      low_bound_(sp),
      high_bound_(js_entry_sp),
      top_frame_type_(StackFrame::NONE),
      external_callback_scope_(isolate->external_callback_scope()) {
  StackFrame::State state;
  StackFrame::Type type;
  ThreadLocalTop* top = isolate->thread_local_top();
  if (IsValidTop(top)) {
    type = ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state);
    top_frame_type_ = type;
  } else if (IsValidStackAddress(fp)) {
    ASSERT(fp != NULL);
    state.fp = fp;
    state.sp = sp;
    state.pc_address = StackFrame::ResolveReturnAddressLocation(
        reinterpret_cast<Address*>(StandardFrame::ComputePCAddress(fp)));
    // StackFrame::ComputeType will read both kContextOffset and kMarkerOffset.
    // We only check that kMarkerOffset is within the stack bounds and do a
    // compile-time check that the kContextOffset slot is pushed on the stack
    // before kMarkerOffset.
    STATIC_ASSERT(StandardFrameConstants::kMarkerOffset <
                  StandardFrameConstants::kContextOffset);
    Address frame_marker = fp + StandardFrameConstants::kMarkerOffset;
    if (IsValidStackAddress(frame_marker)) {
      type = StackFrame::ComputeType(this, &state);
      top_frame_type_ = type;
    } else {
      // Mark the frame as JAVA_SCRIPT if we cannot determine its type.
      // The frame will be skipped anyway.
      type = StackFrame::JAVA_SCRIPT;
      // The top frame is incomplete, so we cannot reliably determine its type.
      top_frame_type_ = StackFrame::NONE;
    }
  } else {
    return;
  }
  if (SingletonFor(type) == NULL) return;
  frame_ = SingletonFor(type, &state);
  if (frame_ == NULL) return;

  Advance();

  if (frame_ != NULL && !frame_->is_exit() &&
      external_callback_scope_ != NULL &&
      external_callback_scope_->scope_address() < frame_->fp()) {
    // Skip the top ExternalCallbackScope if we already advanced to a JS frame
    // under it. The sampler will pick up this top external callback anyway.
    external_callback_scope_ = external_callback_scope_->previous();
  }
}


bool SafeStackFrameIterator::IsValidTop(ThreadLocalTop* top) const {
  Address c_entry_fp = Isolate::c_entry_fp(top);
  if (!IsValidExitFrame(c_entry_fp)) return false;
  // There should be at least one JS_ENTRY stack handler.
  Address handler = Isolate::handler(top);
  if (handler == NULL) return false;
  // Check that there are no JS frames on top of the native frames.
  return c_entry_fp < handler;
}


void SafeStackFrameIterator::AdvanceOneFrame() {
  ASSERT(!done());
  StackFrame* last_frame = frame_;
  Address last_sp = last_frame->sp(), last_fp = last_frame->fp();
  // Before advancing to the next stack frame, perform pointer validity tests.
  if (!IsValidFrame(last_frame) || !IsValidCaller(last_frame)) {
    frame_ = NULL;
    return;
  }

  // Advance to the previous frame.
  StackFrame::State state;
  StackFrame::Type type = frame_->GetCallerState(&state);
  frame_ = SingletonFor(type, &state);
  if (frame_ == NULL) return;

  // Check that we have actually moved to the previous frame in the stack.
  if (frame_->sp() < last_sp || frame_->fp() < last_fp) {
    frame_ = NULL;
  }
}


bool SafeStackFrameIterator::IsValidFrame(StackFrame* frame) const {
  return IsValidStackAddress(frame->sp()) && IsValidStackAddress(frame->fp());
}


bool SafeStackFrameIterator::IsValidCaller(StackFrame* frame) {
  StackFrame::State state;
  if (frame->is_entry() || frame->is_entry_construct()) {
    // See EntryFrame::GetCallerState. It computes the caller FP address
    // and calls ExitFrame::GetStateForFramePointer on it. We need to be
    // sure that the caller FP address is valid.
    Address caller_fp = Memory::Address_at(
        frame->fp() + EntryFrameConstants::kCallerFPOffset);
    if (!IsValidExitFrame(caller_fp)) return false;
  } else if (frame->is_arguments_adaptor()) {
    // See ArgumentsAdaptorFrame::GetCallerStackPointer. It assumes that
    // the number of arguments is stored on the stack as a Smi. We need to
    // check that it really is a Smi.
    Object* number_of_args = reinterpret_cast<ArgumentsAdaptorFrame*>(frame)->
        GetExpression(0);
    if (!number_of_args->IsSmi()) {
      return false;
    }
  }
  frame->ComputeCallerState(&state);
  return IsValidStackAddress(state.sp) && IsValidStackAddress(state.fp) &&
      SingletonFor(frame->GetCallerState(&state)) != NULL;
}


bool SafeStackFrameIterator::IsValidExitFrame(Address fp) const {
  if (!IsValidStackAddress(fp)) return false;
  Address sp = ExitFrame::ComputeStackPointer(fp);
  if (!IsValidStackAddress(sp)) return false;
  StackFrame::State state;
  ExitFrame::FillState(fp, sp, &state);
  if (!IsValidStackAddress(reinterpret_cast<Address>(state.pc_address))) {
    return false;
  }
  return *state.pc_address != NULL;
}


void SafeStackFrameIterator::Advance() {
  while (true) {
    AdvanceOneFrame();
    if (done()) return;
    if (frame_->is_java_script()) return;
    if (frame_->is_exit() && external_callback_scope_) {
      // Some of the EXIT frames may have ExternalCallbackScope allocated on
      // top of them. In that case the scope corresponds to the first EXIT
      // frame beneath it. There may be other EXIT frames on top of the
      // ExternalCallbackScope, just skip them as we cannot collect any useful
      // information about them.
      if (external_callback_scope_->scope_address() < frame_->fp()) {
        Address* callback_address =
            external_callback_scope_->callback_address();
        if (*callback_address != NULL) {
          frame_->state_.pc_address = callback_address;
        }
        external_callback_scope_ = external_callback_scope_->previous();
        ASSERT(external_callback_scope_ == NULL ||
               external_callback_scope_->scope_address() > frame_->fp());
        return;
      }
    }
  }
}


// -------------------------------------------------------------------------

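// Looks up the safepoint entry, the owning code object, and the number of
// stack slots for the given inner pointer, going through the isolate's
// inner-pointer-to-code cache to avoid repeated safepoint table lookups.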
Code* StackFrame::GetSafepointData(Isolate* isolate,
                                   Address inner_pointer,
                                   SafepointEntry* safepoint_entry,
                                   unsigned* stack_slots) {
  InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry =
      isolate->inner_pointer_to_code_cache()->GetCacheEntry(inner_pointer);
  if (!entry->safepoint_entry.is_valid()) {
    entry->safepoint_entry = entry->code->GetSafepointEntry(inner_pointer);
    ASSERT(entry->safepoint_entry.is_valid());
  } else {
    ASSERT(entry->safepoint_entry.Equals(
        entry->code->GetSafepointEntry(inner_pointer)));
  }

  // Fill in the results and return the code.
  Code* code = entry->code;
  *safepoint_entry = entry->safepoint_entry;
  *stack_slots = code->stack_slots();
  return code;
}


bool StackFrame::HasHandler() const {
  StackHandlerIterator it(this, top_handler());
  return !it.done();
}


#ifdef DEBUG
static bool GcSafeCodeContains(HeapObject* object, Address addr);
#endif

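// Visits the code object that the frame's pc points into and, if the visitor
// moved that code object, rebases the stored pc so that it keeps pointing at
// the same instruction offset in the relocated code.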
void StackFrame::IteratePc(ObjectVisitor* v,
                           Address* pc_address,
                           Code* holder) {
  Address pc = *pc_address;
  ASSERT(GcSafeCodeContains(holder, pc));
  unsigned pc_offset = static_cast<unsigned>(pc - holder->instruction_start());
  Object* code = holder;
  v->VisitPointer(&code);
  if (code != holder) {
    holder = reinterpret_cast<Code*>(code);
    pc = holder->instruction_start() + pc_offset;
    *pc_address = pc;
  }
}


void StackFrame::SetReturnAddressLocationResolver(
    ReturnAddressLocationResolver resolver) {
  ASSERT(return_address_location_resolver_ == NULL);
  return_address_location_resolver_ = resolver;
}


StackFrame::Type StackFrame::ComputeType(const StackFrameIteratorBase* iterator,
                                         State* state) {
  ASSERT(state->fp != NULL);
  if (StandardFrame::IsArgumentsAdaptorFrame(state->fp)) {
    return ARGUMENTS_ADAPTOR;
  }
  // The marker and function offsets overlap. If the marker isn't a
  // smi then the frame is a JavaScript frame -- and the marker is
  // really the function.
  const int offset = StandardFrameConstants::kMarkerOffset;
  Object* marker = Memory::Object_at(state->fp + offset);
  if (!marker->IsSmi()) {
    // If we're using a "safe" stack iterator, we treat optimized
    // frames as normal JavaScript frames to avoid having to look
    // into the heap to determine the state. This is safe as long
    // as nobody tries to GC...
    if (!iterator->can_access_heap_objects_) return JAVA_SCRIPT;
    Code::Kind kind = GetContainingCode(iterator->isolate(),
                                        *(state->pc_address))->kind();
    ASSERT(kind == Code::FUNCTION || kind == Code::OPTIMIZED_FUNCTION);
    return (kind == Code::OPTIMIZED_FUNCTION) ? OPTIMIZED : JAVA_SCRIPT;
  }
  return static_cast<StackFrame::Type>(Smi::cast(marker)->value());
}


#ifdef DEBUG
bool StackFrame::can_access_heap_objects() const {
  return iterator_->can_access_heap_objects_;
}
#endif


StackFrame::Type StackFrame::GetCallerState(State* state) const {
  ComputeCallerState(state);
  return ComputeType(iterator_, state);
}


Address StackFrame::UnpaddedFP() const {
#if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87
  if (!is_optimized()) return fp();
  int32_t alignment_state = Memory::int32_at(
      fp() + JavaScriptFrameConstants::kDynamicAlignmentStateOffset);

  return (alignment_state == kAlignmentPaddingPushed) ?
      (fp() + kPointerSize) : fp();
#else
  return fp();
#endif
}


Code* EntryFrame::unchecked_code() const {
  return isolate()->heap()->js_entry_code();
}


void EntryFrame::ComputeCallerState(State* state) const {
  GetCallerState(state);
}


void EntryFrame::SetCallerFp(Address caller_fp) {
  const int offset = EntryFrameConstants::kCallerFPOffset;
  Memory::Address_at(this->fp() + offset) = caller_fp;
}


StackFrame::Type EntryFrame::GetCallerState(State* state) const {
  const int offset = EntryFrameConstants::kCallerFPOffset;
  Address fp = Memory::Address_at(this->fp() + offset);
  return ExitFrame::GetStateForFramePointer(fp, state);
}


Code* EntryConstructFrame::unchecked_code() const {
  return isolate()->heap()->js_construct_entry_code();
}


Object*& ExitFrame::code_slot() const {
  const int offset = ExitFrameConstants::kCodeOffset;
  return Memory::Object_at(fp() + offset);
}


Code* ExitFrame::unchecked_code() const {
  return reinterpret_cast<Code*>(code_slot());
}


void ExitFrame::ComputeCallerState(State* state) const {
  // Set up the caller state.
  state->sp = caller_sp();
  state->fp = Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset);
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset));
  if (FLAG_enable_ool_constant_pool) {
    state->constant_pool_address = reinterpret_cast<Address*>(
        fp() + ExitFrameConstants::kConstantPoolOffset);
  }
}


void ExitFrame::SetCallerFp(Address caller_fp) {
  Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset) = caller_fp;
}


void ExitFrame::Iterate(ObjectVisitor* v) const {
  // The arguments are traversed as part of the expression stack of
  // the calling frame.
  IteratePc(v, pc_address(), LookupCode());
  v->VisitPointer(&code_slot());
  if (FLAG_enable_ool_constant_pool) {
    v->VisitPointer(&constant_pool_slot());
  }
}


Address ExitFrame::GetCallerStackPointer() const {
  return fp() + ExitFrameConstants::kCallerSPDisplacement;
}

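// Fills in the state for the exit frame identified by the given frame
// pointer. A zero frame pointer means there is no C entry frame on the
// stack, in which case NONE is returned.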
StackFrame::Type ExitFrame::GetStateForFramePointer(Address fp, State* state) {
  if (fp == 0) return NONE;
  Address sp = ComputeStackPointer(fp);
  FillState(fp, sp, state);
  ASSERT(*state->pc_address != NULL);
  return EXIT;
}


Address ExitFrame::ComputeStackPointer(Address fp) {
  return Memory::Address_at(fp + ExitFrameConstants::kSPOffset);
}


void ExitFrame::FillState(Address fp, Address sp, State* state) {
  state->sp = sp;
  state->fp = fp;
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(sp - 1 * kPCOnStackSize));
  state->constant_pool_address =
      reinterpret_cast<Address*>(fp + ExitFrameConstants::kConstantPoolOffset);
}


Address StandardFrame::GetExpressionAddress(int n) const {
  const int offset = StandardFrameConstants::kExpressionsOffset;
  return fp() + offset - n * kPointerSize;
}


Object* StandardFrame::GetExpression(Address fp, int index) {
  return Memory::Object_at(GetExpressionAddress(fp, index));
}


Address StandardFrame::GetExpressionAddress(Address fp, int n) {
  const int offset = StandardFrameConstants::kExpressionsOffset;
  return fp + offset - n * kPointerSize;
}


int StandardFrame::ComputeExpressionsCount() const {
  const int offset =
      StandardFrameConstants::kExpressionsOffset + kPointerSize;
  Address base = fp() + offset;
  Address limit = sp();
  ASSERT(base >= limit);  // stack grows downwards
  // Include register-allocated locals in number of expressions.
  return static_cast<int>((base - limit) / kPointerSize);
}


void StandardFrame::ComputeCallerState(State* state) const {
  state->sp = caller_sp();
  state->fp = caller_fp();
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(ComputePCAddress(fp())));
  state->constant_pool_address =
      reinterpret_cast<Address*>(ComputeConstantPoolAddress(fp()));
}


void StandardFrame::SetCallerFp(Address caller_fp) {
  Memory::Address_at(fp() + StandardFrameConstants::kCallerFPOffset) =
      caller_fp;
}


bool StandardFrame::IsExpressionInsideHandler(int n) const {
  Address address = GetExpressionAddress(n);
  for (StackHandlerIterator it(this, top_handler()); !it.done(); it.Advance()) {
    if (it.handler()->includes(address)) return true;
  }
  return false;
}

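// Visits all tagged slots of a compiled (optimized or stub) frame: outgoing
// parameters, pointer-holding registers and spill slots as described by the
// safepoint table, the return address, and the fixed part of the frame.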
void StandardFrame::IterateCompiledFrame(ObjectVisitor* v) const {
  // Make sure that we're not doing "safe" stack frame iteration. We cannot
  // possibly find pointers in optimized frames in that state.
  ASSERT(can_access_heap_objects());

  // Compute the safepoint information.
  unsigned stack_slots = 0;
  SafepointEntry safepoint_entry;
  Code* code = StackFrame::GetSafepointData(
      isolate(), pc(), &safepoint_entry, &stack_slots);
  unsigned slot_space = stack_slots * kPointerSize;

  // Visit the outgoing parameters.
  Object** parameters_base = &Memory::Object_at(sp());
  Object** parameters_limit = &Memory::Object_at(
      fp() + JavaScriptFrameConstants::kFunctionOffset - slot_space);

  // Visit the parameters that may be on top of the saved registers.
  if (safepoint_entry.argument_count() > 0) {
    v->VisitPointers(parameters_base,
                     parameters_base + safepoint_entry.argument_count());
    parameters_base += safepoint_entry.argument_count();
  }

  // Skip saved double registers.
  if (safepoint_entry.has_doubles()) {
    // Number of doubles not known at snapshot time.
    ASSERT(!isolate()->serializer_enabled());
    parameters_base += DoubleRegister::NumAllocatableRegisters() *
        kDoubleSize / kPointerSize;
  }

  // Visit the registers that contain pointers if any.
  if (safepoint_entry.HasRegisters()) {
    for (int i = kNumSafepointRegisters - 1; i >= 0; i--) {
      if (safepoint_entry.HasRegisterAt(i)) {
        int reg_stack_index = MacroAssembler::SafepointRegisterStackIndex(i);
        v->VisitPointer(parameters_base + reg_stack_index);
      }
    }
    // Skip the words containing the register values.
    parameters_base += kNumSafepointRegisters;
  }

  // We're done dealing with the register bits.
  uint8_t* safepoint_bits = safepoint_entry.bits();
  safepoint_bits += kNumSafepointRegisters >> kBitsPerByteLog2;

  // Visit the rest of the parameters.
  v->VisitPointers(parameters_base, parameters_limit);

  // Visit pointer spill slots and locals.
  for (unsigned index = 0; index < stack_slots; index++) {
    int byte_index = index >> kBitsPerByteLog2;
    int bit_index = index & (kBitsPerByte - 1);
    if ((safepoint_bits[byte_index] & (1U << bit_index)) != 0) {
      v->VisitPointer(parameters_limit + index);
    }
  }

  // Visit the return address in the callee and incoming arguments.
  IteratePc(v, pc_address(), code);

  // Visit the context in stub frame and JavaScript frame.
  // Visit the function in JavaScript frame.
  Object** fixed_base = &Memory::Object_at(
      fp() + StandardFrameConstants::kMarkerOffset);
  Object** fixed_limit = &Memory::Object_at(fp());
  v->VisitPointers(fixed_base, fixed_limit);
}


void StubFrame::Iterate(ObjectVisitor* v) const {
  IterateCompiledFrame(v);
}


Code* StubFrame::unchecked_code() const {
  return static_cast<Code*>(isolate()->FindCodeObject(pc()));
}


Address StubFrame::GetCallerStackPointer() const {
  return fp() + ExitFrameConstants::kCallerSPDisplacement;
}


int StubFrame::GetNumberOfIncomingArguments() const {
  return 0;
}


void OptimizedFrame::Iterate(ObjectVisitor* v) const {
#ifdef DEBUG
  // Make sure that optimized frames do not contain any stack handlers.
  StackHandlerIterator it(this, top_handler());
  ASSERT(it.done());
#endif

  IterateCompiledFrame(v);
}


void JavaScriptFrame::SetParameterValue(int index, Object* value) const {
  Memory::Object_at(GetParameterSlot(index)) = value;
}


bool JavaScriptFrame::IsConstructor() const {
  Address fp = caller_fp();
  if (has_adapted_arguments()) {
    // Skip the arguments adaptor frame and look at the real caller.
    fp = Memory::Address_at(fp + StandardFrameConstants::kCallerFPOffset);
  }
  return IsConstructFrame(fp);
}


int JavaScriptFrame::GetArgumentsLength() const {
  // If there is an arguments adaptor frame get the arguments length from it.
  if (has_adapted_arguments()) {
    return Smi::cast(GetExpression(caller_fp(), 0))->value();
  } else {
    return GetNumberOfIncomingArguments();
  }
}


Code* JavaScriptFrame::unchecked_code() const {
  return function()->code();
}


int JavaScriptFrame::GetNumberOfIncomingArguments() const {
  ASSERT(can_access_heap_objects() &&
         isolate()->heap()->gc_state() == Heap::NOT_IN_GC);

  return function()->shared()->formal_parameter_count();
}


Address JavaScriptFrame::GetCallerStackPointer() const {
  return fp() + StandardFrameConstants::kCallerSPOffset;
}


void JavaScriptFrame::GetFunctions(List<JSFunction*>* functions) {
  ASSERT(functions->length() == 0);
  functions->Add(function());
}


void JavaScriptFrame::Summarize(List<FrameSummary>* functions) {
  ASSERT(functions->length() == 0);
  Code* code_pointer = LookupCode();
  int offset = static_cast<int>(pc() - code_pointer->address());
  FrameSummary summary(receiver(),
                       function(),
                       code_pointer,
                       offset,
                       IsConstructor());
  functions->Add(summary);
}


void JavaScriptFrame::PrintTop(Isolate* isolate,
                               FILE* file,
                               bool print_args,
                               bool print_line_number) {
  // constructor calls
  DisallowHeapAllocation no_allocation;
  JavaScriptFrameIterator it(isolate);
  while (!it.done()) {
    if (it.frame()->is_java_script()) {
      JavaScriptFrame* frame = it.frame();
      if (frame->IsConstructor()) PrintF(file, "new ");
      // function name
      JSFunction* fun = frame->function();
      fun->PrintName();
      Code* js_code = frame->unchecked_code();
      Address pc = frame->pc();
      int code_offset =
          static_cast<int>(pc - js_code->instruction_start());
      PrintF("+%d", code_offset);
      SharedFunctionInfo* shared = fun->shared();
      if (print_line_number) {
        Code* code = Code::cast(isolate->FindCodeObject(pc));
        int source_pos = code->SourcePosition(pc);
        Object* maybe_script = shared->script();
        if (maybe_script->IsScript()) {
          Script* script = Script::cast(maybe_script);
          int line = script->GetLineNumber(source_pos) + 1;
          Object* script_name_raw = script->name();
          if (script_name_raw->IsString()) {
            String* script_name = String::cast(script->name());
            SmartArrayPointer<char> c_script_name =
                script_name->ToCString(DISALLOW_NULLS,
                                       ROBUST_STRING_TRAVERSAL);
            PrintF(file, " at %s:%d", c_script_name.get(), line);
          } else {
            PrintF(file, " at <unknown>:%d", line);
          }
        } else {
          PrintF(file, " at <unknown>:<unknown>");
        }
      }

      if (print_args) {
        // function arguments
        // (we are intentionally only printing the actually
        // supplied parameters, not all parameters required)
        PrintF(file, "(this=");
        frame->receiver()->ShortPrint(file);
        const int length = frame->ComputeParametersCount();
        for (int i = 0; i < length; i++) {
          PrintF(file, ", ");
          frame->GetParameter(i)->ShortPrint(file);
        }
        PrintF(file, ")");
      }
      break;
    }
    it.Advance();
  }
}


void JavaScriptFrame::SaveOperandStack(FixedArray* store,
                                       int* stack_handler_index) const {
  int operands_count = store->length();
  ASSERT_LE(operands_count, ComputeOperandsCount());

  // Visit the stack in LIFO order, saving operands and stack handlers into the
  // array. The saved stack handlers store a link to the next stack handler,
  // which will allow RestoreOperandStack to rewind the handlers.
  StackHandlerIterator it(this, top_handler());
  int i = operands_count - 1;
  *stack_handler_index = -1;
  for (; !it.done(); it.Advance()) {
    StackHandler* handler = it.handler();
    // Save operands pushed after the handler was pushed.
    for (; GetOperandSlot(i) < handler->address(); i--) {
      store->set(i, GetOperand(i));
    }
    ASSERT_GE(i + 1, StackHandlerConstants::kSlotCount);
    ASSERT_EQ(handler->address(), GetOperandSlot(i));
    int next_stack_handler_index = i + 1 - StackHandlerConstants::kSlotCount;
    handler->Unwind(isolate(), store, next_stack_handler_index,
                    *stack_handler_index);
    *stack_handler_index = next_stack_handler_index;
    i -= StackHandlerConstants::kSlotCount;
  }

  // Save any remaining operands.
  for (; i >= 0; i--) {
    store->set(i, GetOperand(i));
  }
}


void JavaScriptFrame::RestoreOperandStack(FixedArray* store,
                                          int stack_handler_index) {
  int operands_count = store->length();
  ASSERT_LE(operands_count, ComputeOperandsCount());
  int i = 0;
  while (i <= stack_handler_index) {
    if (i < stack_handler_index) {
      // An operand.
      ASSERT_EQ(GetOperand(i), isolate()->heap()->the_hole_value());
      Memory::Object_at(GetOperandSlot(i)) = store->get(i);
      i++;
    } else {
      // A stack handler.
      ASSERT_EQ(i, stack_handler_index);
      // The FixedArray store grows up. The stack grows down. So the operand
      // slot for i actually points to the bottom of the top word in the
      // handler. The base of the StackHandler* is the address of the bottom
      // word, which will be the last slot that is in the handler.
      int handler_slot_index = i + StackHandlerConstants::kSlotCount - 1;
      StackHandler* handler =
          StackHandler::FromAddress(GetOperandSlot(handler_slot_index));
      stack_handler_index = handler->Rewind(isolate(), store, i, fp());
      i += StackHandlerConstants::kSlotCount;
    }
  }

  for (; i < operands_count; i++) {
    ASSERT_EQ(GetOperand(i), isolate()->heap()->the_hole_value());
    Memory::Object_at(GetOperandSlot(i)) = store->get(i);
  }
}


void FrameSummary::Print() {
  PrintF("receiver: ");
  receiver_->ShortPrint();
  PrintF("\nfunction: ");
  function_->shared()->DebugName()->ShortPrint();
  PrintF("\ncode: ");
  code_->ShortPrint();
  if (code_->kind() == Code::FUNCTION) PrintF(" NON-OPT");
  if (code_->kind() == Code::OPTIMIZED_FUNCTION) PrintF(" OPT");
  PrintF("\npc: %d\n", offset_);
}


JSFunction* OptimizedFrame::LiteralAt(FixedArray* literal_array,
                                      int literal_id) {
  if (literal_id == Translation::kSelfLiteralId) {
    return function();
  }

  return JSFunction::cast(literal_array->get(literal_id));
}

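// Builds frame summaries for this optimized frame, including one summary per
// function inlined into it, by walking the deoptimization translation that
// corresponds to the current safepoint.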
void OptimizedFrame::Summarize(List<FrameSummary>* frames) {
  ASSERT(frames->length() == 0);
  ASSERT(is_optimized());

  int deopt_index = Safepoint::kNoDeoptimizationIndex;
  DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index);
  FixedArray* literal_array = data->LiteralArray();

  // BUG(3243555): Since we don't have a lazy-deopt registered at
  // throw-statements, we can't use the translation at the call-site of
  // throw. An entry with no deoptimization index indicates a call-site
  // without a lazy-deopt. As a consequence we are not allowed to inline
  // functions containing throw.
  if (deopt_index == Safepoint::kNoDeoptimizationIndex) {
    JavaScriptFrame::Summarize(frames);
    return;
  }

  TranslationIterator it(data->TranslationByteArray(),
                         data->TranslationIndex(deopt_index)->value());
  Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
  ASSERT(opcode == Translation::BEGIN);
  it.Next();  // Drop frame count.
  int jsframe_count = it.Next();

  // We create the summary in reverse order because the frames
  // in the deoptimization translation are ordered bottom-to-top.
  bool is_constructor = IsConstructor();
  int i = jsframe_count;
  while (i > 0) {
    opcode = static_cast<Translation::Opcode>(it.Next());
    if (opcode == Translation::JS_FRAME) {
      i--;
      BailoutId ast_id = BailoutId(it.Next());
      JSFunction* function = LiteralAt(literal_array, it.Next());
      it.Next();  // Skip height.

      // The translation commands are ordered and the receiver is always
      // at the first position.
      // If we are at a call, the receiver is always in a stack slot.
      // Otherwise we are not guaranteed to get the receiver value.
      opcode = static_cast<Translation::Opcode>(it.Next());
      int index = it.Next();

      // Get the correct receiver in the optimized frame.
      Object* receiver = NULL;
      if (opcode == Translation::LITERAL) {
        receiver = data->LiteralArray()->get(index);
      } else if (opcode == Translation::STACK_SLOT) {
        // Positive index means the value is spilled to the locals
        // area. Negative means it is stored in the incoming parameter
        // area.
        if (index >= 0) {
          receiver = GetExpression(index);
        } else {
          // Index -1 overlaps with last parameter, -n with the first parameter,
          // (-n - 1) with the receiver with n being the number of parameters
          // of the outermost, optimized frame.
          int parameter_count = ComputeParametersCount();
          int parameter_index = index + parameter_count;
          receiver = (parameter_index == -1)
              ? this->receiver()
              : this->GetParameter(parameter_index);
        }
      } else {
        // The receiver is not in a stack slot nor in a literal. We give up.
        // TODO(3029): Materializing a captured object (or duplicated
        // object) is hard, we return undefined for now. This breaks the
        // produced stack trace, as constructor frames aren't marked as
        // such anymore.
        receiver = isolate()->heap()->undefined_value();
      }

      Code* code = function->shared()->code();
      DeoptimizationOutputData* output_data =
          DeoptimizationOutputData::cast(code->deoptimization_data());
      unsigned entry = Deoptimizer::GetOutputInfo(output_data,
                                                  ast_id,
                                                  function->shared());
      unsigned pc_offset =
          FullCodeGenerator::PcField::decode(entry) + Code::kHeaderSize;
      ASSERT(pc_offset > 0);

      FrameSummary summary(receiver, function, code, pc_offset, is_constructor);
      frames->Add(summary);
      is_constructor = false;
    } else if (opcode == Translation::CONSTRUCT_STUB_FRAME) {
      // The next encountered JS_FRAME will be marked as a constructor call.
      it.Skip(Translation::NumberOfOperandsFor(opcode));
      ASSERT(!is_constructor);
      is_constructor = true;
    } else {
      // Skip over operands to advance to the next opcode.
      it.Skip(Translation::NumberOfOperandsFor(opcode));
    }
  }
  ASSERT(!is_constructor);
}


DeoptimizationInputData* OptimizedFrame::GetDeoptimizationData(
    int* deopt_index) {
  ASSERT(is_optimized());

  JSFunction* opt_function = function();
  Code* code = opt_function->code();

  // The code object may have been replaced by lazy deoptimization. Fall
  // back to a slow search in this case to find the original optimized
  // code object.
  if (!code->contains(pc())) {
    code = isolate()->inner_pointer_to_code_cache()->
        GcSafeFindCodeForInnerPointer(pc());
  }
  ASSERT(code != NULL);
  ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);

  SafepointEntry safepoint_entry = code->GetSafepointEntry(pc());
  *deopt_index = safepoint_entry.deoptimization_index();
  ASSERT(*deopt_index != Safepoint::kNoDeoptimizationIndex);

  return DeoptimizationInputData::cast(code->deoptimization_data());
}


int OptimizedFrame::GetInlineCount() {
  ASSERT(is_optimized());

  int deopt_index = Safepoint::kNoDeoptimizationIndex;
  DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index);

  TranslationIterator it(data->TranslationByteArray(),
                         data->TranslationIndex(deopt_index)->value());
  Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
  ASSERT(opcode == Translation::BEGIN);
  USE(opcode);
  it.Next();  // Drop frame count.
  int jsframe_count = it.Next();
  return jsframe_count;
}


void OptimizedFrame::GetFunctions(List<JSFunction*>* functions) {
  ASSERT(functions->length() == 0);
  ASSERT(is_optimized());

  int deopt_index = Safepoint::kNoDeoptimizationIndex;
  DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index);
  FixedArray* literal_array = data->LiteralArray();

  TranslationIterator it(data->TranslationByteArray(),
                         data->TranslationIndex(deopt_index)->value());
  Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
  ASSERT(opcode == Translation::BEGIN);
  it.Next();  // Drop frame count.
  int jsframe_count = it.Next();

  // We insert the frames in reverse order because the frames
  // in the deoptimization translation are ordered bottom-to-top.
  while (jsframe_count > 0) {
    opcode = static_cast<Translation::Opcode>(it.Next());
    if (opcode == Translation::JS_FRAME) {
      jsframe_count--;
      it.Next();  // Skip ast id.
      JSFunction* function = LiteralAt(literal_array, it.Next());
      it.Next();  // Skip height.
      functions->Add(function);
    } else {
      // Skip over operands to advance to the next opcode.
      it.Skip(Translation::NumberOfOperandsFor(opcode));
    }
  }
}


int ArgumentsAdaptorFrame::GetNumberOfIncomingArguments() const {
  return Smi::cast(GetExpression(0))->value();
}


Address ArgumentsAdaptorFrame::GetCallerStackPointer() const {
  return fp() + StandardFrameConstants::kCallerSPOffset;
}


Address InternalFrame::GetCallerStackPointer() const {
  // Internal frames have no arguments. The stack pointer of the
  // caller is at a fixed offset from the frame pointer.
  return fp() + StandardFrameConstants::kCallerSPOffset;
}


Code* ArgumentsAdaptorFrame::unchecked_code() const {
  return isolate()->builtins()->builtin(
      Builtins::kArgumentsAdaptorTrampoline);
}


Code* InternalFrame::unchecked_code() const {
  const int offset = InternalFrameConstants::kCodeOffset;
  Object* code = Memory::Object_at(fp() + offset);
  ASSERT(code != NULL);
  return reinterpret_cast<Code*>(code);
}


void StackFrame::PrintIndex(StringStream* accumulator,
                            PrintMode mode,
                            int index) {
  accumulator->Add((mode == OVERVIEW) ? "%5d: " : "[%d]: ", index);
}


void JavaScriptFrame::Print(StringStream* accumulator,
                            PrintMode mode,
                            int index) const {
  DisallowHeapAllocation no_gc;
  Object* receiver = this->receiver();
  JSFunction* function = this->function();

  accumulator->PrintSecurityTokenIfChanged(function);
  PrintIndex(accumulator, mode, index);
  Code* code = NULL;
  if (IsConstructor()) accumulator->Add("new ");
  accumulator->PrintFunction(function, receiver, &code);

  // Get scope information for nicer output, if possible. If code is NULL, or
  // doesn't contain scope info, scope_info will return 0 for the number of
  // parameters, stack local variables, context local variables, stack slots,
  // or context slots.
  SharedFunctionInfo* shared = function->shared();
  ScopeInfo* scope_info = shared->scope_info();
  Object* script_obj = shared->script();
  if (script_obj->IsScript()) {
    Script* script = Script::cast(script_obj);
    accumulator->Add(" [");
    accumulator->PrintName(script->name());

    Address pc = this->pc();
    if (code != NULL && code->kind() == Code::FUNCTION &&
        pc >= code->instruction_start() && pc < code->instruction_end()) {
      int source_pos = code->SourcePosition(pc);
      int line = script->GetLineNumber(source_pos) + 1;
      accumulator->Add(":%d", line);
    } else {
      int function_start_pos = shared->start_position();
      int line = script->GetLineNumber(function_start_pos) + 1;
      accumulator->Add(":~%d", line);
    }

    accumulator->Add("] ");
  }

  accumulator->Add("(this=%o", receiver);

  // Print the parameters.
  int parameters_count = ComputeParametersCount();
  for (int i = 0; i < parameters_count; i++) {
    accumulator->Add(",");
    // If we have a name for the parameter we print it. Nameless
    // parameters are either because we have more actual parameters
    // than formal parameters or because we have no scope information.
    if (i < scope_info->ParameterCount()) {
      accumulator->PrintName(scope_info->ParameterName(i));
      accumulator->Add("=");
    }
    accumulator->Add("%o", GetParameter(i));
  }

  accumulator->Add(")");
  if (mode == OVERVIEW) {
    accumulator->Add("\n");
    return;
  }
  if (is_optimized()) {
    accumulator->Add(" {\n// optimized frame\n}\n");
    return;
  }
  accumulator->Add(" {\n");

  // Compute the number of locals and expression stack elements.
  int stack_locals_count = scope_info->StackLocalCount();
  int heap_locals_count = scope_info->ContextLocalCount();
  int expressions_count = ComputeExpressionsCount();

  // Print stack-allocated local variables.
  if (stack_locals_count > 0) {
    accumulator->Add("  // stack-allocated locals\n");
  }
  for (int i = 0; i < stack_locals_count; i++) {
    accumulator->Add("  var ");
    accumulator->PrintName(scope_info->StackLocalName(i));
    accumulator->Add(" = ");
    if (i < expressions_count) {
      accumulator->Add("%o", GetExpression(i));
    } else {
      accumulator->Add("// no expression found - inconsistent frame?");
    }
    accumulator->Add("\n");
  }

  // Try to get hold of the context of this frame.
  Context* context = NULL;
  if (this->context() != NULL && this->context()->IsContext()) {
    context = Context::cast(this->context());
  }
  while (context->IsWithContext()) {
    context = context->previous();
    ASSERT(context != NULL);
  }

  // Print heap-allocated local variables.
  if (heap_locals_count > 0) {
    accumulator->Add("  // heap-allocated locals\n");
  }
  for (int i = 0; i < heap_locals_count; i++) {
    accumulator->Add("  var ");
    accumulator->PrintName(scope_info->ContextLocalName(i));
    accumulator->Add(" = ");
    if (context != NULL) {
      int index = Context::MIN_CONTEXT_SLOTS + i;
      if (index < context->length()) {
        accumulator->Add("%o", context->get(index));
      } else {
        accumulator->Add(
            "// warning: missing context slot - inconsistent frame?");
      }
    } else {
      accumulator->Add("// warning: no context found - inconsistent frame?");
    }
    accumulator->Add("\n");
  }

  // Print the expression stack.
  int expressions_start = stack_locals_count;
  if (expressions_start < expressions_count) {
    accumulator->Add("  // expression stack (top to bottom)\n");
  }
  for (int i = expressions_count - 1; i >= expressions_start; i--) {
    if (IsExpressionInsideHandler(i)) continue;
    accumulator->Add("  [%02d] : %o\n", i, GetExpression(i));
  }

  // Print details about the function.
  if (FLAG_max_stack_trace_source_length != 0 && code != NULL) {
    SharedFunctionInfo* shared = function->shared();
    accumulator->Add("--------- s o u r c e   c o d e ---------\n");
    shared->SourceCodePrint(accumulator, FLAG_max_stack_trace_source_length);
    accumulator->Add("\n-----------------------------------------\n");
  }

  accumulator->Add("}\n\n");
}


void ArgumentsAdaptorFrame::Print(StringStream* accumulator,
                                  PrintMode mode,
                                  int index) const {
  int actual = ComputeParametersCount();
  int expected = -1;
  JSFunction* function = this->function();
  expected = function->shared()->formal_parameter_count();

  PrintIndex(accumulator, mode, index);
  accumulator->Add("arguments adaptor frame: %d->%d", actual, expected);
  if (mode == OVERVIEW) {
    accumulator->Add("\n");
    return;
  }
  accumulator->Add(" {\n");

  // Print actual arguments.
  if (actual > 0) accumulator->Add("  // actual arguments\n");
  for (int i = 0; i < actual; i++) {
    accumulator->Add("  [%02d] : %o", i, GetParameter(i));
    if (expected != -1 && i >= expected) {
      accumulator->Add("  // not passed to callee");
    }
    accumulator->Add("\n");
  }

  accumulator->Add("}\n\n");
}


void EntryFrame::Iterate(ObjectVisitor* v) const {
  StackHandlerIterator it(this, top_handler());
  ASSERT(!it.done());
  StackHandler* handler = it.handler();
  ASSERT(handler->is_js_entry());
  handler->Iterate(v, LookupCode());
#ifdef DEBUG
  // Make sure that the entry frame does not contain more than one
  // stack handler.
  it.Advance();
  ASSERT(it.done());
#endif
  IteratePc(v, pc_address(), LookupCode());
}


void StandardFrame::IterateExpressions(ObjectVisitor* v) const {
  const int offset = StandardFrameConstants::kLastObjectOffset;
  Object** base = &Memory::Object_at(sp());
  Object** limit = &Memory::Object_at(fp() + offset) + 1;
  for (StackHandlerIterator it(this, top_handler()); !it.done(); it.Advance()) {
    StackHandler* handler = it.handler();
    // Traverse pointers down to - but not including - the next
    // handler in the handler chain. Update the base to skip the
    // handler and allow the handler to traverse its own pointers.
    const Address address = handler->address();
    v->VisitPointers(base, reinterpret_cast<Object**>(address));
    base = reinterpret_cast<Object**>(address + StackHandlerConstants::kSize);
    // Traverse the pointers in the handler itself.
    handler->Iterate(v, LookupCode());
  }
  v->VisitPointers(base, limit);
}


void JavaScriptFrame::Iterate(ObjectVisitor* v) const {
  IterateExpressions(v);
  IteratePc(v, pc_address(), LookupCode());
}


void InternalFrame::Iterate(ObjectVisitor* v) const {
  // Internal frames only have object pointers on the expression stack
  // as they never have any arguments.
  IterateExpressions(v);
  IteratePc(v, pc_address(), LookupCode());
}


void StubFailureTrampolineFrame::Iterate(ObjectVisitor* v) const {
  Object** base = &Memory::Object_at(sp());
  Object** limit = &Memory::Object_at(fp() +
                                      kFirstRegisterParameterFrameOffset);
  v->VisitPointers(base, limit);
  base = &Memory::Object_at(fp() + StandardFrameConstants::kMarkerOffset);
  const int offset = StandardFrameConstants::kLastObjectOffset;
  limit = &Memory::Object_at(fp() + offset) + 1;
  v->VisitPointers(base, limit);
  IteratePc(v, pc_address(), LookupCode());
}


Address StubFailureTrampolineFrame::GetCallerStackPointer() const {
  return fp() + StandardFrameConstants::kCallerSPOffset;
}


Code* StubFailureTrampolineFrame::unchecked_code() const {
  Code* trampoline;
  StubFailureTrampolineStub(isolate(), NOT_JS_FUNCTION_STUB_MODE).
      FindCodeInCache(&trampoline);
  if (trampoline->contains(pc())) {
    return trampoline;
  }

  StubFailureTrampolineStub(isolate(), JS_FUNCTION_STUB_MODE).
      FindCodeInCache(&trampoline);
  if (trampoline->contains(pc())) {
    return trampoline;
  }

  UNREACHABLE();
  return NULL;
}


// -------------------------------------------------------------------------


JavaScriptFrame* StackFrameLocator::FindJavaScriptFrame(int n) {
  ASSERT(n >= 0);
  for (int i = 0; i <= n; i++) {
    while (!iterator_.frame()->is_java_script()) iterator_.Advance();
    if (i == n) return JavaScriptFrame::cast(iterator_.frame());
    iterator_.Advance();
  }
  UNREACHABLE();
  return NULL;
}


// -------------------------------------------------------------------------


static Map* GcSafeMapOfCodeSpaceObject(HeapObject* object) {
  MapWord map_word = object->map_word();
  return map_word.IsForwardingAddress() ?
      map_word.ToForwardingAddress()->map() : map_word.ToMap();
}


static int GcSafeSizeOfCodeSpaceObject(HeapObject* object) {
  return object->SizeFromMap(GcSafeMapOfCodeSpaceObject(object));
}


#ifdef DEBUG
static bool GcSafeCodeContains(HeapObject* code, Address addr) {
  Map* map = GcSafeMapOfCodeSpaceObject(code);
  ASSERT(map == code->GetHeap()->code_map());
  Address start = code->address();
  Address end = code->address() + code->SizeFromMap(map);
  return start <= addr && addr < end;
}
#endif


Code* InnerPointerToCodeCache::GcSafeCastToCode(HeapObject* object,
                                                Address inner_pointer) {
  Code* code = reinterpret_cast<Code*>(object);
  ASSERT(code != NULL && GcSafeCodeContains(code, inner_pointer));
  return code;
}

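// Finds the Code object containing the given inner pointer without using
// object maps that the GC may currently be updating; large objects are
// resolved via their page, other code objects by walking the code space.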
Code* InnerPointerToCodeCache::GcSafeFindCodeForInnerPointer(
    Address inner_pointer) {
  Heap* heap = isolate_->heap();
  // Check if the inner pointer points into a large object chunk.
  LargePage* large_page = heap->lo_space()->FindPage(inner_pointer);
  if (large_page != NULL) {
    return GcSafeCastToCode(large_page->GetObject(), inner_pointer);
  }

  // Iterate through the page until we reach the end or find an object starting
  // after the inner pointer.
  Page* page = Page::FromAddress(inner_pointer);

  Address addr = page->skip_list()->StartFor(inner_pointer);

  Address top = heap->code_space()->top();
  Address limit = heap->code_space()->limit();

  while (true) {
    if (addr == top && addr != limit) {
      addr = limit;
      continue;
    }

    HeapObject* obj = HeapObject::FromAddress(addr);
    int obj_size = GcSafeSizeOfCodeSpaceObject(obj);
    Address next_addr = addr + obj_size;
    if (next_addr > inner_pointer) return GcSafeCastToCode(obj, inner_pointer);
    addr = next_addr;
  }
}

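// Returns the cache entry for the given inner pointer, recomputing the
// containing code object on a cache miss and resetting the cached safepoint
// entry so it is looked up lazily.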
InnerPointerToCodeCache::InnerPointerToCodeCacheEntry*
    InnerPointerToCodeCache::GetCacheEntry(Address inner_pointer) {
  isolate_->counters()->pc_to_code()->Increment();
  ASSERT(IsPowerOf2(kInnerPointerToCodeCacheSize));
  uint32_t hash = ComputeIntegerHash(
      static_cast<uint32_t>(reinterpret_cast<uintptr_t>(inner_pointer)),
      v8::internal::kZeroHashSeed);
  uint32_t index = hash & (kInnerPointerToCodeCacheSize - 1);
  InnerPointerToCodeCacheEntry* entry = cache(index);
  if (entry->inner_pointer == inner_pointer) {
    isolate_->counters()->pc_to_code_cached()->Increment();
    ASSERT(entry->code == GcSafeFindCodeForInnerPointer(inner_pointer));
  } else {
    // Because this code may be interrupted by a profiling signal that
    // also queries the cache, we cannot update inner_pointer before the code
    // has been set. Otherwise, we risk trying to use a cache entry before
    // the code has been computed.
    entry->code = GcSafeFindCodeForInnerPointer(inner_pointer);
    entry->safepoint_entry.Reset();
    entry->inner_pointer = inner_pointer;
  }
  return entry;
}


// -------------------------------------------------------------------------


void StackHandler::Unwind(Isolate* isolate,
                          FixedArray* array,
                          int offset,
                          int previous_handler_offset) const {
  STATIC_ASSERT(StackHandlerConstants::kSlotCount >= 5);
  ASSERT_LE(0, offset);
  ASSERT_GE(array->length(), offset + StackHandlerConstants::kSlotCount);
  // Unwinding a stack handler into an array chains it in the opposite
  // direction, re-using the "next" slot as a "previous" link, so that stack
  // handlers can be later re-wound in the correct order. Decode the "state"
  // slot into "index" and "kind" and store them separately, using the fp slot.
  array->set(offset, Smi::FromInt(previous_handler_offset));        // next
  array->set(offset + 1, *code_address());                          // code
  array->set(offset + 2, Smi::FromInt(static_cast<int>(index())));  // state
  array->set(offset + 3, *context_address());                       // context
  array->set(offset + 4, Smi::FromInt(static_cast<int>(kind())));   // fp

  *isolate->handler_address() = next()->address();
}


int StackHandler::Rewind(Isolate* isolate,
                         FixedArray* array,
                         int offset,
                         Address fp) {
  STATIC_ASSERT(StackHandlerConstants::kSlotCount >= 5);
  ASSERT_LE(0, offset);
  ASSERT_GE(array->length(), offset + StackHandlerConstants::kSlotCount);
  Smi* prev_handler_offset = Smi::cast(array->get(offset));
  Code* code = Code::cast(array->get(offset + 1));
  Smi* smi_index = Smi::cast(array->get(offset + 2));
  Object* context = array->get(offset + 3);
  Smi* smi_kind = Smi::cast(array->get(offset + 4));

  unsigned state = KindField::encode(static_cast<Kind>(smi_kind->value())) |
      IndexField::encode(static_cast<unsigned>(smi_index->value()));

  Memory::Address_at(address() + StackHandlerConstants::kNextOffset) =
      *isolate->handler_address();
  Memory::Object_at(address() + StackHandlerConstants::kCodeOffset) = code;
  Memory::uintptr_at(address() + StackHandlerConstants::kStateOffset) = state;
  Memory::Object_at(address() + StackHandlerConstants::kContextOffset) =
      context;
  SetFp(address() + StackHandlerConstants::kFPOffset, fp);

  *isolate->handler_address() = address();

  return prev_handler_offset->value();
}


// -------------------------------------------------------------------------

int NumRegs(RegList reglist) {
  return CompilerIntrinsics::CountSetBits(reglist);
}


struct JSCallerSavedCodeData {
  int reg_code[kNumJSCallerSaved];
};

JSCallerSavedCodeData caller_saved_code_data;

void SetUpJSCallerSavedCodeData() {
  int i = 0;
  for (int r = 0; r < kNumRegs; r++)
    if ((kJSCallerSaved & (1 << r)) != 0)
      caller_saved_code_data.reg_code[i++] = r;

  ASSERT(i == kNumJSCallerSaved);
}


int JSCallerSavedCode(int n) {
  ASSERT(0 <= n && n < kNumJSCallerSaved);
  return caller_saved_code_data.reg_code[n];
}


#define DEFINE_WRAPPER(type, field)                               \
class field##_Wrapper : public ZoneObject {                       \
 public: /* NOLINT */                                             \
  field##_Wrapper(const field& original) : frame_(original) {     \
  }                                                               \
  field frame_;                                                   \
};
STACK_FRAME_TYPE_LIST(DEFINE_WRAPPER)
#undef DEFINE_WRAPPER

static StackFrame* AllocateFrameCopy(StackFrame* frame, Zone* zone) {
#define FRAME_TYPE_CASE(type, field) \
  case StackFrame::type: { \
    field##_Wrapper* wrapper = \
        new(zone) field##_Wrapper(*(reinterpret_cast<field*>(frame))); \
    return &wrapper->frame_; \
  }

  switch (frame->type()) {
    STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
    default: UNREACHABLE();
  }
#undef FRAME_TYPE_CASE
  return NULL;
}


Vector<StackFrame*> CreateStackMap(Isolate* isolate, Zone* zone) {
  ZoneList<StackFrame*> list(10, zone);
  for (StackFrameIterator it(isolate); !it.done(); it.Advance()) {
    StackFrame* frame = AllocateFrameCopy(it.frame(), zone);
    list.Add(frame, zone);
  }
  return list.ToVector();
}


} }  // namespace v8::internal