1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include "v8.h"
29
30 #include "ast.h"
31 #include "deoptimizer.h"
32 #include "frames-inl.h"
33 #include "full-codegen.h"
34 #include "lazy-instance.h"
35 #include "mark-compact.h"
36 #include "safepoint-table.h"
37 #include "scopeinfo.h"
38 #include "string-stream.h"
39 #include "vm-state-inl.h"
40
41 namespace v8 {
42 namespace internal {
43
44
// Process-wide hook (set once via SetReturnAddressLocationResolver) used by
// ResolveReturnAddressLocation to translate return-address slots, e.g. when
// an embedder rewrites return addresses on the stack.
ReturnAddressLocationResolver
    StackFrame::return_address_location_resolver_ = NULL;
47
48
49 // Iterator that supports traversing the stack handlers of a
50 // particular frame. Needs to know the top of the handler chain.
51 class StackHandlerIterator BASE_EMBEDDED {
52 public:
StackHandlerIterator(const StackFrame * frame,StackHandler * handler)53 StackHandlerIterator(const StackFrame* frame, StackHandler* handler)
54 : limit_(frame->fp()), handler_(handler) {
55 // Make sure the handler has already been unwound to this frame.
56 ASSERT(frame->sp() <= handler->address());
57 }
58
handler() const59 StackHandler* handler() const { return handler_; }
60
done()61 bool done() {
62 return handler_ == NULL || handler_->address() > limit_;
63 }
Advance()64 void Advance() {
65 ASSERT(!done());
66 handler_ = handler_->next();
67 }
68
69 private:
70 const Address limit_;
71 StackHandler* handler_;
72 };
73
74
75 // -------------------------------------------------------------------------
76
77
// Initializes one embedded singleton frame object per frame type (the list
// comes from STACK_FRAME_TYPE_LIST); SingletonFor() hands these out during
// iteration instead of allocating frames on the heap.
#define INITIALIZE_SINGLETON(type, field) field##_(this),
StackFrameIteratorBase::StackFrameIteratorBase(Isolate* isolate,
                                               bool can_access_heap_objects)
    : isolate_(isolate),
      STACK_FRAME_TYPE_LIST(INITIALIZE_SINGLETON)
      frame_(NULL), handler_(NULL),
      can_access_heap_objects_(can_access_heap_objects) {
}
#undef INITIALIZE_SINGLETON
87
88
// Iterates the current thread's stack.  Heap access is allowed (second
// argument true), so frame types can be computed precisely.
StackFrameIterator::StackFrameIterator(Isolate* isolate)
    : StackFrameIteratorBase(isolate, true) {
  Reset(isolate->thread_local_top());
}
93
94
// Iterates the stack of the thread described by |t| (which may be an
// archived, non-running thread), with heap access allowed.
StackFrameIterator::StackFrameIterator(Isolate* isolate, ThreadLocalTop* t)
    : StackFrameIteratorBase(isolate, true) {
  Reset(t);
}
99
100
// Moves the iterator to the calling frame, unwinding the handler chain past
// all handlers that belong to the frame being left.
void StackFrameIterator::Advance() {
  ASSERT(!done());
  // Compute the state of the calling frame before restoring
  // callee-saved registers and unwinding handlers. This allows the
  // frame code that computes the caller state to access the top
  // handler and the value of any callee-saved register if needed.
  StackFrame::State state;
  StackFrame::Type type = frame_->GetCallerState(&state);

  // Unwind handlers corresponding to the current frame.
  StackHandlerIterator it(frame_, handler_);
  while (!it.done()) it.Advance();
  handler_ = it.handler();

  // Advance to the calling frame.
  frame_ = SingletonFor(type, &state);

  // When we're done iterating over the stack frames, the handler
  // chain must have been completely unwound.
  ASSERT(!done() || handler_ == NULL);
}
122
123
// Re-targets the iterator at the topmost frame recorded in |top|.  The
// entry into C++ (c_entry_fp) determines the first frame; if it yields no
// usable frame type, frame_ stays NULL and the iterator is immediately done.
void StackFrameIterator::Reset(ThreadLocalTop* top) {
  StackFrame::State state;
  StackFrame::Type type = ExitFrame::GetStateForFramePointer(
      Isolate::c_entry_fp(top), &state);
  handler_ = StackHandler::FromAddress(Isolate::handler(top));
  if (SingletonFor(type) == NULL) return;
  frame_ = SingletonFor(type, &state);
}
132
133
SingletonFor(StackFrame::Type type,StackFrame::State * state)134 StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type,
135 StackFrame::State* state) {
136 if (type == StackFrame::NONE) return NULL;
137 StackFrame* result = SingletonFor(type);
138 ASSERT(result != NULL);
139 result->state_ = *state;
140 return result;
141 }
142
143
// Maps a frame type to the iterator's embedded singleton frame object.
// Returns NULL for NONE and for any type not in STACK_FRAME_TYPE_LIST.
StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type) {
#define FRAME_TYPE_CASE(type, field) \
  case StackFrame::type: result = &field##_; break;

  StackFrame* result = NULL;
  switch (type) {
    case StackFrame::NONE: return NULL;
    STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
    default: break;
  }
  return result;

#undef FRAME_TYPE_CASE
}
158
159
160 // -------------------------------------------------------------------------
161
162
// Advances through the JavaScript frames on the stack until the frame with
// the given id is found.  If no frame matches, the iterator ends up done().
// Note: Advance() is called before the first comparison, so the initial
// (topmost, typically non-JS) frame is never compared against |id|.
JavaScriptFrameIterator::JavaScriptFrameIterator(
    Isolate* isolate, StackFrame::Id id)
    : iterator_(isolate) {
  while (!done()) {
    Advance();
    if (frame()->id() == id) return;
  }
}
171
172
Advance()173 void JavaScriptFrameIterator::Advance() {
174 do {
175 iterator_.Advance();
176 } while (!iterator_.done() && !iterator_.frame()->is_java_script());
177 }
178
179
// If the current JS frame has an arguments adaptor frame below it, steps the
// raw iterator onto that adaptor frame; otherwise does nothing.
void JavaScriptFrameIterator::AdvanceToArgumentsFrame() {
  if (!frame()->has_adapted_arguments()) return;
  iterator_.Advance();
  ASSERT(iterator_.frame()->is_arguments_adaptor());
}
185
186
187 // -------------------------------------------------------------------------
188
189
// Like JavaScriptFrameIterator, but skips frames that should not appear in
// user-visible stack traces (see IsValidFrame).
StackTraceFrameIterator::StackTraceFrameIterator(Isolate* isolate)
    : JavaScriptFrameIterator(isolate) {
  if (!done() && !IsValidFrame()) Advance();
}
194
195
Advance()196 void StackTraceFrameIterator::Advance() {
197 while (true) {
198 JavaScriptFrameIterator::Advance();
199 if (done()) return;
200 if (IsValidFrame()) return;
201 }
202 }
203
204
IsValidFrame()205 bool StackTraceFrameIterator::IsValidFrame() {
206 if (!frame()->function()->IsJSFunction()) return false;
207 Object* script = frame()->function()->shared()->script();
208 // Don't show functions from native scripts to user.
209 return (script->IsScript() &&
210 Script::TYPE_NATIVE != Script::cast(script)->type()->value());
211 }
212
213
214 // -------------------------------------------------------------------------
215
216
// "Safe" iterator used from contexts where the stack may be in an arbitrary
// state (e.g. profiler samples): it never touches the heap and validates
// every address against [sp, js_entry_sp) before trusting it.  If no valid
// starting frame can be established the iterator starts out done().
SafeStackFrameIterator::SafeStackFrameIterator(
    Isolate* isolate,
    Address fp, Address sp, Address js_entry_sp)
    : StackFrameIteratorBase(isolate, false),
      low_bound_(sp),
      high_bound_(js_entry_sp),
      top_frame_type_(StackFrame::NONE),
      external_callback_scope_(isolate->external_callback_scope()) {
  StackFrame::State state;
  StackFrame::Type type;
  ThreadLocalTop* top = isolate->thread_local_top();
  if (IsValidTop(top)) {
    // Prefer the recorded C entry frame pointer when it is trustworthy.
    type = ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state);
    top_frame_type_ = type;
  } else if (IsValidStackAddress(fp)) {
    // Fall back to the sampled register state.
    ASSERT(fp != NULL);
    state.fp = fp;
    state.sp = sp;
    state.pc_address = StackFrame::ResolveReturnAddressLocation(
        reinterpret_cast<Address*>(StandardFrame::ComputePCAddress(fp)));
    // StackFrame::ComputeType will read both kContextOffset and kMarkerOffset,
    // we check only that kMarkerOffset is within the stack bounds and do
    // compile time check that kContextOffset slot is pushed on the stack before
    // kMarkerOffset.
    STATIC_ASSERT(StandardFrameConstants::kMarkerOffset <
                  StandardFrameConstants::kContextOffset);
    Address frame_marker = fp + StandardFrameConstants::kMarkerOffset;
    if (IsValidStackAddress(frame_marker)) {
      type = StackFrame::ComputeType(this, &state);
      top_frame_type_ = type;
    } else {
      // Mark the frame as JAVA_SCRIPT if we cannot determine its type.
      // The frame anyways will be skipped.
      type = StackFrame::JAVA_SCRIPT;
      // Top frame is incomplete so we cannot reliably determine its type.
      top_frame_type_ = StackFrame::NONE;
    }
  } else {
    return;
  }
  if (SingletonFor(type) == NULL) return;
  frame_ = SingletonFor(type, &state);
  if (frame_ == NULL) return;

  Advance();

  if (frame_ != NULL && !frame_->is_exit() &&
      external_callback_scope_ != NULL &&
      external_callback_scope_->scope_address() < frame_->fp()) {
    // Skip top ExternalCallbackScope if we already advanced to a JS frame
    // under it. Sampler will anyways take this top external callback.
    external_callback_scope_ = external_callback_scope_->previous();
  }
}
271
272
// Returns true when the thread-local top plausibly describes a valid exit
// frame plus handler chain, so the iterator may start from c_entry_fp.
bool SafeStackFrameIterator::IsValidTop(ThreadLocalTop* top) const {
  Address c_entry_fp = Isolate::c_entry_fp(top);
  if (!IsValidExitFrame(c_entry_fp)) return false;
  // There should be at least one JS_ENTRY stack handler.
  Address handler = Isolate::handler(top);
  if (handler == NULL) return false;
  // Check that there are no js frames on top of the native frames.
  return c_entry_fp < handler;
}
282
283
// Moves to the caller frame, validating pointers at every step.  On any
// validation failure frame_ is set to NULL, which makes the iterator done().
void SafeStackFrameIterator::AdvanceOneFrame() {
  ASSERT(!done());
  StackFrame* last_frame = frame_;
  Address last_sp = last_frame->sp(), last_fp = last_frame->fp();
  // Before advancing to the next stack frame, perform pointer validity tests.
  if (!IsValidFrame(last_frame) || !IsValidCaller(last_frame)) {
    frame_ = NULL;
    return;
  }

  // Advance to the previous frame.
  StackFrame::State state;
  StackFrame::Type type = frame_->GetCallerState(&state);
  frame_ = SingletonFor(type, &state);
  if (frame_ == NULL) return;

  // Check that we have actually moved to the previous frame in the stack;
  // the stack grows downwards, so the caller must be at higher addresses.
  if (frame_->sp() < last_sp || frame_->fp() < last_fp) {
    frame_ = NULL;
  }
}
305
306
IsValidFrame(StackFrame * frame) const307 bool SafeStackFrameIterator::IsValidFrame(StackFrame* frame) const {
308 return IsValidStackAddress(frame->sp()) && IsValidStackAddress(frame->fp());
309 }
310
311
// Checks that computing |frame|'s caller state would only dereference
// addresses that pass the stack-bounds validation, so GetCallerState is
// safe to call from a signal/sampling context.
bool SafeStackFrameIterator::IsValidCaller(StackFrame* frame) {
  StackFrame::State state;
  if (frame->is_entry() || frame->is_entry_construct()) {
    // See EntryFrame::GetCallerState. It computes the caller FP address
    // and calls ExitFrame::GetStateForFramePointer on it. We need to be
    // sure that caller FP address is valid.
    Address caller_fp = Memory::Address_at(
        frame->fp() + EntryFrameConstants::kCallerFPOffset);
    if (!IsValidExitFrame(caller_fp)) return false;
  } else if (frame->is_arguments_adaptor()) {
    // See ArgumentsAdaptorFrame::GetCallerStackPointer. It assumes that
    // the number of arguments is stored on stack as Smi. We need to check
    // that it really an Smi.
    Object* number_of_args = reinterpret_cast<ArgumentsAdaptorFrame*>(frame)->
        GetExpression(0);
    if (!number_of_args->IsSmi()) {
      return false;
    }
  }
  frame->ComputeCallerState(&state);
  return IsValidStackAddress(state.sp) && IsValidStackAddress(state.fp) &&
      SingletonFor(frame->GetCallerState(&state)) != NULL;
}
335
336
// Validates that |fp| could be the frame pointer of an exit frame: fp, the
// derived sp, and the pc slot must all lie within the stack bounds, and the
// saved pc must be non-null.
bool SafeStackFrameIterator::IsValidExitFrame(Address fp) const {
  if (!IsValidStackAddress(fp)) return false;
  Address sp = ExitFrame::ComputeStackPointer(fp);
  if (!IsValidStackAddress(sp)) return false;
  StackFrame::State state;
  ExitFrame::FillState(fp, sp, &state);
  if (!IsValidStackAddress(reinterpret_cast<Address>(state.pc_address))) {
    return false;
  }
  return *state.pc_address != NULL;
}
348
349
// Advances to the next JS frame, or to an EXIT frame whose pc can be
// attributed to an external (API) callback.  For such EXIT frames the pc
// slot is redirected to the callback address so the sampler reports the
// callback rather than the exit stub.
void SafeStackFrameIterator::Advance() {
  while (true) {
    AdvanceOneFrame();
    if (done()) return;
    if (frame_->is_java_script()) return;
    if (frame_->is_exit() && external_callback_scope_) {
      // Some of the EXIT frames may have ExternalCallbackScope allocated on
      // top of them. In that case the scope corresponds to the first EXIT
      // frame beneath it. There may be other EXIT frames on top of the
      // ExternalCallbackScope, just skip them as we cannot collect any useful
      // information about them.
      if (external_callback_scope_->scope_address() < frame_->fp()) {
        Address* callback_address =
            external_callback_scope_->callback_address();
        if (*callback_address != NULL) {
          frame_->state_.pc_address = callback_address;
        }
        external_callback_scope_ = external_callback_scope_->previous();
        ASSERT(external_callback_scope_ == NULL ||
               external_callback_scope_->scope_address() > frame_->fp());
        return;
      }
    }
  }
}
375
376
377 // -------------------------------------------------------------------------
378
379
// Looks up (and memoizes in the inner-pointer-to-code cache) the safepoint
// entry and stack-slot count for the code containing |inner_pointer|.
// Outputs via |safepoint_entry| and |stack_slots|; returns the Code object.
Code* StackFrame::GetSafepointData(Isolate* isolate,
                                   Address inner_pointer,
                                   SafepointEntry* safepoint_entry,
                                   unsigned* stack_slots) {
  InnerPointerToCodeCache::InnerPointerToCodeCacheEntry* entry =
      isolate->inner_pointer_to_code_cache()->GetCacheEntry(inner_pointer);
  if (!entry->safepoint_entry.is_valid()) {
    entry->safepoint_entry = entry->code->GetSafepointEntry(inner_pointer);
    ASSERT(entry->safepoint_entry.is_valid());
  } else {
    // Cached entry must agree with a fresh lookup.
    ASSERT(entry->safepoint_entry.Equals(
        entry->code->GetSafepointEntry(inner_pointer)));
  }

  // Fill in the results and return the code.
  Code* code = entry->code;
  *safepoint_entry = entry->safepoint_entry;
  *stack_slots = code->stack_slots();
  return code;
}
400
401
HasHandler() const402 bool StackFrame::HasHandler() const {
403 StackHandlerIterator it(this, top_handler());
404 return !it.done();
405 }
406
407
408 #ifdef DEBUG
409 static bool GcSafeCodeContains(HeapObject* object, Address addr);
410 #endif
411
412
// Visits the Code object that the pc in *pc_address points into.  If the GC
// moves the Code object, the pc is rebased onto the new instruction start so
// the frame keeps pointing at the same instruction.
void StackFrame::IteratePc(ObjectVisitor* v,
                           Address* pc_address,
                           Code* holder) {
  Address pc = *pc_address;
  ASSERT(GcSafeCodeContains(holder, pc));
  unsigned pc_offset = static_cast<unsigned>(pc - holder->instruction_start());
  Object* code = holder;
  v->VisitPointer(&code);
  if (code != holder) {
    // The code object moved; recompute the pc relative to its new location.
    holder = reinterpret_cast<Code*>(code);
    pc = holder->instruction_start() + pc_offset;
    *pc_address = pc;
  }
}
427
428
// Installs the process-wide return-address resolver.  May be set only once
// (the ASSERT enforces that it is currently unset).
void StackFrame::SetReturnAddressLocationResolver(
    ReturnAddressLocationResolver resolver) {
  ASSERT(return_address_location_resolver_ == NULL);
  return_address_location_resolver_ = resolver;
}
434
435
// Determines the type of the frame described by |state| by inspecting the
// marker slot (and, for non-smi markers, the code object containing the pc).
StackFrame::Type StackFrame::ComputeType(const StackFrameIteratorBase* iterator,
                                         State* state) {
  ASSERT(state->fp != NULL);
  if (StandardFrame::IsArgumentsAdaptorFrame(state->fp)) {
    return ARGUMENTS_ADAPTOR;
  }
  // The marker and function offsets overlap. If the marker isn't a
  // smi then the frame is a JavaScript frame -- and the marker is
  // really the function.
  const int offset = StandardFrameConstants::kMarkerOffset;
  Object* marker = Memory::Object_at(state->fp + offset);
  if (!marker->IsSmi()) {
    // If we're using a "safe" stack iterator, we treat optimized
    // frames as normal JavaScript frames to avoid having to look
    // into the heap to determine the state. This is safe as long
    // as nobody tries to GC...
    if (!iterator->can_access_heap_objects_) return JAVA_SCRIPT;
    Code::Kind kind = GetContainingCode(iterator->isolate(),
                                        *(state->pc_address))->kind();
    ASSERT(kind == Code::FUNCTION || kind == Code::OPTIMIZED_FUNCTION);
    return (kind == Code::OPTIMIZED_FUNCTION) ? OPTIMIZED : JAVA_SCRIPT;
  }
  // A smi marker encodes the frame type directly.
  return static_cast<StackFrame::Type>(Smi::cast(marker)->value());
}
460
461
#ifdef DEBUG
// Debug-only accessor: whether the owning iterator may touch heap objects.
bool StackFrame::can_access_heap_objects() const {
  return iterator_->can_access_heap_objects_;
}
#endif
467
468
// Fills |state| with the caller's register state and classifies the caller.
StackFrame::Type StackFrame::GetCallerState(State* state) const {
  ComputeCallerState(state);
  return ComputeType(iterator_, state);
}
473
474
// Returns fp() adjusted for dynamic stack-alignment padding.  Only ia32
// optimized frames may carry an extra padding word; elsewhere fp() is exact.
Address StackFrame::UnpaddedFP() const {
#if V8_TARGET_ARCH_IA32
  if (!is_optimized()) return fp();
  int32_t alignment_state = Memory::int32_at(
    fp() + JavaScriptFrameConstants::kDynamicAlignmentStateOffset);

  return (alignment_state == kAlignmentPaddingPushed) ?
    (fp() + kPointerSize) : fp();
#else
  return fp();
#endif
}
487
488
// Entry frames are always executed by the JS entry trampoline.
Code* EntryFrame::unchecked_code() const {
  return isolate()->heap()->js_entry_code();
}
492
493
// For entry frames computing the caller state and classifying it coincide;
// delegate to GetCallerState (the returned type is discarded here).
void EntryFrame::ComputeCallerState(State* state) const {
  GetCallerState(state);
}
497
498
SetCallerFp(Address caller_fp)499 void EntryFrame::SetCallerFp(Address caller_fp) {
500 const int offset = EntryFrameConstants::kCallerFPOffset;
501 Memory::Address_at(this->fp() + offset) = caller_fp;
502 }
503
504
GetCallerState(State * state) const505 StackFrame::Type EntryFrame::GetCallerState(State* state) const {
506 const int offset = EntryFrameConstants::kCallerFPOffset;
507 Address fp = Memory::Address_at(this->fp() + offset);
508 return ExitFrame::GetStateForFramePointer(fp, state);
509 }
510
511
// Construct-entry frames run the construct variant of the entry trampoline.
Code* EntryConstructFrame::unchecked_code() const {
  return isolate()->heap()->js_construct_entry_code();
}
515
516
code_slot() const517 Object*& ExitFrame::code_slot() const {
518 const int offset = ExitFrameConstants::kCodeOffset;
519 return Memory::Object_at(fp() + offset);
520 }
521
522
// Reads the code object straight out of the frame's code slot (no checks).
Code* ExitFrame::unchecked_code() const {
  return reinterpret_cast<Code*>(code_slot());
}
526
527
// Reconstructs the caller's sp/fp/pc from the slots the exit stub saved.
void ExitFrame::ComputeCallerState(State* state) const {
  // Set up the caller state.
  state->sp = caller_sp();
  state->fp = Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset);
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset));
}
535
536
// Overwrites the caller frame pointer saved in this exit frame.
void ExitFrame::SetCallerFp(Address caller_fp) {
  Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset) = caller_fp;
}
540
541
// GC visitation: an exit frame contributes only its pc and its code slot.
void ExitFrame::Iterate(ObjectVisitor* v) const {
  // The arguments are traversed as part of the expression stack of
  // the calling frame.
  IteratePc(v, pc_address(), LookupCode());
  v->VisitPointer(&code_slot());
}
548
549
GetCallerStackPointer() const550 Address ExitFrame::GetCallerStackPointer() const {
551 return fp() + ExitFrameConstants::kCallerSPDisplacement;
552 }
553
554
// Decodes the exit-frame state for |fp|.  A null fp means there is no C
// entry on this thread's stack, reported as NONE.
StackFrame::Type ExitFrame::GetStateForFramePointer(Address fp, State* state) {
  if (fp == 0) return NONE;
  Address sp = ComputeStackPointer(fp);
  FillState(fp, sp, state);
  ASSERT(*state->pc_address != NULL);
  return EXIT;
}
562
563
// The exit stub saves the stack pointer in a slot relative to fp.
Address ExitFrame::ComputeStackPointer(Address fp) {
  return Memory::Address_at(fp + ExitFrameConstants::kSPOffset);
}
567
568
// Populates |state| for an exit frame: the return pc sits one pc-slot below
// the saved stack pointer.
void ExitFrame::FillState(Address fp, Address sp, State* state) {
  state->sp = sp;
  state->fp = fp;
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(sp - 1 * kPCOnStackSize));
}
575
576
GetExpressionAddress(int n) const577 Address StandardFrame::GetExpressionAddress(int n) const {
578 const int offset = StandardFrameConstants::kExpressionsOffset;
579 return fp() + offset - n * kPointerSize;
580 }
581
582
// Static variant: reads expression slot |index| of the frame rooted at |fp|.
Object* StandardFrame::GetExpression(Address fp, int index) {
  return Memory::Object_at(GetExpressionAddress(fp, index));
}
586
587
GetExpressionAddress(Address fp,int n)588 Address StandardFrame::GetExpressionAddress(Address fp, int n) {
589 const int offset = StandardFrameConstants::kExpressionsOffset;
590 return fp + offset - n * kPointerSize;
591 }
592
593
// Number of live expression-stack slots, derived from the distance between
// the expressions area and the current sp.
int StandardFrame::ComputeExpressionsCount() const {
  const int offset =
      StandardFrameConstants::kExpressionsOffset + kPointerSize;
  Address base = fp() + offset;
  Address limit = sp();
  ASSERT(base >= limit);  // stack grows downwards
  // Include register-allocated locals in number of expressions.
  return static_cast<int>((base - limit) / kPointerSize);
}
603
604
// Reconstructs the caller's sp/fp/pc from the standard frame layout.
void StandardFrame::ComputeCallerState(State* state) const {
  state->sp = caller_sp();
  state->fp = caller_fp();
  state->pc_address = ResolveReturnAddressLocation(
      reinterpret_cast<Address*>(ComputePCAddress(fp())));
}
611
612
// Overwrites the caller frame pointer saved in this frame.
void StandardFrame::SetCallerFp(Address caller_fp) {
  Memory::Address_at(fp() + StandardFrameConstants::kCallerFPOffset) =
      caller_fp;
}
617
618
IsExpressionInsideHandler(int n) const619 bool StandardFrame::IsExpressionInsideHandler(int n) const {
620 Address address = GetExpressionAddress(n);
621 for (StackHandlerIterator it(this, top_handler()); !it.done(); it.Advance()) {
622 if (it.handler()->includes(address)) return true;
623 }
624 return false;
625 }
626
627
// GC visitation of a compiled (full-codegen or optimized) frame: visits
// outgoing parameters, pointer-holding safepoint registers, tagged spill
// slots per the safepoint bitmap, the return pc, and the fixed context and
// function slots.  Layout assumptions come from the current safepoint entry.
void StandardFrame::IterateCompiledFrame(ObjectVisitor* v) const {
  // Make sure that we're not doing "safe" stack frame iteration. We cannot
  // possibly find pointers in optimized frames in that state.
  ASSERT(can_access_heap_objects());

  // Compute the safepoint information.
  unsigned stack_slots = 0;
  SafepointEntry safepoint_entry;
  Code* code = StackFrame::GetSafepointData(
      isolate(), pc(), &safepoint_entry, &stack_slots);
  unsigned slot_space = stack_slots * kPointerSize;

  // Visit the outgoing parameters.
  Object** parameters_base = &Memory::Object_at(sp());
  Object** parameters_limit = &Memory::Object_at(
      fp() + JavaScriptFrameConstants::kFunctionOffset - slot_space);

  // Visit the parameters that may be on top of the saved registers.
  if (safepoint_entry.argument_count() > 0) {
    v->VisitPointers(parameters_base,
                     parameters_base + safepoint_entry.argument_count());
    parameters_base += safepoint_entry.argument_count();
  }

  // Skip saved double registers.
  if (safepoint_entry.has_doubles()) {
    // Number of doubles not known at snapshot time.
    ASSERT(!Serializer::enabled());
    parameters_base += DoubleRegister::NumAllocatableRegisters() *
        kDoubleSize / kPointerSize;
  }

  // Visit the registers that contain pointers if any.
  if (safepoint_entry.HasRegisters()) {
    for (int i = kNumSafepointRegisters - 1; i >=0; i--) {
      if (safepoint_entry.HasRegisterAt(i)) {
        int reg_stack_index = MacroAssembler::SafepointRegisterStackIndex(i);
        v->VisitPointer(parameters_base + reg_stack_index);
      }
    }
    // Skip the words containing the register values.
    parameters_base += kNumSafepointRegisters;
  }

  // We're done dealing with the register bits.
  uint8_t* safepoint_bits = safepoint_entry.bits();
  safepoint_bits += kNumSafepointRegisters >> kBitsPerByteLog2;

  // Visit the rest of the parameters.
  v->VisitPointers(parameters_base, parameters_limit);

  // Visit pointer spill slots and locals.
  for (unsigned index = 0; index < stack_slots; index++) {
    int byte_index = index >> kBitsPerByteLog2;
    int bit_index = index & (kBitsPerByte - 1);
    if ((safepoint_bits[byte_index] & (1U << bit_index)) != 0) {
      v->VisitPointer(parameters_limit + index);
    }
  }

  // Visit the return address in the callee and incoming arguments.
  IteratePc(v, pc_address(), code);

  // Visit the context in stub frame and JavaScript frame.
  // Visit the function in JavaScript frame.
  Object** fixed_base = &Memory::Object_at(
      fp() + StandardFrameConstants::kMarkerOffset);
  Object** fixed_limit = &Memory::Object_at(fp());
  v->VisitPointers(fixed_base, fixed_limit);
}
698
699
// Stub frames are visited exactly like other compiled frames.
void StubFrame::Iterate(ObjectVisitor* v) const {
  IterateCompiledFrame(v);
}
703
704
// Finds the stub's code object via the isolate's pc-to-code lookup.
Code* StubFrame::unchecked_code() const {
  return static_cast<Code*>(isolate()->FindCodeObject(pc()));
}
708
709
GetCallerStackPointer() const710 Address StubFrame::GetCallerStackPointer() const {
711 return fp() + ExitFrameConstants::kCallerSPDisplacement;
712 }
713
714
// Stub frames carry no incoming JS arguments.
int StubFrame::GetNumberOfIncomingArguments() const {
  return 0;
}
718
719
// GC visitation for optimized frames; identical to other compiled frames,
// with a debug check that no stack handlers live in the frame.
void OptimizedFrame::Iterate(ObjectVisitor* v) const {
#ifdef DEBUG
  // Make sure that optimized frames do not contain any stack handlers.
  StackHandlerIterator it(this, top_handler());
  ASSERT(it.done());
#endif

  IterateCompiledFrame(v);
}
729
730
// Stores |value| into the stack slot of incoming parameter |index|.
void JavaScriptFrame::SetParameterValue(int index, Object* value) const {
  Memory::Object_at(GetParameterSlot(index)) = value;
}
734
735
IsConstructor() const736 bool JavaScriptFrame::IsConstructor() const {
737 Address fp = caller_fp();
738 if (has_adapted_arguments()) {
739 // Skip the arguments adaptor frame and look at the real caller.
740 fp = Memory::Address_at(fp + StandardFrameConstants::kCallerFPOffset);
741 }
742 return IsConstructFrame(fp);
743 }
744
745
GetArgumentsLength() const746 int JavaScriptFrame::GetArgumentsLength() const {
747 // If there is an arguments adaptor frame get the arguments length from it.
748 if (has_adapted_arguments()) {
749 return Smi::cast(GetExpression(caller_fp(), 0))->value();
750 } else {
751 return GetNumberOfIncomingArguments();
752 }
753 }
754
755
// The frame's code is whatever its function currently points at.
Code* JavaScriptFrame::unchecked_code() const {
  return function()->code();
}
759
760
// Formal parameter count from the shared function info.  Requires heap
// access outside GC, hence the assertion.
int JavaScriptFrame::GetNumberOfIncomingArguments() const {
  ASSERT(can_access_heap_objects() &&
         isolate()->heap()->gc_state() == Heap::NOT_IN_GC);

  return function()->shared()->formal_parameter_count();
}
767
768
// The caller's sp is a fixed offset above this frame's fp.
Address JavaScriptFrame::GetCallerStackPointer() const {
  return fp() + StandardFrameConstants::kCallerSPOffset;
}
772
773
// A non-optimized JS frame corresponds to exactly one function.
void JavaScriptFrame::GetFunctions(List<JSFunction*>* functions) {
  ASSERT(functions->length() == 0);
  functions->Add(function());
}
778
779
Summarize(List<FrameSummary> * functions)780 void JavaScriptFrame::Summarize(List<FrameSummary>* functions) {
781 ASSERT(functions->length() == 0);
782 Code* code_pointer = LookupCode();
783 int offset = static_cast<int>(pc() - code_pointer->address());
784 FrameSummary summary(receiver(),
785 function(),
786 code_pointer,
787 offset,
788 IsConstructor());
789 functions->Add(summary);
790 }
791
792
PrintTop(Isolate * isolate,FILE * file,bool print_args,bool print_line_number)793 void JavaScriptFrame::PrintTop(Isolate* isolate,
794 FILE* file,
795 bool print_args,
796 bool print_line_number) {
797 // constructor calls
798 HandleScope scope(isolate);
799 DisallowHeapAllocation no_allocation;
800 JavaScriptFrameIterator it(isolate);
801 while (!it.done()) {
802 if (it.frame()->is_java_script()) {
803 JavaScriptFrame* frame = it.frame();
804 if (frame->IsConstructor()) PrintF(file, "new ");
805 // function name
806 JSFunction* fun = frame->function();
807 fun->PrintName();
808 Code* js_code = frame->unchecked_code();
809 Address pc = frame->pc();
810 int code_offset =
811 static_cast<int>(pc - js_code->instruction_start());
812 PrintF("+%d", code_offset);
813 SharedFunctionInfo* shared = fun->shared();
814 if (print_line_number) {
815 Code* code = Code::cast(isolate->FindCodeObject(pc));
816 int source_pos = code->SourcePosition(pc);
817 Object* maybe_script = shared->script();
818 if (maybe_script->IsScript()) {
819 Handle<Script> script(Script::cast(maybe_script));
820 int line = GetScriptLineNumberSafe(script, source_pos) + 1;
821 Object* script_name_raw = script->name();
822 if (script_name_raw->IsString()) {
823 String* script_name = String::cast(script->name());
824 SmartArrayPointer<char> c_script_name =
825 script_name->ToCString(DISALLOW_NULLS,
826 ROBUST_STRING_TRAVERSAL);
827 PrintF(file, " at %s:%d", *c_script_name, line);
828 } else {
829 PrintF(file, " at <unknown>:%d", line);
830 }
831 } else {
832 PrintF(file, " at <unknown>:<unknown>");
833 }
834 }
835
836 if (print_args) {
837 // function arguments
838 // (we are intentionally only printing the actually
839 // supplied parameters, not all parameters required)
840 PrintF(file, "(this=");
841 frame->receiver()->ShortPrint(file);
842 const int length = frame->ComputeParametersCount();
843 for (int i = 0; i < length; i++) {
844 PrintF(file, ", ");
845 frame->GetParameter(i)->ShortPrint(file);
846 }
847 PrintF(file, ")");
848 }
849 break;
850 }
851 it.Advance();
852 }
853 }
854
855
SaveOperandStack(FixedArray * store,int * stack_handler_index) const856 void JavaScriptFrame::SaveOperandStack(FixedArray* store,
857 int* stack_handler_index) const {
858 int operands_count = store->length();
859 ASSERT_LE(operands_count, ComputeOperandsCount());
860
861 // Visit the stack in LIFO order, saving operands and stack handlers into the
862 // array. The saved stack handlers store a link to the next stack handler,
863 // which will allow RestoreOperandStack to rewind the handlers.
864 StackHandlerIterator it(this, top_handler());
865 int i = operands_count - 1;
866 *stack_handler_index = -1;
867 for (; !it.done(); it.Advance()) {
868 StackHandler* handler = it.handler();
869 // Save operands pushed after the handler was pushed.
870 for (; GetOperandSlot(i) < handler->address(); i--) {
871 store->set(i, GetOperand(i));
872 }
873 ASSERT_GE(i + 1, StackHandlerConstants::kSlotCount);
874 ASSERT_EQ(handler->address(), GetOperandSlot(i));
875 int next_stack_handler_index = i + 1 - StackHandlerConstants::kSlotCount;
876 handler->Unwind(isolate(), store, next_stack_handler_index,
877 *stack_handler_index);
878 *stack_handler_index = next_stack_handler_index;
879 i -= StackHandlerConstants::kSlotCount;
880 }
881
882 // Save any remaining operands.
883 for (; i >= 0; i--) {
884 store->set(i, GetOperand(i));
885 }
886 }
887
888
// Inverse of SaveOperandStack: writes the saved operands back into this
// frame's (currently hole-filled) operand slots and re-links the stack
// handlers, following the handler chain recorded via |stack_handler_index|.
void JavaScriptFrame::RestoreOperandStack(FixedArray* store,
                                          int stack_handler_index) {
  int operands_count = store->length();
  ASSERT_LE(operands_count, ComputeOperandsCount());
  int i = 0;
  while (i <= stack_handler_index) {
    if (i < stack_handler_index) {
      // An operand.
      ASSERT_EQ(GetOperand(i), isolate()->heap()->the_hole_value());
      Memory::Object_at(GetOperandSlot(i)) = store->get(i);
      i++;
    } else {
      // A stack handler.
      ASSERT_EQ(i, stack_handler_index);
      // The FixedArray store grows up. The stack grows down. So the operand
      // slot for i actually points to the bottom of the top word in the
      // handler. The base of the StackHandler* is the address of the bottom
      // word, which will be the last slot that is in the handler.
      int handler_slot_index = i + StackHandlerConstants::kSlotCount - 1;
      StackHandler *handler =
          StackHandler::FromAddress(GetOperandSlot(handler_slot_index));
      // Rewind returns the index of the next saved handler in the chain.
      stack_handler_index = handler->Rewind(isolate(), store, i, fp());
      i += StackHandlerConstants::kSlotCount;
    }
  }

  // Remaining entries above the last handler are plain operands.
  for (; i < operands_count; i++) {
    ASSERT_EQ(GetOperand(i), isolate()->heap()->the_hole_value());
    Memory::Object_at(GetOperandSlot(i)) = store->get(i);
  }
}
920
921
Print()922 void FrameSummary::Print() {
923 PrintF("receiver: ");
924 receiver_->ShortPrint();
925 PrintF("\nfunction: ");
926 function_->shared()->DebugName()->ShortPrint();
927 PrintF("\ncode: ");
928 code_->ShortPrint();
929 if (code_->kind() == Code::FUNCTION) PrintF(" NON-OPT");
930 if (code_->kind() == Code::OPTIMIZED_FUNCTION) PrintF(" OPT");
931 PrintF("\npc: %d\n", offset_);
932 }
933
934
LiteralAt(FixedArray * literal_array,int literal_id)935 JSFunction* OptimizedFrame::LiteralAt(FixedArray* literal_array,
936 int literal_id) {
937 if (literal_id == Translation::kSelfLiteralId) {
938 return function();
939 }
940
941 return JSFunction::cast(literal_array->get(literal_id));
942 }
943
944
// Builds a FrameSummary for every JS frame (including inlined frames)
// represented by this optimized frame, by decoding the deoptimization
// translation recorded for the current pc.
void OptimizedFrame::Summarize(List<FrameSummary>* frames) {
  ASSERT(frames->length() == 0);
  ASSERT(is_optimized());

  int deopt_index = Safepoint::kNoDeoptimizationIndex;
  DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index);
  FixedArray* literal_array = data->LiteralArray();

  // BUG(3243555): Since we don't have a lazy-deopt registered at
  // throw-statements, we can't use the translation at the call-site of
  // throw. An entry with no deoptimization index indicates a call-site
  // without a lazy-deopt. As a consequence we are not allowed to inline
  // functions containing throw.
  if (deopt_index == Safepoint::kNoDeoptimizationIndex) {
    JavaScriptFrame::Summarize(frames);
    return;
  }

  TranslationIterator it(data->TranslationByteArray(),
                         data->TranslationIndex(deopt_index)->value());
  Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
  ASSERT(opcode == Translation::BEGIN);
  it.Next();  // Drop frame count.
  int jsframe_count = it.Next();

  // We create the summary in reverse order because the frames
  // in the deoptimization translation are ordered bottom-to-top.
  bool is_constructor = IsConstructor();
  int i = jsframe_count;
  while (i > 0) {
    opcode = static_cast<Translation::Opcode>(it.Next());
    if (opcode == Translation::JS_FRAME) {
      i--;
      BailoutId ast_id = BailoutId(it.Next());
      JSFunction* function = LiteralAt(literal_array, it.Next());
      it.Next();  // Skip height.

      // The translation commands are ordered and the receiver is always
      // at the first position. Since we are always at a call when we need
      // to construct a stack trace, the receiver is always in a stack slot.
      opcode = static_cast<Translation::Opcode>(it.Next());
      ASSERT(opcode == Translation::STACK_SLOT ||
             opcode == Translation::LITERAL ||
             opcode == Translation::CAPTURED_OBJECT ||
             opcode == Translation::DUPLICATED_OBJECT);
      int index = it.Next();

      // Get the correct receiver in the optimized frame.
      Object* receiver = NULL;
      if (opcode == Translation::LITERAL) {
        receiver = data->LiteralArray()->get(index);
      } else if (opcode == Translation::STACK_SLOT) {
        // Positive index means the value is spilled to the locals
        // area. Negative means it is stored in the incoming parameter
        // area.
        if (index >= 0) {
          receiver = GetExpression(index);
        } else {
          // Index -1 overlaps with last parameter, -n with the first parameter,
          // (-n - 1) with the receiver with n being the number of parameters
          // of the outermost, optimized frame.
          int parameter_count = ComputeParametersCount();
          int parameter_index = index + parameter_count;
          receiver = (parameter_index == -1)
              ? this->receiver()
              : this->GetParameter(parameter_index);
        }
      } else {
        // TODO(3029): Materializing a captured object (or duplicated
        // object) is hard, we return undefined for now. This breaks the
        // produced stack trace, as constructor frames aren't marked as
        // such anymore.
        receiver = isolate()->heap()->undefined_value();
      }

      // Map the ast id back to a pc offset in the corresponding
      // unoptimized code so the summary reports a usable code position.
      Code* code = function->shared()->code();
      DeoptimizationOutputData* output_data =
          DeoptimizationOutputData::cast(code->deoptimization_data());
      unsigned entry = Deoptimizer::GetOutputInfo(output_data,
                                                  ast_id,
                                                  function->shared());
      unsigned pc_offset =
          FullCodeGenerator::PcField::decode(entry) + Code::kHeaderSize;
      ASSERT(pc_offset > 0);

      FrameSummary summary(receiver, function, code, pc_offset, is_constructor);
      frames->Add(summary);
      is_constructor = false;
    } else if (opcode == Translation::CONSTRUCT_STUB_FRAME) {
      // The next encountered JS_FRAME will be marked as a constructor call.
      it.Skip(Translation::NumberOfOperandsFor(opcode));
      ASSERT(!is_constructor);
      is_constructor = true;
    } else {
      // Skip over operands to advance to the next opcode.
      it.Skip(Translation::NumberOfOperandsFor(opcode));
    }
  }
  ASSERT(!is_constructor);
}
1045
1046
GetDeoptimizationData(int * deopt_index)1047 DeoptimizationInputData* OptimizedFrame::GetDeoptimizationData(
1048 int* deopt_index) {
1049 ASSERT(is_optimized());
1050
1051 JSFunction* opt_function = function();
1052 Code* code = opt_function->code();
1053
1054 // The code object may have been replaced by lazy deoptimization. Fall
1055 // back to a slow search in this case to find the original optimized
1056 // code object.
1057 if (!code->contains(pc())) {
1058 code = isolate()->inner_pointer_to_code_cache()->
1059 GcSafeFindCodeForInnerPointer(pc());
1060 }
1061 ASSERT(code != NULL);
1062 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
1063
1064 SafepointEntry safepoint_entry = code->GetSafepointEntry(pc());
1065 *deopt_index = safepoint_entry.deoptimization_index();
1066 ASSERT(*deopt_index != Safepoint::kNoDeoptimizationIndex);
1067
1068 return DeoptimizationInputData::cast(code->deoptimization_data());
1069 }
1070
1071
GetInlineCount()1072 int OptimizedFrame::GetInlineCount() {
1073 ASSERT(is_optimized());
1074
1075 int deopt_index = Safepoint::kNoDeoptimizationIndex;
1076 DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index);
1077
1078 TranslationIterator it(data->TranslationByteArray(),
1079 data->TranslationIndex(deopt_index)->value());
1080 Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
1081 ASSERT(opcode == Translation::BEGIN);
1082 USE(opcode);
1083 it.Next(); // Drop frame count.
1084 int jsframe_count = it.Next();
1085 return jsframe_count;
1086 }
1087
1088
GetFunctions(List<JSFunction * > * functions)1089 void OptimizedFrame::GetFunctions(List<JSFunction*>* functions) {
1090 ASSERT(functions->length() == 0);
1091 ASSERT(is_optimized());
1092
1093 int deopt_index = Safepoint::kNoDeoptimizationIndex;
1094 DeoptimizationInputData* data = GetDeoptimizationData(&deopt_index);
1095 FixedArray* literal_array = data->LiteralArray();
1096
1097 TranslationIterator it(data->TranslationByteArray(),
1098 data->TranslationIndex(deopt_index)->value());
1099 Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
1100 ASSERT(opcode == Translation::BEGIN);
1101 it.Next(); // Drop frame count.
1102 int jsframe_count = it.Next();
1103
1104 // We insert the frames in reverse order because the frames
1105 // in the deoptimization translation are ordered bottom-to-top.
1106 while (jsframe_count > 0) {
1107 opcode = static_cast<Translation::Opcode>(it.Next());
1108 if (opcode == Translation::JS_FRAME) {
1109 jsframe_count--;
1110 it.Next(); // Skip ast id.
1111 JSFunction* function = LiteralAt(literal_array, it.Next());
1112 it.Next(); // Skip height.
1113 functions->Add(function);
1114 } else {
1115 // Skip over operands to advance to the next opcode.
1116 it.Skip(Translation::NumberOfOperandsFor(opcode));
1117 }
1118 }
1119 }
1120
1121
GetNumberOfIncomingArguments() const1122 int ArgumentsAdaptorFrame::GetNumberOfIncomingArguments() const {
1123 return Smi::cast(GetExpression(0))->value();
1124 }
1125
1126
GetCallerStackPointer() const1127 Address ArgumentsAdaptorFrame::GetCallerStackPointer() const {
1128 return fp() + StandardFrameConstants::kCallerSPOffset;
1129 }
1130
1131
GetCallerStackPointer() const1132 Address InternalFrame::GetCallerStackPointer() const {
1133 // Internal frames have no arguments. The stack pointer of the
1134 // caller is at a fixed offset from the frame pointer.
1135 return fp() + StandardFrameConstants::kCallerSPOffset;
1136 }
1137
1138
unchecked_code() const1139 Code* ArgumentsAdaptorFrame::unchecked_code() const {
1140 return isolate()->builtins()->builtin(
1141 Builtins::kArgumentsAdaptorTrampoline);
1142 }
1143
1144
unchecked_code() const1145 Code* InternalFrame::unchecked_code() const {
1146 const int offset = InternalFrameConstants::kCodeOffset;
1147 Object* code = Memory::Object_at(fp() + offset);
1148 ASSERT(code != NULL);
1149 return reinterpret_cast<Code*>(code);
1150 }
1151
1152
PrintIndex(StringStream * accumulator,PrintMode mode,int index)1153 void StackFrame::PrintIndex(StringStream* accumulator,
1154 PrintMode mode,
1155 int index) {
1156 accumulator->Add((mode == OVERVIEW) ? "%5d: " : "[%d]: ", index);
1157 }
1158
1159
// Prints a description of this frame to |accumulator|: index, function,
// script name and line, receiver and parameters; in non-OVERVIEW modes
// also stack locals, context locals, the expression stack and (when
// FLAG_max_stack_trace_source_length != 0) the function's source code.
void JavaScriptFrame::Print(StringStream* accumulator,
                            PrintMode mode,
                            int index) const {
  HandleScope scope(isolate());
  Object* receiver = this->receiver();
  JSFunction* function = this->function();

  accumulator->PrintSecurityTokenIfChanged(function);
  PrintIndex(accumulator, mode, index);
  Code* code = NULL;
  if (IsConstructor()) accumulator->Add("new ");
  accumulator->PrintFunction(function, receiver, &code);

  // Get scope information for nicer output, if possible. If code is NULL, or
  // doesn't contain scope info, scope_info will return 0 for the number of
  // parameters, stack local variables, context local variables, stack slots,
  // or context slots.
  Handle<ScopeInfo> scope_info(ScopeInfo::Empty(isolate()));

  Handle<SharedFunctionInfo> shared(function->shared());
  scope_info = Handle<ScopeInfo>(shared->scope_info());
  Object* script_obj = shared->script();
  if (script_obj->IsScript()) {
    Handle<Script> script(Script::cast(script_obj));
    accumulator->Add(" [");
    accumulator->PrintName(script->name());

    // Print the exact source line when the pc falls inside the unoptimized
    // code; otherwise fall back to the function's start position ("~").
    Address pc = this->pc();
    if (code != NULL && code->kind() == Code::FUNCTION &&
        pc >= code->instruction_start() && pc < code->instruction_end()) {
      int source_pos = code->SourcePosition(pc);
      int line = GetScriptLineNumberSafe(script, source_pos) + 1;
      accumulator->Add(":%d", line);
    } else {
      int function_start_pos = shared->start_position();
      int line = GetScriptLineNumberSafe(script, function_start_pos) + 1;
      accumulator->Add(":~%d", line);
    }

    accumulator->Add("] ");
  }

  accumulator->Add("(this=%o", receiver);

  // Print the parameters.
  int parameters_count = ComputeParametersCount();
  for (int i = 0; i < parameters_count; i++) {
    accumulator->Add(",");
    // If we have a name for the parameter we print it. Nameless
    // parameters are either because we have more actual parameters
    // than formal parameters or because we have no scope information.
    if (i < scope_info->ParameterCount()) {
      accumulator->PrintName(scope_info->ParameterName(i));
      accumulator->Add("=");
    }
    accumulator->Add("%o", GetParameter(i));
  }

  accumulator->Add(")");
  if (mode == OVERVIEW) {
    accumulator->Add("\n");
    return;
  }
  // No per-slot details are available for optimized frames.
  if (is_optimized()) {
    accumulator->Add(" {\n// optimized frame\n}\n");
    return;
  }
  accumulator->Add(" {\n");

  // Compute the number of locals and expression stack elements.
  int stack_locals_count = scope_info->StackLocalCount();
  int heap_locals_count = scope_info->ContextLocalCount();
  int expressions_count = ComputeExpressionsCount();

  // Print stack-allocated local variables.
  if (stack_locals_count > 0) {
    accumulator->Add(" // stack-allocated locals\n");
  }
  for (int i = 0; i < stack_locals_count; i++) {
    accumulator->Add(" var ");
    accumulator->PrintName(scope_info->StackLocalName(i));
    accumulator->Add(" = ");
    if (i < expressions_count) {
      accumulator->Add("%o", GetExpression(i));
    } else {
      accumulator->Add("// no expression found - inconsistent frame?");
    }
    accumulator->Add("\n");
  }

  // Try to get hold of the context of this frame.
  Context* context = NULL;
  if (this->context() != NULL && this->context()->IsContext()) {
    context = Context::cast(this->context());
  }

  // Print heap-allocated local variables.
  if (heap_locals_count > 0) {
    accumulator->Add(" // heap-allocated locals\n");
  }
  for (int i = 0; i < heap_locals_count; i++) {
    accumulator->Add(" var ");
    accumulator->PrintName(scope_info->ContextLocalName(i));
    accumulator->Add(" = ");
    if (context != NULL) {
      if (i < context->length()) {
        accumulator->Add("%o", context->get(Context::MIN_CONTEXT_SLOTS + i));
      } else {
        accumulator->Add(
            "// warning: missing context slot - inconsistent frame?");
      }
    } else {
      accumulator->Add("// warning: no context found - inconsistent frame?");
    }
    accumulator->Add("\n");
  }

  // Print the expression stack.
  int expressions_start = stack_locals_count;
  if (expressions_start < expressions_count) {
    accumulator->Add(" // expression stack (top to bottom)\n");
  }
  for (int i = expressions_count - 1; i >= expressions_start; i--) {
    // Skip expression slots that live inside a stack handler.
    if (IsExpressionInsideHandler(i)) continue;
    accumulator->Add(" [%02d] : %o\n", i, GetExpression(i));
  }

  // Print details about the function.
  if (FLAG_max_stack_trace_source_length != 0 && code != NULL) {
    SharedFunctionInfo* shared = function->shared();
    accumulator->Add("--------- s o u r c e c o d e ---------\n");
    shared->SourceCodePrint(accumulator, FLAG_max_stack_trace_source_length);
    accumulator->Add("\n-----------------------------------------\n");
  }

  accumulator->Add("}\n\n");
}
1297
1298
Print(StringStream * accumulator,PrintMode mode,int index) const1299 void ArgumentsAdaptorFrame::Print(StringStream* accumulator,
1300 PrintMode mode,
1301 int index) const {
1302 int actual = ComputeParametersCount();
1303 int expected = -1;
1304 JSFunction* function = this->function();
1305 expected = function->shared()->formal_parameter_count();
1306
1307 PrintIndex(accumulator, mode, index);
1308 accumulator->Add("arguments adaptor frame: %d->%d", actual, expected);
1309 if (mode == OVERVIEW) {
1310 accumulator->Add("\n");
1311 return;
1312 }
1313 accumulator->Add(" {\n");
1314
1315 // Print actual arguments.
1316 if (actual > 0) accumulator->Add(" // actual arguments\n");
1317 for (int i = 0; i < actual; i++) {
1318 accumulator->Add(" [%02d] : %o", i, GetParameter(i));
1319 if (expected != -1 && i >= expected) {
1320 accumulator->Add(" // not passed to callee");
1321 }
1322 accumulator->Add("\n");
1323 }
1324
1325 accumulator->Add("}\n\n");
1326 }
1327
1328
// GC visitor for entry frames: visits the single JS-entry stack handler's
// pointers and the frame's return-address slot.
void EntryFrame::Iterate(ObjectVisitor* v) const {
  StackHandlerIterator it(this, top_handler());
  ASSERT(!it.done());
  StackHandler* handler = it.handler();
  ASSERT(handler->is_js_entry());
  handler->Iterate(v, LookupCode());
#ifdef DEBUG
  // Make sure that the entry frame does not contain more than one
  // stack handler.
  it.Advance();
  ASSERT(it.done());
#endif
  IteratePc(v, pc_address(), LookupCode());
}
1343
1344
// Visits all object pointers on the expression stack, from sp() up to and
// including the frame's context slot, letting each embedded stack handler
// visit its own slots.
void StandardFrame::IterateExpressions(ObjectVisitor* v) const {
  const int offset = StandardFrameConstants::kContextOffset;
  Object** base = &Memory::Object_at(sp());
  Object** limit = &Memory::Object_at(fp() + offset) + 1;
  for (StackHandlerIterator it(this, top_handler()); !it.done(); it.Advance()) {
    StackHandler* handler = it.handler();
    // Traverse pointers down to - but not including - the next
    // handler in the handler chain. Update the base to skip the
    // handler and allow the handler to traverse its own pointers.
    const Address address = handler->address();
    v->VisitPointers(base, reinterpret_cast<Object**>(address));
    base = reinterpret_cast<Object**>(address + StackHandlerConstants::kSize);
    // Traverse the pointers in the handler itself.
    handler->Iterate(v, LookupCode());
  }
  // Visit whatever remains between the last handler and the context slot.
  v->VisitPointers(base, limit);
}
1362
1363
Iterate(ObjectVisitor * v) const1364 void JavaScriptFrame::Iterate(ObjectVisitor* v) const {
1365 IterateExpressions(v);
1366 IteratePc(v, pc_address(), LookupCode());
1367 }
1368
1369
Iterate(ObjectVisitor * v) const1370 void InternalFrame::Iterate(ObjectVisitor* v) const {
1371 // Internal frames only have object pointers on the expression stack
1372 // as they never have any arguments.
1373 IterateExpressions(v);
1374 IteratePc(v, pc_address(), LookupCode());
1375 }
1376
1377
// GC visitor: visits the two object-pointer ranges of this frame
// separately, skipping the slots between them.
void StubFailureTrampolineFrame::Iterate(ObjectVisitor* v) const {
  // First range: from sp() up to the register-parameter area.
  Object** base = &Memory::Object_at(sp());
  Object** limit = &Memory::Object_at(fp() +
                                      kFirstRegisterParameterFrameOffset);
  v->VisitPointers(base, limit);
  // Second range: from the marker slot up to and including the context
  // slot.
  base = &Memory::Object_at(fp() + StandardFrameConstants::kMarkerOffset);
  const int offset = StandardFrameConstants::kContextOffset;
  limit = &Memory::Object_at(fp() + offset) + 1;
  v->VisitPointers(base, limit);
  IteratePc(v, pc_address(), LookupCode());
}
1389
1390
GetCallerStackPointer() const1391 Address StubFailureTrampolineFrame::GetCallerStackPointer() const {
1392 return fp() + StandardFrameConstants::kCallerSPOffset;
1393 }
1394
1395
unchecked_code() const1396 Code* StubFailureTrampolineFrame::unchecked_code() const {
1397 Code* trampoline;
1398 StubFailureTrampolineStub(NOT_JS_FUNCTION_STUB_MODE).
1399 FindCodeInCache(&trampoline, isolate());
1400 if (trampoline->contains(pc())) {
1401 return trampoline;
1402 }
1403
1404 StubFailureTrampolineStub(JS_FUNCTION_STUB_MODE).
1405 FindCodeInCache(&trampoline, isolate());
1406 if (trampoline->contains(pc())) {
1407 return trampoline;
1408 }
1409
1410 StubFailureTailCallTrampolineStub().FindCodeInCache(&trampoline, isolate());
1411 if (trampoline->contains(pc())) {
1412 return trampoline;
1413 }
1414
1415 UNREACHABLE();
1416 return NULL;
1417 }
1418
1419
1420 // -------------------------------------------------------------------------
1421
1422
FindJavaScriptFrame(int n)1423 JavaScriptFrame* StackFrameLocator::FindJavaScriptFrame(int n) {
1424 ASSERT(n >= 0);
1425 for (int i = 0; i <= n; i++) {
1426 while (!iterator_.frame()->is_java_script()) iterator_.Advance();
1427 if (i == n) return JavaScriptFrame::cast(iterator_.frame());
1428 iterator_.Advance();
1429 }
1430 UNREACHABLE();
1431 return NULL;
1432 }
1433
1434
1435 // -------------------------------------------------------------------------
1436
1437
GcSafeMapOfCodeSpaceObject(HeapObject * object)1438 static Map* GcSafeMapOfCodeSpaceObject(HeapObject* object) {
1439 MapWord map_word = object->map_word();
1440 return map_word.IsForwardingAddress() ?
1441 map_word.ToForwardingAddress()->map() : map_word.ToMap();
1442 }
1443
1444
GcSafeSizeOfCodeSpaceObject(HeapObject * object)1445 static int GcSafeSizeOfCodeSpaceObject(HeapObject* object) {
1446 return object->SizeFromMap(GcSafeMapOfCodeSpaceObject(object));
1447 }
1448
1449
#ifdef DEBUG
// Checks whether |addr| points into |code|, using only GC-safe accessors.
static bool GcSafeCodeContains(HeapObject* code, Address addr) {
  Map* map = GcSafeMapOfCodeSpaceObject(code);
  ASSERT(map == code->GetHeap()->code_map());
  Address start = code->address();
  return start <= addr && addr < start + code->SizeFromMap(map);
}
#endif
1459
1460
GcSafeCastToCode(HeapObject * object,Address inner_pointer)1461 Code* InnerPointerToCodeCache::GcSafeCastToCode(HeapObject* object,
1462 Address inner_pointer) {
1463 Code* code = reinterpret_cast<Code*>(object);
1464 ASSERT(code != NULL && GcSafeCodeContains(code, inner_pointer));
1465 return code;
1466 }
1467
1468
// Locates the Code object containing |inner_pointer| by scanning code
// space, using only GC-safe accessors so it may run while a GC is in
// progress.
Code* InnerPointerToCodeCache::GcSafeFindCodeForInnerPointer(
    Address inner_pointer) {
  Heap* heap = isolate_->heap();
  // Check if the inner pointer points into a large object chunk.
  LargePage* large_page = heap->lo_space()->FindPage(inner_pointer);
  if (large_page != NULL) {
    return GcSafeCastToCode(large_page->GetObject(), inner_pointer);
  }

  // Iterate through the page until we reach the end or find an object starting
  // after the inner pointer.
  Page* page = Page::FromAddress(inner_pointer);

  // Start from the page's skip-list entry rather than the page start.
  Address addr = page->skip_list()->StartFor(inner_pointer);

  Address top = heap->code_space()->top();
  Address limit = heap->code_space()->limit();

  while (true) {
    // Jump over the gap between the allocation top and the limit of the
    // current linear allocation area, which contains no objects.
    if (addr == top && addr != limit) {
      addr = limit;
      continue;
    }

    HeapObject* obj = HeapObject::FromAddress(addr);
    int obj_size = GcSafeSizeOfCodeSpaceObject(obj);
    Address next_addr = addr + obj_size;
    // The first object extending past the inner pointer must contain it.
    if (next_addr > inner_pointer) return GcSafeCastToCode(obj, inner_pointer);
    addr = next_addr;
  }
}
1500
1501
// Returns the cache entry mapping |inner_pointer| to its containing Code
// object, computing and caching the mapping on a miss.
InnerPointerToCodeCache::InnerPointerToCodeCacheEntry*
    InnerPointerToCodeCache::GetCacheEntry(Address inner_pointer) {
  isolate_->counters()->pc_to_code()->Increment();
  ASSERT(IsPowerOf2(kInnerPointerToCodeCacheSize));
  uint32_t hash = ComputeIntegerHash(
      static_cast<uint32_t>(reinterpret_cast<uintptr_t>(inner_pointer)),
      v8::internal::kZeroHashSeed);
  uint32_t index = hash & (kInnerPointerToCodeCacheSize - 1);
  InnerPointerToCodeCacheEntry* entry = cache(index);
  if (entry->inner_pointer == inner_pointer) {
    isolate_->counters()->pc_to_code_cached()->Increment();
    ASSERT(entry->code == GcSafeFindCodeForInnerPointer(inner_pointer));
  } else {
    // Because this code may be interrupted by a profiling signal that
    // also queries the cache, we cannot update inner_pointer before the code
    // has been set. Otherwise, we risk trying to use a cache entry before
    // the code has been computed.
    entry->code = GcSafeFindCodeForInnerPointer(inner_pointer);
    entry->safepoint_entry.Reset();
    entry->inner_pointer = inner_pointer;
  }
  return entry;
}
1525
1526
1527 // -------------------------------------------------------------------------
1528
1529
// Serializes this handler into |array| at |offset| (kSlotCount slots) and
// pops it off the isolate's handler chain. |previous_handler_offset| is
// the array offset of the previously unwound handler (-1 for the first).
void StackHandler::Unwind(Isolate* isolate,
                          FixedArray* array,
                          int offset,
                          int previous_handler_offset) const {
  STATIC_ASSERT(StackHandlerConstants::kSlotCount >= 5);
  ASSERT_LE(0, offset);
  ASSERT_GE(array->length(), offset + StackHandlerConstants::kSlotCount);
  // Unwinding a stack handler into an array chains it in the opposite
  // direction, re-using the "next" slot as a "previous" link, so that stack
  // handlers can be later re-wound in the correct order. Decode the "state"
  // slot into "index" and "kind" and store them separately, using the fp slot.
  array->set(offset, Smi::FromInt(previous_handler_offset)); // next
  array->set(offset + 1, *code_address()); // code
  array->set(offset + 2, Smi::FromInt(static_cast<int>(index()))); // state
  array->set(offset + 3, *context_address()); // context
  array->set(offset + 4, Smi::FromInt(static_cast<int>(kind()))); // fp

  // Pop this handler off the isolate's handler chain.
  *isolate->handler_address() = next()->address();
}
1549
1550
// Inverse of Unwind: rebuilds this handler on the stack from the slots
// saved in |array| at |offset|, re-links it as the isolate's innermost
// handler, and returns the saved offset of the next handler to rewind
// (-1 when there are no more).
int StackHandler::Rewind(Isolate* isolate,
                         FixedArray* array,
                         int offset,
                         Address fp) {
  STATIC_ASSERT(StackHandlerConstants::kSlotCount >= 5);
  ASSERT_LE(0, offset);
  ASSERT_GE(array->length(), offset + StackHandlerConstants::kSlotCount);
  Smi* prev_handler_offset = Smi::cast(array->get(offset));
  Code* code = Code::cast(array->get(offset + 1));
  Smi* smi_index = Smi::cast(array->get(offset + 2));
  Object* context = array->get(offset + 3);
  Smi* smi_kind = Smi::cast(array->get(offset + 4));

  // Re-encode the "state" word from the separately saved kind and index
  // (Unwind stored them decoded).
  unsigned state = KindField::encode(static_cast<Kind>(smi_kind->value())) |
      IndexField::encode(static_cast<unsigned>(smi_index->value()));

  // Write the handler's slots back onto the stack.
  Memory::Address_at(address() + StackHandlerConstants::kNextOffset) =
      *isolate->handler_address();
  Memory::Object_at(address() + StackHandlerConstants::kCodeOffset) = code;
  Memory::uintptr_at(address() + StackHandlerConstants::kStateOffset) = state;
  Memory::Object_at(address() + StackHandlerConstants::kContextOffset) =
      context;
  SetFp(address() + StackHandlerConstants::kFPOffset, fp);

  // This handler becomes the isolate's innermost handler again.
  *isolate->handler_address() = address();

  return prev_handler_offset->value();
}
1579
1580
1581 // -------------------------------------------------------------------------
1582
NumRegs(RegList reglist)1583 int NumRegs(RegList reglist) {
1584 return CompilerIntrinsics::CountSetBits(reglist);
1585 }
1586
1587
// Table mapping the n'th JS caller-saved register to its architecture
// register code; filled in by SetUpJSCallerSavedCodeData() below.
struct JSCallerSavedCodeData {
  int reg_code[kNumJSCallerSaved];
};

JSCallerSavedCodeData caller_saved_code_data;
1593
SetUpJSCallerSavedCodeData()1594 void SetUpJSCallerSavedCodeData() {
1595 int i = 0;
1596 for (int r = 0; r < kNumRegs; r++)
1597 if ((kJSCallerSaved & (1 << r)) != 0)
1598 caller_saved_code_data.reg_code[i++] = r;
1599
1600 ASSERT(i == kNumJSCallerSaved);
1601 }
1602
1603
JSCallerSavedCode(int n)1604 int JSCallerSavedCode(int n) {
1605 ASSERT(0 <= n && n < kNumJSCallerSaved);
1606 return caller_saved_code_data.reg_code[n];
1607 }
1608
1609
// For each stack frame type, define a zone-allocated wrapper class that
// holds a copy of the frame; used by AllocateFrameCopy below to copy
// frames into zone memory.
#define DEFINE_WRAPPER(type, field)                              \
class field##_Wrapper : public ZoneObject {                      \
 public: /* NOLINT */                                            \
  field##_Wrapper(const field& original) : frame_(original) {    \
  }                                                              \
  field frame_;                                                  \
};
STACK_FRAME_TYPE_LIST(DEFINE_WRAPPER)
#undef DEFINE_WRAPPER
1619
// Returns a zone-allocated copy of |frame|, dispatching on the frame's
// runtime type so that the correct concrete frame class is copied.
static StackFrame* AllocateFrameCopy(StackFrame* frame, Zone* zone) {
#define FRAME_TYPE_CASE(type, field)                                \
  case StackFrame::type: {                                          \
    field##_Wrapper* wrapper =                                      \
        new(zone) field##_Wrapper(*(reinterpret_cast<field*>(frame))); \
    return &wrapper->frame_;                                        \
  }

  switch (frame->type()) {
    STACK_FRAME_TYPE_LIST(FRAME_TYPE_CASE)
    default: UNREACHABLE();
  }
#undef FRAME_TYPE_CASE
  return NULL;
}
1635
1636
CreateStackMap(Isolate * isolate,Zone * zone)1637 Vector<StackFrame*> CreateStackMap(Isolate* isolate, Zone* zone) {
1638 ZoneList<StackFrame*> list(10, zone);
1639 for (StackFrameIterator it(isolate); !it.done(); it.Advance()) {
1640 StackFrame* frame = AllocateFrameCopy(it.frame(), zone);
1641 list.Add(frame, zone);
1642 }
1643 return list.ToVector();
1644 }
1645
1646
1647 } } // namespace v8::internal
1648