• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2018 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/diagnostics/unwinder.h"
6 
7 #include <algorithm>
8 
9 #include "include/v8-unwinder.h"
10 #include "src/execution/frame-constants.h"
11 #include "src/execution/pointer-authentication.h"
12 
13 namespace v8 {
14 
15 // Architecture specific. Implemented in unwinder-<arch>.cc.
16 void GetCalleeSavedRegistersFromEntryFrame(void* fp,
17                                            RegisterState* register_state);
18 
Load(i::Address address)19 i::Address Load(i::Address address) {
20   return *reinterpret_cast<i::Address*>(address);
21 }
22 
23 namespace {
24 
CalculateEnd(const void * start,size_t length_in_bytes)25 const i::byte* CalculateEnd(const void* start, size_t length_in_bytes) {
26   // Given that the length of the memory range is in bytes and it is not
27   // necessarily aligned, we need to do the pointer arithmetic in byte* here.
28   const i::byte* start_as_byte = reinterpret_cast<const i::byte*>(start);
29   return start_as_byte + length_in_bytes;
30 }
31 
PCIsInCodeRange(const v8::MemoryRange & code_range,void * pc)32 bool PCIsInCodeRange(const v8::MemoryRange& code_range, void* pc) {
33   return pc >= code_range.start &&
34          pc < CalculateEnd(code_range.start, code_range.length_in_bytes);
35 }
36 
37 // This relies on the fact that the code pages are ordered, and that they don't
38 // overlap.
PCIsInCodePages(size_t code_pages_length,const MemoryRange * code_pages,void * pc)39 bool PCIsInCodePages(size_t code_pages_length, const MemoryRange* code_pages,
40                      void* pc) {
41   DCHECK(std::is_sorted(code_pages, code_pages + code_pages_length,
42                         [](const MemoryRange& a, const MemoryRange& b) {
43                           return a.start < b.start;
44                         }));
45 
46   MemoryRange fake_range{pc, 1};
47   auto it =
48       std::upper_bound(code_pages, code_pages + code_pages_length, fake_range,
49                        [](const MemoryRange& a, const MemoryRange& b) {
50                          return a.start < b.start;
51                        });
52   DCHECK_IMPLIES(it != code_pages + code_pages_length, pc < it->start);
53   if (it == code_pages) return false;
54   --it;
55   return it->start <= pc && pc < CalculateEnd(it->start, it->length_in_bytes);
56 }
57 
IsInJSEntryRange(const JSEntryStubs & entry_stubs,void * pc)58 bool IsInJSEntryRange(const JSEntryStubs& entry_stubs, void* pc) {
59   return PCIsInCodeRange(entry_stubs.js_entry_stub.code, pc) ||
60          PCIsInCodeRange(entry_stubs.js_construct_entry_stub.code, pc) ||
61          PCIsInCodeRange(entry_stubs.js_run_microtasks_entry_stub.code, pc);
62 }
63 
IsInUnsafeJSEntryRange(const JSEntryStubs & entry_stubs,void * pc)64 bool IsInUnsafeJSEntryRange(const JSEntryStubs& entry_stubs, void* pc) {
65   return IsInJSEntryRange(entry_stubs, pc);
66 
67   // TODO(petermarshall): We can be more precise by checking whether we are
68   // in JSEntry but after frame setup and before frame teardown, in which case
69   // we are safe to unwind the stack. For now, we bail out if the PC is anywhere
70   // within JSEntry.
71 }
72 
// Returns true if |address| lies within [stack_top, stack_base]. The stack
// grows downwards, so the base is the numerically highest address and the
// top the lowest.
bool AddressIsInStack(const void* address, const void* stack_base,
                      const void* stack_top) {
  if (address > stack_base) return false;
  return address >= stack_top;
}
77 
GetReturnAddressFromFP(void * fp,void * pc,const JSEntryStubs & entry_stubs)78 void* GetReturnAddressFromFP(void* fp, void* pc,
79                              const JSEntryStubs& entry_stubs) {
80   int caller_pc_offset = i::CommonFrameConstants::kCallerPCOffset;
81 // TODO(solanes): Implement the JSEntry range case also for x64 here and below.
82 #if V8_TARGET_ARCH_ARM64 || V8_TARGET_ARCH_ARM
83   if (IsInJSEntryRange(entry_stubs, pc)) {
84     caller_pc_offset = i::EntryFrameConstants::kDirectCallerPCOffset;
85   }
86 #endif
87   i::Address ret_addr =
88       Load(reinterpret_cast<i::Address>(fp) + caller_pc_offset);
89   return reinterpret_cast<void*>(i::PointerAuthentication::StripPAC(ret_addr));
90 }
91 
GetCallerFPFromFP(void * fp,void * pc,const JSEntryStubs & entry_stubs)92 void* GetCallerFPFromFP(void* fp, void* pc, const JSEntryStubs& entry_stubs) {
93   int caller_fp_offset = i::CommonFrameConstants::kCallerFPOffset;
94 #if V8_TARGET_ARCH_ARM64 || V8_TARGET_ARCH_ARM
95   if (IsInJSEntryRange(entry_stubs, pc)) {
96     caller_fp_offset = i::EntryFrameConstants::kDirectCallerFPOffset;
97   }
98 #endif
99   return reinterpret_cast<void*>(
100       Load(reinterpret_cast<i::Address>(fp) + caller_fp_offset));
101 }
102 
GetCallerSPFromFP(void * fp,void * pc,const JSEntryStubs & entry_stubs)103 void* GetCallerSPFromFP(void* fp, void* pc, const JSEntryStubs& entry_stubs) {
104   int caller_sp_offset = i::CommonFrameConstants::kCallerSPOffset;
105 #if V8_TARGET_ARCH_ARM64 || V8_TARGET_ARCH_ARM
106   if (IsInJSEntryRange(entry_stubs, pc)) {
107     caller_sp_offset = i::EntryFrameConstants::kDirectCallerSPOffset;
108   }
109 #endif
110   return reinterpret_cast<void*>(reinterpret_cast<i::Address>(fp) +
111                                  caller_sp_offset);
112 }
113 
114 }  // namespace
115 
TryUnwindV8Frames(const JSEntryStubs & entry_stubs,size_t code_pages_length,const MemoryRange * code_pages,RegisterState * register_state,const void * stack_base)116 bool Unwinder::TryUnwindV8Frames(const JSEntryStubs& entry_stubs,
117                                  size_t code_pages_length,
118                                  const MemoryRange* code_pages,
119                                  RegisterState* register_state,
120                                  const void* stack_base) {
121   const void* stack_top = register_state->sp;
122 
123   void* pc = register_state->pc;
124   if (PCIsInV8(code_pages_length, code_pages, pc) &&
125       !IsInUnsafeJSEntryRange(entry_stubs, pc)) {
126     void* current_fp = register_state->fp;
127     if (!AddressIsInStack(current_fp, stack_base, stack_top)) return false;
128 
129     // Peek at the return address that the caller pushed. If it's in V8, then we
130     // assume the caller frame is a JS frame and continue to unwind.
131     void* next_pc = GetReturnAddressFromFP(current_fp, pc, entry_stubs);
132     while (PCIsInV8(code_pages_length, code_pages, next_pc)) {
133       current_fp = GetCallerFPFromFP(current_fp, pc, entry_stubs);
134       if (!AddressIsInStack(current_fp, stack_base, stack_top)) return false;
135       pc = next_pc;
136       next_pc = GetReturnAddressFromFP(current_fp, pc, entry_stubs);
137     }
138 
139     void* final_sp = GetCallerSPFromFP(current_fp, pc, entry_stubs);
140     if (!AddressIsInStack(final_sp, stack_base, stack_top)) return false;
141     register_state->sp = final_sp;
142 
143     // We don't check that the final FP value is within the stack bounds because
144     // this is just the rbp value that JSEntryStub pushed. On platforms like
145     // Win64 this is not used as a dedicated FP register, and could contain
146     // anything.
147     void* final_fp = GetCallerFPFromFP(current_fp, pc, entry_stubs);
148     register_state->fp = final_fp;
149 
150     register_state->pc = next_pc;
151 
152     // Link register no longer valid after unwinding.
153     register_state->lr = nullptr;
154 
155     if (IsInJSEntryRange(entry_stubs, pc)) {
156       GetCalleeSavedRegistersFromEntryFrame(current_fp, register_state);
157     }
158     return true;
159   }
160   return false;
161 }
162 
PCIsInV8(size_t code_pages_length,const MemoryRange * code_pages,void * pc)163 bool Unwinder::PCIsInV8(size_t code_pages_length, const MemoryRange* code_pages,
164                         void* pc) {
165   return pc && PCIsInCodePages(code_pages_length, code_pages, pc);
166 }
167 
168 }  // namespace v8
169