// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/diagnostics/unwinder.h"

#include <algorithm>

#include "src/execution/pointer-authentication.h"

namespace v8 {

// Architecture specific. Implemented in unwinder-<arch>.cc.
void GetCalleeSavedRegistersFromEntryFrame(void* fp,
                                           RegisterState* register_state);

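// Loads a pointer-sized word from |address|. There is no validity check here;
// callers are expected to validate the address (e.g. against the stack bounds)
// before loading through it.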
i::Address Load(i::Address address) {
  return *reinterpret_cast<i::Address*>(address);
}

namespace {

const i::byte* CalculateEnd(const void* start, size_t length_in_bytes) {
  // Given that the length of the memory range is in bytes and it is not
  // necessarily aligned, we need to do the pointer arithmetic in byte* here.
  const i::byte* start_as_byte = reinterpret_cast<const i::byte*>(start);
  return start_as_byte + length_in_bytes;
}

bool PCIsInCodeRange(const v8::MemoryRange& code_range, void* pc) {
  return pc >= code_range.start &&
         pc < CalculateEnd(code_range.start, code_range.length_in_bytes);
}

// This relies on the fact that the code pages are ordered, and that they don't
// overlap.
bool PCIsInCodePages(size_t code_pages_length, const MemoryRange* code_pages,
                     void* pc) {
  DCHECK(std::is_sorted(code_pages, code_pages + code_pages_length,
                        [](const MemoryRange& a, const MemoryRange& b) {
                          return a.start < b.start;
                        }));

  MemoryRange fake_range{pc, 1};
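  // std::upper_bound finds the first code page whose start address is strictly
  // greater than |pc|, so the only page that can possibly contain |pc| is the
  // one immediately before it.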
  auto it =
      std::upper_bound(code_pages, code_pages + code_pages_length, fake_range,
                       [](const MemoryRange& a, const MemoryRange& b) {
                         return a.start < b.start;
                       });
  DCHECK_IMPLIES(it != code_pages + code_pages_length, pc < it->start);
  if (it == code_pages) return false;
  --it;
  return it->start <= pc && pc < CalculateEnd(it->start, it->length_in_bytes);
}

bool IsInJSEntryRange(const JSEntryStubs& entry_stubs, void* pc) {
  return PCIsInCodeRange(entry_stubs.js_entry_stub.code, pc) ||
         PCIsInCodeRange(entry_stubs.js_construct_entry_stub.code, pc) ||
         PCIsInCodeRange(entry_stubs.js_run_microtasks_entry_stub.code, pc);
}

bool IsInUnsafeJSEntryRange(const JSEntryStubs& entry_stubs, void* pc) {
  return IsInJSEntryRange(entry_stubs, pc);

  // TODO(petermarshall): We can be more precise by checking whether we are
  // in JSEntry but after frame setup and before frame teardown, in which case
  // we are safe to unwind the stack. For now, we bail out if the PC is
  // anywhere within JSEntry.
}

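// The stack grows downwards, so a valid stack address lies between |stack_top|
// (the lowest address) and |stack_base| (the highest address), inclusive.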
bool AddressIsInStack(const void* address, const void* stack_base,
                      const void* stack_top) {
  return address <= stack_base && address >= stack_top;
}

void* GetReturnAddressFromFP(void* fp, void* pc,
                             const JSEntryStubs& entry_stubs) {
  int caller_pc_offset = i::CommonFrameConstants::kCallerPCOffset;
// TODO(solanes): Implement the JSEntry range case also for x64 here and below.
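// Frames set up by the JSEntry stubs store the caller's PC (and FP/SP, in the
// getters below) at different offsets from the frame pointer than ordinary
// frames do, so switch to the entry-frame constants in that case.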
#if V8_TARGET_ARCH_ARM64 || V8_TARGET_ARCH_ARM
  if (IsInJSEntryRange(entry_stubs, pc)) {
    caller_pc_offset = i::EntryFrameConstants::kDirectCallerPCOffset;
  }
#endif
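  // On architectures with pointer authentication (e.g. arm64), the return
  // address stored on the stack may carry a PAC signature; strip it so that
  // the result is a plain code address. Stripping is a no-op elsewhere.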
  i::Address ret_addr =
      Load(reinterpret_cast<i::Address>(fp) + caller_pc_offset);
  return reinterpret_cast<void*>(i::PointerAuthentication::StripPAC(ret_addr));
}

void* GetCallerFPFromFP(void* fp, void* pc, const JSEntryStubs& entry_stubs) {
  int caller_fp_offset = i::CommonFrameConstants::kCallerFPOffset;
#if V8_TARGET_ARCH_ARM64 || V8_TARGET_ARCH_ARM
  if (IsInJSEntryRange(entry_stubs, pc)) {
    caller_fp_offset = i::EntryFrameConstants::kDirectCallerFPOffset;
  }
#endif
  return reinterpret_cast<void*>(
      Load(reinterpret_cast<i::Address>(fp) + caller_fp_offset));
}

void* GetCallerSPFromFP(void* fp, void* pc, const JSEntryStubs& entry_stubs) {
  int caller_sp_offset = i::CommonFrameConstants::kCallerSPOffset;
#if V8_TARGET_ARCH_ARM64 || V8_TARGET_ARCH_ARM
  if (IsInJSEntryRange(entry_stubs, pc)) {
    caller_sp_offset = i::EntryFrameConstants::kDirectCallerSPOffset;
  }
#endif
  return reinterpret_cast<void*>(reinterpret_cast<i::Address>(fp) +
                                 caller_sp_offset);
}

}  // namespace

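// Attempts to unwind a contiguous sequence of V8 frames. On success, updates
// |register_state| to describe the first caller frame outside V8 code and
// returns true; on failure, returns false without modifying |register_state|.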
bool Unwinder::TryUnwindV8Frames(const JSEntryStubs& entry_stubs,
                                 size_t code_pages_length,
                                 const MemoryRange* code_pages,
                                 RegisterState* register_state,
                                 const void* stack_base) {
  const void* stack_top = register_state->sp;

  void* pc = register_state->pc;
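  // Only unwind if the PC is inside V8 code, and not inside a JSEntry stub,
  // where the frame may not be fully set up or already torn down.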
  if (PCIsInV8(code_pages_length, code_pages, pc) &&
      !IsInUnsafeJSEntryRange(entry_stubs, pc)) {
    void* current_fp = register_state->fp;
    if (!AddressIsInStack(current_fp, stack_base, stack_top)) return false;

    // Peek at the return address that the caller pushed. If it's in V8, then
    // we assume the caller frame is a JS frame and continue to unwind.
    void* next_pc = GetReturnAddressFromFP(current_fp, pc, entry_stubs);
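    // Walk the chain of frame pointers, validating each new FP against the
    // stack bounds, until the next return address leaves V8 code.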
    while (PCIsInV8(code_pages_length, code_pages, next_pc)) {
      current_fp = GetCallerFPFromFP(current_fp, pc, entry_stubs);
      if (!AddressIsInStack(current_fp, stack_base, stack_top)) return false;
      pc = next_pc;
      next_pc = GetReturnAddressFromFP(current_fp, pc, entry_stubs);
    }

    void* final_sp = GetCallerSPFromFP(current_fp, pc, entry_stubs);
    if (!AddressIsInStack(final_sp, stack_base, stack_top)) return false;
    register_state->sp = final_sp;

    // We don't check that the final FP value is within the stack bounds
    // because this is just the rbp value that JSEntryStub pushed. On platforms
    // like Win64 this is not used as a dedicated FP register, and could
    // contain anything.
    void* final_fp = GetCallerFPFromFP(current_fp, pc, entry_stubs);
    register_state->fp = final_fp;

    register_state->pc = next_pc;

    // Link register no longer valid after unwinding.
    register_state->lr = nullptr;

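    // If unwinding stopped inside a JSEntry stub, the callee-saved registers
    // that the stub spilled on entry still need to be restored into
    // |register_state|; the architecture-specific helper handles that.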
    if (IsInJSEntryRange(entry_stubs, pc)) {
      GetCalleeSavedRegistersFromEntryFrame(current_fp, register_state);
    }
    return true;
  }
  return false;
}

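// A null PC can never be in V8 code, so check it before the page lookup.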
bool Unwinder::PCIsInV8(size_t code_pages_length, const MemoryRange* code_pages,
                        void* pc) {
  return pc && PCIsInCodePages(code_pages_length, code_pages, pc);
}

}  // namespace v8