1 // Copyright 2021 The Chromium Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifdef UNSAFE_BUFFERS_BUILD
6 // TODO(crbug.com/40284755): Remove this and spanify to fix the errors.
7 #pragma allow_unsafe_buffers
8 #endif
9
10 #include "base/profiler/chrome_unwinder_android_32.h"
11
12 #include <algorithm>
13
14 #include "base/check_op.h"
15 #include "base/memory/aligned_memory.h"
16 #include "base/notreached.h"
17 #include "base/numerics/checked_math.h"
18 #include "base/profiler/chrome_unwind_info_android_32.h"
19
20 namespace base {
21 namespace {
22
GetRegisterPointer(RegisterContext * context,uint8_t register_index)23 uintptr_t* GetRegisterPointer(RegisterContext* context,
24 uint8_t register_index) {
25 DCHECK_LE(register_index, 15);
26 static unsigned long RegisterContext::*const registers[16] = {
27 &RegisterContext::arm_r0, &RegisterContext::arm_r1,
28 &RegisterContext::arm_r2, &RegisterContext::arm_r3,
29 &RegisterContext::arm_r4, &RegisterContext::arm_r5,
30 &RegisterContext::arm_r6, &RegisterContext::arm_r7,
31 &RegisterContext::arm_r8, &RegisterContext::arm_r9,
32 &RegisterContext::arm_r10, &RegisterContext::arm_fp,
33 &RegisterContext::arm_ip, &RegisterContext::arm_sp,
34 &RegisterContext::arm_lr, &RegisterContext::arm_pc,
35 };
36 return reinterpret_cast<uintptr_t*>(&(context->*registers[register_index]));
37 }
38
39 // Pops the value on the top of stack out and assign it to target register.
40 // This is equivalent to arm instruction `Pop r[n]` where n = `register_index`.
41 // Returns whether the pop is successful.
PopRegister(RegisterContext * context,uint8_t register_index)42 bool PopRegister(RegisterContext* context, uint8_t register_index) {
43 const uintptr_t sp = RegisterContextStackPointer(context);
44 const uintptr_t stacktop_value = *reinterpret_cast<uintptr_t*>(sp);
45 const auto new_sp = CheckedNumeric<uintptr_t>(sp) + sizeof(uintptr_t);
46 const bool success =
47 new_sp.AssignIfValid(&RegisterContextStackPointer(context));
48 if (success) {
49 *GetRegisterPointer(context, register_index) = stacktop_value;
50 }
51 return success;
52 }
53
54 // Decodes the given bytes as an ULEB128 format number and advances the bytes
55 // pointer by the size of ULEB128.
56 //
57 // This function assumes the given bytes are in valid ULEB128
58 // format and the decoded number should not overflow `uintptr_t` type.
DecodeULEB128(const uint8_t * & bytes)59 uintptr_t DecodeULEB128(const uint8_t*& bytes) {
60 uintptr_t value = 0;
61 unsigned shift = 0;
62 do {
63 DCHECK_LE(shift, sizeof(uintptr_t) * 8); // ULEB128 must not overflow.
64 value += (*bytes & 0x7fu) << shift;
65 shift += 7;
66 } while (*bytes++ & 0x80);
67 return value;
68 }
69
GetTopBits(uint8_t byte,unsigned bits)70 uint8_t GetTopBits(uint8_t byte, unsigned bits) {
71 DCHECK_LE(bits, 8u);
72 return byte >> (8 - bits);
73 }
74
75 } // namespace
76
// Constructs an unwinder for the Chrome module loaded at
// `chrome_module_base_address`, whose executable .text section starts at
// `text_section_start_address`. `unwind_info` holds the module's compact
// unwind tables; NOTE(review): it appears to be retained by reference in
// `unwind_info_`, so the caller must keep it alive — confirm against the
// header.
ChromeUnwinderAndroid32::ChromeUnwinderAndroid32(
    const ChromeUnwindInfoAndroid32& unwind_info,
    uintptr_t chrome_module_base_address,
    uintptr_t text_section_start_address)
    : unwind_info_(unwind_info),
      chrome_module_base_address_(chrome_module_base_address),
      text_section_start_address_(text_section_start_address) {
  // The .text section must lie strictly above the module base address;
  // TryUnwind relies on this when computing pc offsets from the section start.
  DCHECK_GT(text_section_start_address_, chrome_module_base_address_);
}
86
CanUnwindFrom(const Frame & current_frame) const87 bool ChromeUnwinderAndroid32::CanUnwindFrom(const Frame& current_frame) const {
88 return current_frame.module &&
89 current_frame.module->GetBaseAddress() == chrome_module_base_address_;
90 }
91
// Unwinds as many consecutive frames within the Chrome module as possible,
// appending each recovered caller frame to `stack` and advancing
// `thread_context` (sp/pc/registers) accordingly. Returns kAborted when the
// unwind tables cannot be applied or any sanity check on sp/pc fails, and
// kUnrecognizedFrame once unwinding reaches a frame outside the Chrome
// module. `capture_state` is unused by this unwinder. `stack_top` is the
// exclusive upper bound of valid stack memory.
UnwindResult ChromeUnwinderAndroid32::TryUnwind(
    UnwinderStateCapture* capture_state,
    RegisterContext* thread_context,
    uintptr_t stack_top,
    std::vector<Frame>* stack) {
  DCHECK(CanUnwindFrom(stack->back()));
  // sp at entry to the frame currently being unwound; updated after each
  // successfully unwound frame and used to verify sp only moves upward.
  uintptr_t frame_initial_sp = RegisterContextStackPointer(thread_context);
  const uintptr_t unwind_initial_pc =
      RegisterContextInstructionPointer(thread_context);

  do {
    const uintptr_t pc = RegisterContextInstructionPointer(thread_context);
    const uintptr_t instruction_byte_offset_from_text_section_start =
        pc - text_section_start_address_;

    // Map the pc to an entry in the function offset table via the two-level
    // page table; failure means the pc has no unwind info.
    const std::optional<FunctionOffsetTableIndex> function_offset_table_index =
        GetFunctionTableIndexFromInstructionOffset(
            unwind_info_.page_table, unwind_info_.function_table,
            instruction_byte_offset_from_text_section_start);

    if (!function_offset_table_index) {
      return UnwindResult::kAborted;
    }

    const uint32_t current_unwind_instruction_index =
        GetFirstUnwindInstructionIndexFromFunctionOffsetTableEntry(
            &unwind_info_
                 .function_offset_table[function_offset_table_index
                                            ->function_offset_table_byte_index],
            function_offset_table_index
                ->instruction_offset_from_function_start);

    const uint8_t* current_unwind_instruction =
        &unwind_info_
             .unwind_instruction_table[current_unwind_instruction_index];

    UnwindInstructionResult instruction_result;
    bool pc_was_updated = false;

    // Execute unwind instructions one at a time, validating sp after each
    // step: it must stay within [frame_initial_sp, stack_top] and remain
    // word-aligned, otherwise the unwind data is untrustworthy.
    do {
      instruction_result = ExecuteUnwindInstruction(
          current_unwind_instruction, pc_was_updated, thread_context);
      const uintptr_t sp = RegisterContextStackPointer(thread_context);
      if (sp > stack_top || sp < frame_initial_sp ||
          !IsAligned(sp, sizeof(uintptr_t))) {
        return UnwindResult::kAborted;
      }
    } while (instruction_result ==
             UnwindInstructionResult::kInstructionPending);

    if (instruction_result == UnwindInstructionResult::kAborted) {
      return UnwindResult::kAborted;
    }

    DCHECK_EQ(instruction_result, UnwindInstructionResult::kCompleted);

    const uintptr_t new_sp = RegisterContextStackPointer(thread_context);
    // Validate SP is properly aligned across frames.
    // See
    // https://community.arm.com/arm-community-blogs/b/architectures-and-processors-blog/posts/using-the-stack-in-aarch32-and-aarch64
    // for SP alignment rules.
    if (!IsAligned(new_sp, 2 * sizeof(uintptr_t))) {
      return UnwindResult::kAborted;
    }
    // Validate that SP does not decrease across frames.
    const bool is_leaf_frame = stack->size() == 1;
    // Each frame unwind is expected to only pop from stack memory, which will
    // cause sp to increase.
    // Non-Leaf frames are expected to at least pop lr off stack, so sp is
    // expected to strictly increase for non-leaf frames.
    if (new_sp <= (is_leaf_frame ? frame_initial_sp - 1 : frame_initial_sp)) {
      return UnwindResult::kAborted;
    }

    // For leaf functions, if SP does not change, PC must change, otherwise,
    // the overall execution state will be the same before/after the frame
    // unwind.
    if (is_leaf_frame && new_sp == frame_initial_sp &&
        RegisterContextInstructionPointer(thread_context) ==
            unwind_initial_pc) {
      return UnwindResult::kAborted;
    }

    frame_initial_sp = new_sp;

    // Record the caller frame just recovered; the loop continues while the
    // new pc still falls inside the Chrome module.
    stack->emplace_back(RegisterContextInstructionPointer(thread_context),
                        module_cache()->GetModuleForAddress(
                            RegisterContextInstructionPointer(thread_context)));
  } while (CanUnwindFrom(stack->back()));
  return UnwindResult::kUnrecognizedFrame;
}
183
// Executes one compact unwind instruction (ARM EHABI-style encoding) read
// from `instruction`, advancing `instruction` past the bytes consumed and
// mutating `thread_context` (sp and/or registers). Sets `pc_was_updated` when
// an instruction pops pc directly off the stack, which suppresses the lr->pc
// copy performed by the Finish (0xb0) instruction. Returns:
// - kInstructionPending: more instructions follow for this frame.
// - kCompleted: frame unwind finished (Finish seen).
// - kAborted: explicit refuse-to-unwind, sp over/underflow, or a failed pop.
UnwindInstructionResult ExecuteUnwindInstruction(
    const uint8_t*& instruction,
    bool& pc_was_updated,
    RegisterContext* thread_context) {
  if (GetTopBits(*instruction, 2) == 0b00) {
    // 00xxxxxx
    // vsp = vsp + (xxxxxx << 2) + 4. Covers range 0x04-0x100 inclusive.
    const uintptr_t offset = ((*instruction++ & 0b00111111u) << 2) + 4;

    const auto new_sp =
        CheckedNumeric<uintptr_t>(RegisterContextStackPointer(thread_context)) +
        offset;
    if (!new_sp.AssignIfValid(&RegisterContextStackPointer(thread_context))) {
      return UnwindInstructionResult::kAborted;
    }
  } else if (GetTopBits(*instruction, 2) == 0b01) {
    // 01xxxxxx
    // vsp = vsp - (xxxxxx << 2) - 4. Covers range 0x04-0x100 inclusive.
    const uintptr_t offset = ((*instruction++ & 0b00111111u) << 2) + 4;
    const auto new_sp =
        CheckedNumeric<uintptr_t>(RegisterContextStackPointer(thread_context)) -
        offset;
    if (!new_sp.AssignIfValid(&RegisterContextStackPointer(thread_context))) {
      return UnwindInstructionResult::kAborted;
    }
  } else if (GetTopBits(*instruction, 4) == 0b1001) {
    // 1001nnnn (nnnn != 13,15)
    // Set vsp = r[nnnn].
    const uint8_t register_index = *instruction++ & 0b00001111;
    DCHECK_NE(register_index, 13);  // Must not set sp to sp.
    DCHECK_NE(register_index, 15);  // Must not set sp to pc.
    // Note: We shouldn't have cases that are setting caller-saved registers
    // using this instruction.
    DCHECK_GE(register_index, 4);

    RegisterContextStackPointer(thread_context) =
        *GetRegisterPointer(thread_context, register_index);
  } else if (GetTopBits(*instruction, 5) == 0b10101) {
    // 10101nnn
    // Pop r4-r[4+nnn], r14
    const uint8_t max_register_index = (*instruction++ & 0b00000111u) + 4;
    for (uint8_t n = 4; n <= max_register_index; n++) {
      if (!PopRegister(thread_context, n)) {
        return UnwindInstructionResult::kAborted;
      }
    }
    // r14 (lr) is always popped last by this encoding.
    if (!PopRegister(thread_context, 14)) {
      return UnwindInstructionResult::kAborted;
    }
  } else if (*instruction == 0b10000000 && *(instruction + 1) == 0) {
    // 10000000 00000000
    // Refuse to unwind.
    instruction += 2;
    return UnwindInstructionResult::kAborted;
  } else if (GetTopBits(*instruction, 4) == 0b1000) {
    // Note: this branch must come after the 0x80 0x00 check above, since that
    // bit pattern would otherwise match here with an empty register mask.
    const uint32_t register_bitmask =
        ((*instruction & 0xfu) << 8) + *(instruction + 1);
    instruction += 2;
    // 1000iiii iiiiiiii
    // Pop up to 12 integer registers under masks {r15-r12}, {r11-r4}
    for (uint8_t register_index = 4; register_index < 16; register_index++) {
      if (register_bitmask & (1 << (register_index - 4))) {
        if (!PopRegister(thread_context, register_index)) {
          return UnwindInstructionResult::kAborted;
        }
      }
    }
    // If we set pc (r15) with value on stack, we should no longer copy lr to
    // pc on COMPLETE.
    pc_was_updated |= register_bitmask & (1 << (15 - 4));
  } else if (*instruction == 0b10110000) {
    // Finish
    // Code 0xb0, Finish, copies VRS[r14] to VRS[r15] and also
    // indicates that no further instructions are to be processed for this
    // frame.

    instruction++;
    // Only copy lr to pc when pc is not updated by other instructions before.
    if (!pc_was_updated) {
      thread_context->arm_pc = thread_context->arm_lr;
    }

    return UnwindInstructionResult::kCompleted;
  } else if (*instruction == 0b10110010) {
    // 10110010 uleb128
    // vsp = vsp + 0x204 + (uleb128 << 2)
    // (for vsp increments of 0x104-0x200, use 00xxxxxx twice)
    instruction++;
    const auto new_sp =
        CheckedNumeric<uintptr_t>(RegisterContextStackPointer(thread_context)) +
        (CheckedNumeric<uintptr_t>(DecodeULEB128(instruction)) << 2) + 0x204;

    if (!new_sp.AssignIfValid(&RegisterContextStackPointer(thread_context))) {
      return UnwindInstructionResult::kAborted;
    }
  } else {
    // Any other encoding is not produced by Chrome's unwind-table generator.
    NOTREACHED();
  }
  return UnwindInstructionResult::kInstructionPending;
}
284
GetFirstUnwindInstructionIndexFromFunctionOffsetTableEntry(const uint8_t * function_offset_table_entry,int instruction_offset_from_function_start)285 uintptr_t GetFirstUnwindInstructionIndexFromFunctionOffsetTableEntry(
286 const uint8_t* function_offset_table_entry,
287 int instruction_offset_from_function_start) {
288 DCHECK_GE(instruction_offset_from_function_start, 0);
289 const uint8_t* current_function_offset_table_position =
290 function_offset_table_entry;
291
292 do {
293 const uintptr_t function_offset =
294 DecodeULEB128(current_function_offset_table_position);
295
296 const uintptr_t unwind_table_index =
297 DecodeULEB128(current_function_offset_table_position);
298
299 // Each function always ends at 0 offset. It is guaranteed to find an entry
300 // as long as the function offset table is well-structured.
301 if (function_offset <=
302 static_cast<uint32_t>(instruction_offset_from_function_start)) {
303 return unwind_table_index;
304 }
305
306 } while (true);
307
308 NOTREACHED();
309 }
310
// Maps a byte offset into the .text section to the function offset table
// entry for the enclosing function, using the two-level lookup structure:
// `page_start_instructions[page]` gives the index of the first
// FunctionTableEntry on that 128KiB page, and entries within a page are
// sorted by their 16-bit page instruction offset. Returns nullopt when the
// offset falls past the last page. Handles functions that span multiple
// pages by walking backward to the page that actually contains the
// function's start entry.
const std::optional<FunctionOffsetTableIndex>
GetFunctionTableIndexFromInstructionOffset(
    span<const uint32_t> page_start_instructions,
    span<const FunctionTableEntry> function_offset_table_indices,
    uint32_t instruction_byte_offset_from_text_section_start) {
  DCHECK(!page_start_instructions.empty());
  DCHECK(!function_offset_table_indices.empty());
  // First function on first page should always start from 0 offset.
  DCHECK_EQ(function_offset_table_indices.front()
                .function_start_address_page_instruction_offset,
            0ul);

  // Offsets are in 2-byte (Thumb instruction) units: >> 1 converts bytes to
  // instructions; each page covers 2^16 instructions = 2^17 bytes.
  const uint16_t page_number =
      instruction_byte_offset_from_text_section_start >> 17;
  const uint16_t page_instruction_offset =
      (instruction_byte_offset_from_text_section_start >> 1) &
      0xffff;  // 16 bits.

  // Invalid instruction_byte_offset_from_text_section_start:
  // instruction_byte_offset_from_text_section_start falls after the last page.
  if (page_number >= page_start_instructions.size()) {
    return std::nullopt;
  }

  // [function_table_entry_start, function_table_entry_end) is the slice of
  // the function table belonging to `page_number`.
  const span<const FunctionTableEntry>::iterator function_table_entry_start =
      function_offset_table_indices.begin() +
      checked_cast<ptrdiff_t>(page_start_instructions[page_number]);
  const span<const FunctionTableEntry>::iterator function_table_entry_end =
      page_number == page_start_instructions.size() - 1
          ? function_offset_table_indices.end()
          : function_offset_table_indices.begin() +
                checked_cast<ptrdiff_t>(
                    page_start_instructions[page_number + 1]);

  // `std::upper_bound` finds first element that > target in range
  // [function_table_entry_start, function_table_entry_end).
  const auto first_larger_entry_location = std::upper_bound(
      function_table_entry_start, function_table_entry_end,
      page_instruction_offset,
      [](uint16_t page_instruction_offset, const FunctionTableEntry& entry) {
        return page_instruction_offset <
               entry.function_start_address_page_instruction_offset;
      });

  // Offsets the element found by 1 to get the biggest element that <= target.
  const auto entry_location = first_larger_entry_location - 1;

  // When all offsets in current range > page_instruction_offset (including when
  // there is no entry in current range), the `FunctionTableEntry` we are
  // looking for is not within the function_offset_table_indices range we are
  // inspecting, because the function is too long that it spans multiple pages.
  //
  // We need to locate the previous entry on function_offset_table_indices and
  // find its corresponding page_table index.
  //
  // Example:
  // +--------------------+--------------------+
  // | <-----2 byte-----> | <-----2 byte-----> |
  // +--------------------+--------------------+
  // | Page Offset        | Offset Table Index |
  // +--------------------+--------------------+-----
  // | 10                 | XXX                |  |
  // +--------------------+--------------------+  |
  // | ...                | ...                |Page 0x100
  // +--------------------+--------------------+  |
  // | 65500              | ZZZ                |  |
  // +--------------------+--------------------+----- Page 0x101 is empty
  // | 200                | AAA                |  |
  // +--------------------+--------------------+  |
  // | ...                | ...                |Page 0x102
  // +--------------------+--------------------+  |
  // | 65535              | BBB                |  |
  // +--------------------+--------------------+-----
  //
  // Example:
  // For
  // - page_number = 0x100, page_instruction_offset >= 65535
  // - page_number = 0x101, all page_instruction_offset
  // - page_number = 0x102, page_instruction_offset < 200
  // We should be able to map them all to entry [65500, ZZZ] in page 0x100.

  // Finds the page_number that corresponds to `entry_location`. The page
  // might not be the page we are inspecting, when the function spans over
  // multiple pages.
  uint16_t function_start_page_number = page_number;
  while (function_offset_table_indices.begin() +
             checked_cast<ptrdiff_t>(
                 page_start_instructions[function_start_page_number]) >
         entry_location) {
    // First page in page table must not be empty.
    DCHECK_NE(function_start_page_number, 0);
    function_start_page_number--;
  };

  // Reconstruct the absolute instruction offset of the function start from
  // its page number and in-page offset.
  const uint32_t function_start_address_instruction_offset =
      (uint32_t{function_start_page_number} << 16) +
      entry_location->function_start_address_page_instruction_offset;

  const int instruction_offset_from_function_start =
      static_cast<int>((instruction_byte_offset_from_text_section_start >> 1) -
                       function_start_address_instruction_offset);

  DCHECK_GE(instruction_offset_from_function_start, 0);
  return FunctionOffsetTableIndex{
      instruction_offset_from_function_start,
      entry_location->function_offset_table_byte_index,
  };
}
419
420 } // namespace base
421