// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
4
#include "src/heap/base/stack.h"

#include <limits>

#include "src/base/platform/platform.h"
#include "src/base/sanitizer/asan.h"
#include "src/base/sanitizer/msan.h"
#include "src/base/sanitizer/tsan.h"
13
14 namespace heap {
15 namespace base {
16
17 using IterateStackCallback = void (*)(const Stack*, StackVisitor*, intptr_t*);
18 extern "C" void PushAllRegistersAndIterateStack(const Stack*, StackVisitor*,
19 IterateStackCallback);
20
Stack(const void * stack_start)21 Stack::Stack(const void* stack_start) : stack_start_(stack_start) {}
22
SetStackStart(const void * stack_start)23 void Stack::SetStackStart(const void* stack_start) {
24 stack_start_ = stack_start;
25 }
26
IsOnStack(void * slot) const27 bool Stack::IsOnStack(void* slot) const {
28 DCHECK_NOT_NULL(stack_start_);
29 #ifdef V8_USE_ADDRESS_SANITIZER
30 // If the slot is part of a fake frame, then it is definitely on the stack.
31 if (__asan_addr_is_in_fake_stack(__asan_get_current_fake_stack(),
32 reinterpret_cast<void*>(slot), nullptr,
33 nullptr)) {
34 return true;
35 }
36 // Fall through as there is still a regular stack present even when running
37 // with ASAN fake stacks.
38 #endif // V8_USE_ADDRESS_SANITIZER
39 #if defined(__has_feature)
40 #if __has_feature(safe_stack)
41 if (__builtin___get_unsafe_stack_top() >= slot &&
42 slot >= __builtin___get_unsafe_stack_ptr()) {
43 return true;
44 }
45 #endif // __has_feature(safe_stack)
46 #endif // defined(__has_feature)
47 return v8::base::Stack::GetCurrentStackPosition() <= slot &&
48 slot <= stack_start_;
49 }
50
51 namespace {
52
#ifdef V8_USE_ADDRESS_SANITIZER

// Visits all non-null pointer-sized slots of the ASAN fake frame that
// |address| points into, if any, provided that frame belongs to the stack
// delimited by [stack_end, stack_start].
//
// No ASAN support as accessing fake frames otherwise results in
// "stack-use-after-scope" warnings.
DISABLE_ASAN
// No TSAN support as the stack may not be exclusively owned by the current
// thread, e.g., for interrupt handling. Atomic reads are not enough as the
// other thread may use a lock to synchronize the access.
DISABLE_TSAN
void IterateAsanFakeFrameIfNecessary(StackVisitor* visitor,
                                     void* asan_fake_stack,
                                     const void* stack_start,
                                     const void* stack_end, void* address) {
  // When using ASAN fake stack a pointer to the fake frame is kept on the
  // native frame. In case |address| points to a fake frame of the current
  // stack iterate the fake frame. Frame layout see
  // https://github.com/google/sanitizers/wiki/AddressSanitizerUseAfterReturn
  if (asan_fake_stack) {
    void* fake_frame_begin;
    void* fake_frame_end;
    void* real_stack_frame = __asan_addr_is_in_fake_stack(
        asan_fake_stack, address, &fake_frame_begin, &fake_frame_end);
    if (real_stack_frame) {
      // |address| points to a fake frame. Check that the fake frame is part
      // of this stack.
      if (stack_start >= real_stack_frame && real_stack_frame >= stack_end) {
        // Iterate the fake frame, visiting every non-null slot.
        for (void** current = reinterpret_cast<void**>(fake_frame_begin);
             current < fake_frame_end; ++current) {
          void* addr = *current;
          if (addr == nullptr) continue;
          visitor->VisitPointer(addr);
        }
      }
    }
  }
}

#endif  // V8_USE_ADDRESS_SANITIZER
92
IterateUnsafeStackIfNecessary(StackVisitor * visitor)93 void IterateUnsafeStackIfNecessary(StackVisitor* visitor) {
94 #if defined(__has_feature)
95 #if __has_feature(safe_stack)
96 // Source:
97 // https://github.com/llvm/llvm-project/blob/main/compiler-rt/lib/safestack/safestack.cpp
98 constexpr size_t kSafeStackAlignmentBytes = 16;
99 void* stack_end = __builtin___get_unsafe_stack_ptr();
100 void* stack_start = __builtin___get_unsafe_stack_top();
101 CHECK_GT(stack_start, stack_end);
102 CHECK_EQ(0u, reinterpret_cast<uintptr_t>(stack_end) &
103 (kSafeStackAlignmentBytes - 1));
104 CHECK_EQ(0u, reinterpret_cast<uintptr_t>(stack_start) &
105 (kSafeStackAlignmentBytes - 1));
106 void** current = reinterpret_cast<void**>(stack_end);
107 for (; current < stack_start; ++current) {
108 void* address = *current;
109 if (address == nullptr) continue;
110 visitor->VisitPointer(address);
111 }
112 #endif // __has_feature(safe_stack)
113 #endif // defined(__has_feature)
114 }
115
116 // Called by the trampoline that pushes registers on the stack. This method
117 // should never be inlined to ensure that a possible redzone cannot contain
118 // any data that needs to be scanned.
119 V8_NOINLINE
120 // No ASAN support as method accesses redzones while walking the stack.
121 DISABLE_ASAN
122 // No TSAN support as the stack may not be exclusively owned by the current
123 // thread, e.g., for interrupt handling. Atomic reads are not enough as the
124 // other thread may use a lock to synchronize the access.
125 DISABLE_TSAN
IteratePointersImpl(const Stack * stack,StackVisitor * visitor,intptr_t * stack_end)126 void IteratePointersImpl(const Stack* stack, StackVisitor* visitor,
127 intptr_t* stack_end) {
128 #ifdef V8_USE_ADDRESS_SANITIZER
129 void* asan_fake_stack = __asan_get_current_fake_stack();
130 #endif // V8_USE_ADDRESS_SANITIZER
131 // All supported platforms should have their stack aligned to at least
132 // sizeof(void*).
133 constexpr size_t kMinStackAlignment = sizeof(void*);
134 void** current = reinterpret_cast<void**>(stack_end);
135 CHECK_EQ(0u, reinterpret_cast<uintptr_t>(current) & (kMinStackAlignment - 1));
136 for (; current < stack->stack_start(); ++current) {
137 // MSAN: Instead of unpoisoning the whole stack, the slot's value is copied
138 // into a local which is unpoisoned.
139 void* address = *current;
140 MSAN_MEMORY_IS_INITIALIZED(&address, sizeof(address));
141 if (address == nullptr) continue;
142 visitor->VisitPointer(address);
143 #ifdef V8_USE_ADDRESS_SANITIZER
144 IterateAsanFakeFrameIfNecessary(visitor, asan_fake_stack,
145 stack->stack_start(), stack_end, address);
146 #endif // V8_USE_ADDRESS_SANITIZER
147 }
148 }
149
150 } // namespace
151
IteratePointers(StackVisitor * visitor) const152 void Stack::IteratePointers(StackVisitor* visitor) const {
153 DCHECK_NOT_NULL(stack_start_);
154 PushAllRegistersAndIterateStack(this, visitor, &IteratePointersImpl);
155 // No need to deal with callee-saved registers as they will be kept alive by
156 // the regular conservative stack iteration.
157 // TODO(chromium:1056170): Add support for SIMD and/or filtering.
158 IterateUnsafeStackIfNecessary(visitor);
159 }
160
IteratePointersUnsafe(StackVisitor * visitor,uintptr_t stack_end) const161 void Stack::IteratePointersUnsafe(StackVisitor* visitor,
162 uintptr_t stack_end) const {
163 IteratePointersImpl(this, visitor, reinterpret_cast<intptr_t*>(stack_end));
164 }
165
GetCurrentStackPointerForLocalVariables()166 const void* Stack::GetCurrentStackPointerForLocalVariables() {
167 #if defined(__has_feature)
168 #if __has_feature(safe_stack)
169 return __builtin___get_unsafe_stack_ptr();
170 #else // __has_feature(safe_stack)
171 return v8::base::Stack::GetCurrentStackPosition();
172 #endif // __has_feature(safe_stack)
173 #else // defined(__has_feature)
174 return v8::base::Stack::GetCurrentStackPosition();
175 #endif // defined(__has_feature)
176 }
177
178 } // namespace base
179 } // namespace heap
180