// Copyright 2021 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef BASE_ALLOCATOR_PARTITION_ALLOCATOR_STARSCAN_STATE_BITMAP_H_
#define BASE_ALLOCATOR_PARTITION_ALLOCATOR_STARSCAN_STATE_BITMAP_H_

#include <climits>
#include <cstddef>
#include <cstdint>

#include <algorithm>
#include <array>
#include <atomic>
#include <tuple>
#include <utility>

#include "base/allocator/partition_allocator/partition_alloc_base/bits.h"
#include "base/allocator/partition_allocator/partition_alloc_base/compiler_specific.h"
#include "base/allocator/partition_allocator/partition_alloc_check.h"

namespace partition_alloc::internal {

// Bitmap which tracks allocation states. An allocation can be in one of 3
// states:
// - freed (00),
// - allocated (11),
// - quarantined (01 or 10, depending on the *Scan epoch).
//
// The state machine of allocation states:
//         +-------------+                +-------------+
//         |             |    malloc()    |             |
//         |    Freed    +--------------->|  Allocated  |
//         |    (00)     |    (or 11)     |    (11)     |
//         |             |                |             |
//         +-------------+                +------+------+
//                ^                              |
//                |                              |
//    real_free() | (and 00)              free() | (and 01(10))
//                |                              |
//                |       +-------------+        |
//                |       |             |        |
//                +-------+ Quarantined |<-------+
//                        |   (01,10)   |
//                        |             |
//                        +-------------+
//                             ^           |
//                             |  mark()   |
//                             +-----------+
//                                (xor 11)
//
// The bitmap can be safely accessed from multiple threads, but this doesn't
// imply visibility on the data (i.e. no ordering guarantees, since relaxed
// atomics are used underneath). The bitmap itself must be created inside a
// page, whose size and alignment are specified as the template arguments
// |PageSize| and |PageAlignment|. |AllocationAlignment| specifies the minimal
// alignment of objects that are allocated inside a page (and serves as the
// granularity of the bitmap).
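//
// A minimal usage sketch (the parameter values and the client code below are
// illustrative, not prescribed by this header):
//
//   using Bitmap = StateBitmap</*PageSize=*/1 << 21,
//                              /*PageAlignment=*/1 << 21,
//                              /*AllocationAlignment=*/16>;
//   // The bitmap must live inside the page it describes.
//   auto* bitmap = new (reserved_space_in_page) Bitmap();
//   bitmap->Allocate(object_address);
//   bitmap->Quarantine(object_address, current_epoch);
//   bitmap->IterateQuarantined([](uintptr_t object) { /* visit */ });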
template <size_t PageSize, size_t PageAlignment, size_t AllocationAlignment>
class StateBitmap final {
  enum class State : uint8_t {
    kFreed = 0b00,
    kQuarantined1 = 0b01,
    kQuarantined2 = 0b10,
    kAlloced = 0b11,
    kNumOfStates = 4,
  };

  using CellType = uintptr_t;
  static constexpr size_t kBitsPerCell = sizeof(CellType) * CHAR_BIT;
  static constexpr size_t kBitsNeededForAllocation =
      base::bits::Log2Floor(static_cast<size_t>(State::kNumOfStates));
  static constexpr CellType kStateMask = (1 << kBitsNeededForAllocation) - 1;

  static constexpr size_t kBitmapSize =
      (PageSize + ((kBitsPerCell * AllocationAlignment) - 1)) /
      (kBitsPerCell * AllocationAlignment) * kBitsNeededForAllocation;
  static constexpr size_t kPageOffsetMask = PageAlignment - 1;
  static constexpr size_t kPageBaseMask = ~kPageOffsetMask;
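  // Worked example (illustrative values, not prescribed by this header): with
  // PageSize == 1 << 21, AllocationAlignment == 16 and 64-bit cells,
  // kBitmapSize == 2097152 / (64 * 16) * 2 == 4096 cells, i.e. 32 KiB.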

 public:
  using Epoch = size_t;
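  // Only the parity of the epoch matters: it selects which of the two
  // quarantine encodings (01 or 10) denotes "quarantined in the current *Scan
  // cycle".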

  static constexpr size_t kPageSize = PageSize;
  static constexpr size_t kPageAlignment = PageAlignment;
  static constexpr size_t kAllocationAlignment = AllocationAlignment;
  static constexpr size_t kMaxEntries =
      (kBitmapSize / kBitsNeededForAllocation) * kBitsPerCell;

  inline StateBitmap();

  // Sets the bits corresponding to |address| as allocated.
  PA_ALWAYS_INLINE void Allocate(uintptr_t address);

  // Sets the bits corresponding to |address| as quarantined. Must be called
  // only once per allocation, in which case it returns |true|. If the object
  // was already quarantined or freed before, returns |false|.
  PA_ALWAYS_INLINE bool Quarantine(uintptr_t address, Epoch epoch);

  // Marks ("promotes") a quarantined object as reachable. Returns |true| on
  // success; returns |false| if the object was already marked.
  PA_ALWAYS_INLINE bool MarkQuarantinedAsReachable(uintptr_t address,
                                                   Epoch epoch);

  // Sets the bits corresponding to |address| as freed.
  PA_ALWAYS_INLINE void Free(uintptr_t address);

  // Getters that check object state.
  PA_ALWAYS_INLINE bool IsAllocated(uintptr_t address) const;
  PA_ALWAYS_INLINE bool IsQuarantined(uintptr_t address) const;
  PA_ALWAYS_INLINE bool IsFreed(uintptr_t address) const;

  // Iterates over objects, depending on their state.
  //
  // The callback is of type
  //   void(uintptr_t object_start)
  template <typename Callback>
  inline void IterateAllocated(Callback) const;
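  // The two IterateQuarantined() overloads below are disambiguated via SFINAE
  // on the callback's arity (one argument vs. two).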
  // The callback is of type
  //   void(uintptr_t object_start)
  template <typename Callback, decltype(std::declval<Callback>()(0), 0) = 0>
  inline void IterateQuarantined(Callback) const;
  // The callback is of type
  //   void(uintptr_t object_start, bool is_marked)
  template <typename Callback,
            decltype(std::declval<Callback>()(0, true), 0) = 0>
  inline void IterateQuarantined(size_t epoch, Callback) const;
  // The callback is of type
  //   void(uintptr_t object_start)
  template <typename Callback>
  inline void IterateUnmarkedQuarantined(size_t epoch, Callback) const;
  // The callback is of type
  //   void(uintptr_t object_start)
  // Similar to the one above, but also frees (clears) the iterated bits.
  template <typename Callback>
  inline void IterateUnmarkedQuarantinedAndFree(size_t epoch, Callback);

  inline void Clear();

 private:
  std::atomic<CellType>& AsAtomicCell(size_t cell_index) {
    return reinterpret_cast<std::atomic<CellType>&>(bitmap_[cell_index]);
  }
  const std::atomic<CellType>& AsAtomicCell(size_t cell_index) const {
    return reinterpret_cast<const std::atomic<CellType>&>(bitmap_[cell_index]);
  }

  PA_ALWAYS_INLINE unsigned GetBits(uintptr_t address) const;

  struct FilterQuarantine {
    PA_ALWAYS_INLINE bool operator()(CellType cell) const;
    const size_t epoch;
  };

  struct FilterUnmarkedQuarantine {
    PA_ALWAYS_INLINE bool operator()(CellType cell) const;
    const size_t epoch;
  };

  struct FilterAllocated {
    PA_ALWAYS_INLINE bool operator()(CellType cell) const;
    const size_t epoch;
  };

  // Simply calls the callback.
  struct SimpleCallbackForwarder {
    PA_ALWAYS_INLINE explicit SimpleCallbackForwarder(size_t epoch) {}

    template <typename Callback>
    PA_ALWAYS_INLINE void operator()(Callback,
                                     uintptr_t pointer,
                                     CellType bits) const;
  };

  // Calls the callback and passes a bool argument, indicating whether a
  // quarantined object is marked or not.
  struct QuarantineCallbackForwarder {
    PA_ALWAYS_INLINE explicit QuarantineCallbackForwarder(size_t epoch)
        : is_unmarked{epoch} {}

    template <typename Callback>
    PA_ALWAYS_INLINE void operator()(Callback,
                                     uintptr_t pointer,
                                     CellType bits) const;
    FilterUnmarkedQuarantine is_unmarked;
  };

  template <typename Filter,
            typename CallbackForwarder,
            typename Callback,
            bool Clear>
  inline void IterateImpl(size_t epoch, Callback);

  PA_ALWAYS_INLINE CellType LoadCell(size_t cell_index) const;
  PA_ALWAYS_INLINE static constexpr std::pair<size_t, size_t>
  AllocationIndexAndBit(uintptr_t);

  std::array<CellType, kBitmapSize> bitmap_;
};

// The constructor can be omitted, but Chromium's clang plugin incorrectly
// warns that the type is not trivially constructible otherwise.
template <size_t PageSize, size_t PageAlignment, size_t AllocationAlignment>
inline StateBitmap<PageSize, PageAlignment, AllocationAlignment>::
    StateBitmap() = default;

template <size_t PageSize, size_t PageAlignment, size_t AllocationAlignment>
PA_ALWAYS_INLINE void
StateBitmap<PageSize, PageAlignment, AllocationAlignment>::Allocate(
    uintptr_t address) {
  PA_SCAN_DCHECK(IsFreed(address));
  auto [cell_index, object_bit] = AllocationIndexAndBit(address);
  const CellType mask = static_cast<CellType>(State::kAlloced) << object_bit;
  auto& cell = AsAtomicCell(cell_index);
  cell.fetch_or(mask, std::memory_order_relaxed);
}

template <size_t PageSize, size_t PageAlignment, size_t AllocationAlignment>
PA_ALWAYS_INLINE bool
StateBitmap<PageSize, PageAlignment, AllocationAlignment>::Quarantine(
    uintptr_t address,
    Epoch epoch) {
  // *Scan is enabled at runtime, which means that we can quarantine an
  // allocation that was previously not recorded in the bitmap. Hence, we
  // can't reliably check the transition from kAlloced to kQuarantined.
  static_assert((~static_cast<CellType>(State::kQuarantined1) & kStateMask) ==
                    (static_cast<CellType>(State::kQuarantined2) & kStateMask),
                "kQuarantined1 must be the bitwise inverse of kQuarantined2");
  const State quarantine_state =
      epoch & 0b1 ? State::kQuarantined1 : State::kQuarantined2;
  auto [cell_index, object_bit] = AllocationIndexAndBit(address);
  const CellType mask =
      ~(static_cast<CellType>(quarantine_state) << object_bit);
  auto& cell = AsAtomicCell(cell_index);
  const CellType cell_before = cell.fetch_and(mask, std::memory_order_relaxed);
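  // Worked example (even epoch, so quarantine_state == kQuarantined2 == 10):
  // an allocated object (11) becomes 01 after the fetch_and, and the popcount
  // of its previous pair of bits (11) is 2, so the call reports success. An
  // already quarantined object (01 or 10) has popcount 1 and reports failure.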
  // Check if the previous state was also quarantined.
  return __builtin_popcount(static_cast<unsigned>((cell_before >> object_bit) &
                                                  kStateMask)) != 1;
}

template <size_t PageSize, size_t PageAlignment, size_t AllocationAlignment>
PA_ALWAYS_INLINE bool
StateBitmap<PageSize, PageAlignment, AllocationAlignment>::
    MarkQuarantinedAsReachable(uintptr_t address, Epoch epoch) {
  static_assert((~static_cast<CellType>(State::kQuarantined1) & kStateMask) ==
                    (static_cast<CellType>(State::kQuarantined2) & kStateMask),
                "kQuarantined1 must be the bitwise inverse of kQuarantined2");
  const State quarantine_state_old =
      epoch & 0b1 ? State::kQuarantined2 : State::kQuarantined1;
  auto [cell_index, object_bit] = AllocationIndexAndBit(address);
  const CellType clear_mask =
      ~(static_cast<CellType>(State::kAlloced) << object_bit);
  const CellType set_mask_old = static_cast<CellType>(quarantine_state_old)
                                << object_bit;
  const CellType xor_mask = static_cast<CellType>(0b11) << object_bit;
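  // Example (even epoch): an unmarked quarantined object holds 01; the CAS
  // below flips it with xor 11 to 10, the "marked" encoding for that epoch. A
  // thread that loses a marking race observes the old-state bit already
  // cleared and returns false.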
  auto& cell = AsAtomicCell(cell_index);
  CellType expected =
      (cell.load(std::memory_order_relaxed) & clear_mask) | set_mask_old;
  CellType desired = expected ^ xor_mask;
  while (PA_UNLIKELY(!cell.compare_exchange_weak(expected, desired,
                                                 std::memory_order_relaxed,
                                                 std::memory_order_relaxed))) {
    // First check if the object was already marked before or in parallel.
    if ((expected & set_mask_old) == 0) {
      // Check that the bits can't be in any state other than
      // marked-quarantined.
      PA_SCAN_DCHECK(
          ((expected >> object_bit) & kStateMask) ==
          (~static_cast<CellType>(quarantine_state_old) & kStateMask));
      return false;
    }
    // Otherwise, some other bits in the cell were concurrently changed.
    // Update |desired| and retry.
    desired = expected ^ xor_mask;
  }
  return true;
}

template <size_t PageSize, size_t PageAlignment, size_t AllocationAlignment>
PA_ALWAYS_INLINE void
StateBitmap<PageSize, PageAlignment, AllocationAlignment>::Free(
    uintptr_t address) {
  // *Scan is enabled at runtime, which means that we can free an allocation
  // that was previously not recorded as quarantined in the bitmap. Hence, we
  // can't reliably check the transition from kQuarantined to kFreed.
  static_assert((~static_cast<CellType>(State::kAlloced) & kStateMask) ==
                    (static_cast<CellType>(State::kFreed) & kStateMask),
                "kFreed must be the bitwise inverse of kAlloced");
  auto [cell_index, object_bit] = AllocationIndexAndBit(address);
  const CellType mask =
      ~(static_cast<CellType>(State::kAlloced) << object_bit);
  auto& cell = AsAtomicCell(cell_index);
  cell.fetch_and(mask, std::memory_order_relaxed);
}

template <size_t PageSize, size_t PageAlignment, size_t AllocationAlignment>
PA_ALWAYS_INLINE bool
StateBitmap<PageSize, PageAlignment, AllocationAlignment>::IsAllocated(
    uintptr_t address) const {
  return GetBits(address) == static_cast<unsigned>(State::kAlloced);
}

template <size_t PageSize, size_t PageAlignment, size_t AllocationAlignment>
PA_ALWAYS_INLINE bool
StateBitmap<PageSize, PageAlignment, AllocationAlignment>::IsQuarantined(
    uintptr_t address) const {
  // Exactly one of the two state bits is set for a quarantined object. On
  // x86, the CPI of popcnt is the same as that of tzcnt, so we use it instead
  // of tzcnt + inversion.
  return __builtin_popcount(GetBits(address)) == 1;
}

template <size_t PageSize, size_t PageAlignment, size_t AllocationAlignment>
PA_ALWAYS_INLINE bool
StateBitmap<PageSize, PageAlignment, AllocationAlignment>::IsFreed(
    uintptr_t address) const {
  return GetBits(address) == static_cast<unsigned>(State::kFreed);
}

template <size_t PageSize, size_t PageAlignment, size_t AllocationAlignment>
PA_ALWAYS_INLINE
typename StateBitmap<PageSize, PageAlignment, AllocationAlignment>::CellType
StateBitmap<PageSize, PageAlignment, AllocationAlignment>::LoadCell(
    size_t cell_index) const {
  return AsAtomicCell(cell_index).load(std::memory_order_relaxed);
}

template <size_t PageSize, size_t PageAlignment, size_t AllocationAlignment>
PA_ALWAYS_INLINE constexpr std::pair<size_t, size_t>
StateBitmap<PageSize, PageAlignment, AllocationAlignment>::
    AllocationIndexAndBit(uintptr_t address) {
  const uintptr_t offset_in_page = address & kPageOffsetMask;
  const size_t allocation_number =
      (offset_in_page / kAllocationAlignment) * kBitsNeededForAllocation;
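  // E.g., with AllocationAlignment == 16 and 64-bit cells (illustrative
  // values): offset 0x250 gives allocation_number == (592 / 16) * 2 == 74,
  // hence cell_index == 1 and bit == 10.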
  const size_t cell_index = allocation_number / kBitsPerCell;
  PA_SCAN_DCHECK(kBitmapSize > cell_index);
  const size_t bit = allocation_number % kBitsPerCell;
  return {cell_index, bit};
}

template <size_t PageSize, size_t PageAlignment, size_t AllocationAlignment>
unsigned StateBitmap<PageSize, PageAlignment, AllocationAlignment>::GetBits(
    uintptr_t address) const {
  auto [cell_index, object_bit] = AllocationIndexAndBit(address);
  return (LoadCell(cell_index) >> object_bit) & kStateMask;
}

template <size_t PageSize, size_t PageAlignment, size_t AllocationAlignment>
bool StateBitmap<PageSize, PageAlignment, AllocationAlignment>::
    FilterQuarantine::operator()(CellType bits) const {
  return __builtin_popcount(bits) == 1;
}

template <size_t PageSize, size_t PageAlignment, size_t AllocationAlignment>
bool StateBitmap<PageSize, PageAlignment, AllocationAlignment>::
    FilterUnmarkedQuarantine::operator()(CellType bits) const {
  // Truth table:
  // epoch & 1 | bits | result
  //     0     |  01  |   1
  //     1     |  10  |   1
  //     *     |  **  |   0
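  // Subtracting the epoch parity maps the current epoch's unmarked encoding
  // (01 for even epochs, 10 for odd ones) onto 0b01; every other bit pattern
  // misses.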
  return bits - (epoch & 0b01) == 0b01;
}

template <size_t PageSize, size_t PageAlignment, size_t AllocationAlignment>
bool StateBitmap<PageSize, PageAlignment, AllocationAlignment>::
    FilterAllocated::operator()(CellType bits) const {
  return bits == 0b11;
}

template <size_t PageSize, size_t PageAlignment, size_t AllocationAlignment>
template <typename Callback>
PA_ALWAYS_INLINE void
StateBitmap<PageSize, PageAlignment, AllocationAlignment>::
    SimpleCallbackForwarder::operator()(Callback callback,
                                        uintptr_t pointer,
                                        CellType bits) const {
  callback(pointer);
}

template <size_t PageSize, size_t PageAlignment, size_t AllocationAlignment>
template <typename Callback>
PA_ALWAYS_INLINE void
StateBitmap<PageSize, PageAlignment, AllocationAlignment>::
    QuarantineCallbackForwarder::operator()(Callback callback,
                                            uintptr_t pointer,
                                            CellType bits) const {
  callback(pointer, !is_unmarked(bits));
}

template <size_t PageSize, size_t PageAlignment, size_t AllocationAlignment>
template <typename Filter,
          typename CallbackForwarder,
          typename Callback,
          bool Clear>
inline void
StateBitmap<PageSize, PageAlignment, AllocationAlignment>::IterateImpl(
    size_t epoch,
    Callback callback) {
  // The bitmap (|this|) is allocated inside the page with |kPageAlignment|.
  Filter filter{epoch};
  CallbackForwarder callback_forwarder{epoch};
  const uintptr_t base = reinterpret_cast<uintptr_t>(this) & kPageBaseMask;
  for (size_t cell_index = 0; cell_index < kBitmapSize; ++cell_index) {
    CellType value = LoadCell(cell_index);
    while (value) {
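      // Find the lowest non-empty two-bit slot: & ~0b1 rounds the
      // trailing-zero count down to the even bit that starts the slot.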
      const size_t trailing_zeroes =
          static_cast<size_t>(base::bits::CountTrailingZeroBits(value) & ~0b1);
      const CellType clear_value_mask =
          ~(static_cast<CellType>(kStateMask) << trailing_zeroes);
      const CellType bits = (value >> trailing_zeroes) & kStateMask;
      if (!filter(bits)) {
        // Clear current object bit in temporary value to advance iteration.
        value &= clear_value_mask;
        continue;
      }
      const size_t object_number =
          (cell_index * kBitsPerCell) + trailing_zeroes;
      const uintptr_t object_address =
          base +
          (object_number * kAllocationAlignment / kBitsNeededForAllocation);

      callback_forwarder(callback, object_address, bits);

      if constexpr (Clear) {
        // Clear the current bits.
        AsAtomicCell(cell_index)
            .fetch_and(clear_value_mask, std::memory_order_relaxed);
      }

      // Clear current object bit in temporary value to advance iteration.
      value &= clear_value_mask;
    }
  }
}
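
// The const iteration entry points below const_cast to the non-const
// IterateImpl(); with Clear == false the underlying cells are only read via
// relaxed atomic loads.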
template <size_t PageSize, size_t PageAlignment, size_t AllocationAlignment>
template <typename Callback>
inline void
StateBitmap<PageSize, PageAlignment, AllocationAlignment>::IterateAllocated(
    Callback callback) const {
  const_cast<StateBitmap*>(this)
      ->IterateImpl<FilterAllocated, SimpleCallbackForwarder, Callback, false>(
          0, std::move(callback));
}

template <size_t PageSize, size_t PageAlignment, size_t AllocationAlignment>
template <typename Callback, decltype(std::declval<Callback>()(0), 0)>
inline void
StateBitmap<PageSize, PageAlignment, AllocationAlignment>::IterateQuarantined(
    Callback callback) const {
  const_cast<StateBitmap*>(this)
      ->IterateImpl<FilterQuarantine, SimpleCallbackForwarder, Callback,
                    false>(0, std::move(callback));
}

template <size_t PageSize, size_t PageAlignment, size_t AllocationAlignment>
template <typename Callback, decltype(std::declval<Callback>()(0, true), 0)>
inline void
StateBitmap<PageSize, PageAlignment, AllocationAlignment>::IterateQuarantined(
    size_t epoch,
    Callback callback) const {
  const_cast<StateBitmap*>(this)
      ->IterateImpl<FilterQuarantine, QuarantineCallbackForwarder, Callback,
                    false>(epoch, std::move(callback));
}

template <size_t PageSize, size_t PageAlignment, size_t AllocationAlignment>
template <typename Callback>
inline void StateBitmap<PageSize, PageAlignment, AllocationAlignment>::
    IterateUnmarkedQuarantined(size_t epoch, Callback callback) const {
  const_cast<StateBitmap*>(this)
      ->IterateImpl<FilterUnmarkedQuarantine, SimpleCallbackForwarder,
                    Callback, false>(epoch, std::move(callback));
}

template <size_t PageSize, size_t PageAlignment, size_t AllocationAlignment>
template <typename Callback>
inline void StateBitmap<PageSize, PageAlignment, AllocationAlignment>::
    IterateUnmarkedQuarantinedAndFree(size_t epoch, Callback callback) {
  IterateImpl<FilterUnmarkedQuarantine, SimpleCallbackForwarder, Callback,
              true>(epoch, std::move(callback));
}

template <size_t PageSize, size_t PageAlignment, size_t AllocationAlignment>
void StateBitmap<PageSize, PageAlignment, AllocationAlignment>::Clear() {
  std::fill(bitmap_.begin(), bitmap_.end(), CellType{0});
}

}  // namespace partition_alloc::internal

#endif  // BASE_ALLOCATOR_PARTITION_ALLOCATOR_STARSCAN_STATE_BITMAP_H_