// Copyright 2023 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef BASE_ALLOCATOR_PARTITION_ALLOCATOR_POINTERS_RAW_PTR_BACKUP_REF_IMPL_H_
#define BASE_ALLOCATOR_PARTITION_ALLOCATOR_POINTERS_RAW_PTR_BACKUP_REF_IMPL_H_

#include <stddef.h>
#include <stdint.h>

#include <type_traits>

#include "base/allocator/partition_allocator/partition_address_space.h"
#include "base/allocator/partition_allocator/partition_alloc_base/compiler_specific.h"
#include "base/allocator/partition_allocator/partition_alloc_base/component_export.h"
#include "base/allocator/partition_allocator/partition_alloc_base/cxx20_is_constant_evaluated.h"
#include "base/allocator/partition_allocator/partition_alloc_buildflags.h"
#include "base/allocator/partition_allocator/partition_alloc_config.h"
#include "base/allocator/partition_allocator/partition_alloc_constants.h"
#include "base/allocator/partition_allocator/partition_alloc_forward.h"
#include "build/build_config.h"

#if !BUILDFLAG(HAS_64_BIT_POINTERS)
#include "base/allocator/partition_allocator/address_pool_manager_bitmap.h"
#endif

#if !BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT)
#error "Included under wrong build option"
#endif

namespace base::internal {

#if BUILDFLAG(PA_DCHECK_IS_ON) || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
PA_COMPONENT_EXPORT(RAW_PTR)
void CheckThatAddressIsntWithinFirstPartitionPage(uintptr_t address);
#endif

// Note that `RawPtrBackupRefImpl` itself is not thread-safe. If multiple
// threads modify the same raw_ptr object without synchronization, a data race
// will occur.
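//
// An illustrative sketch of the hazard (hypothetical caller code, not part of
// this API): two threads assigning to the same raw_ptr without a lock race on
// both the stored pointer value and the BRP ref-count updates.
//
//   raw_ptr<int> shared;          // reachable from two threads
//   // Thread A:                  // Thread B:
//   shared = &a;                  shared = &b;   // data race: UB
//
// Guard such objects with external synchronization, exactly as for a plain T*.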
template <bool AllowDangling = false>
struct RawPtrBackupRefImpl {
  // These are needed for correctness, or else we may end up manipulating
  // ref-count where we shouldn't, thus affecting the BRP's integrity. Unlike
  // the first two, kMustZeroOnDestruct wouldn't be needed if raw_ptr was used
  // correctly, but we already caught cases where a value is written after
  // destruction.
  static constexpr bool kMustZeroOnInit = true;
  static constexpr bool kMustZeroOnMove = true;
  static constexpr bool kMustZeroOnDestruct = true;

 private:
  PA_ALWAYS_INLINE static bool IsSupportedAndNotNull(uintptr_t address) {
    // There are many situations where the compiler can prove that
    // `ReleaseWrappedPtr` is called on a value that is always nullptr, but the
    // way `IsManagedByPartitionAllocBRPPool` is written, the compiler can't
    // prove that nullptr is not managed by PartitionAlloc; and so the compiler
    // has to emit a useless check and dead code. To avoid that without making
    // the runtime check slower, tell the compiler to skip
    // `IsManagedByPartitionAllocBRPPool` when it can statically determine that
    // address is nullptr.
#if PA_HAS_BUILTIN(__builtin_constant_p)
    if (__builtin_constant_p(address == 0) && (address == 0)) {
#if BUILDFLAG(PA_DCHECK_IS_ON) || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
      PA_BASE_CHECK(
          !partition_alloc::IsManagedByPartitionAllocBRPPool(address));
#endif  // BUILDFLAG(PA_DCHECK_IS_ON) ||
        // BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
      return false;
    }
#endif  // PA_HAS_BUILTIN(__builtin_constant_p)
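    // An illustrative instance of the elision (hypothetical caller code):
    //
    //   raw_ptr<int> p;   // wraps nullptr
    //   p = &i;           // internally calls ReleaseWrappedPtr(nullptr)
    //
    // Here `__builtin_constant_p(address == 0)` is true, the branch above
    // folds to `return false`, and no runtime pool check is emitted.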

    // This covers the nullptr case, as address 0 is never in any
    // PartitionAlloc pool.
    bool is_in_brp_pool =
        partition_alloc::IsManagedByPartitionAllocBRPPool(address);

    // There may be pointers immediately after the allocation, e.g.
    //   {
    //     // Assume this allocation happens outside of PartitionAlloc.
    //     raw_ptr<T> ptr = new T[20];
    //     for (size_t i = 0; i < 20; i++) { ptr++; }
    //   }
    //
    // Such pointers are *not* at risk of accidentally falling into BRP pool,
    // because:
    // 1) On 64-bit systems, BRP pool is preceded by a forbidden region.
    // 2) On 32-bit systems, the guard pages and metadata of super pages in BRP
    //    pool aren't considered to be part of that pool.
    //
    // This allows us to make a stronger assertion that if
    // IsManagedByPartitionAllocBRPPool returns true for a valid pointer,
    // it must be at least a partition page away from the beginning of a super
    // page.
#if BUILDFLAG(PA_DCHECK_IS_ON) || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
    if (is_in_brp_pool) {
      CheckThatAddressIsntWithinFirstPartitionPage(address);
    }
#endif

    return is_in_brp_pool;
  }

#if BUILDFLAG(BACKUP_REF_PTR_POISON_OOB_PTR)
  // Out-Of-Bounds (OOB) poison bit is set when the pointer has overflowed by
  // one byte.
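  //
  // An illustrative sketch (hypothetical caller code): advancing one element
  // past the end of an allocation stores a poisoned value; dereferencing it
  // would then fail the `IsPtrOOB` check (in builds where that check is
  // compiled in).
  //
  //   raw_ptr<int> p = &arr[len - 1];
  //   ++p;      // end-of-allocation: stored with OOB_POISON_BIT set
  //   *p = 0;   // would CHECK in SafelyUnwrapPtrForDereference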
#if defined(ARCH_CPU_X86_64)
  // Bit 63 is the only pointer bit that will work as the poison bit across both
  // LAM48 and LAM57. It also works when all unused linear address bits are
  // checked for canonicality.
  static constexpr uintptr_t OOB_POISON_BIT = static_cast<uintptr_t>(1) << 63;
#else
  // Avoid ARM's Top-Byte Ignore.
  static constexpr uintptr_t OOB_POISON_BIT = static_cast<uintptr_t>(1) << 55;
#endif

  template <typename T>
  PA_ALWAYS_INLINE static T* UnpoisonPtr(T* ptr) {
    return reinterpret_cast<T*>(reinterpret_cast<uintptr_t>(ptr) &
                                ~OOB_POISON_BIT);
  }

  template <typename T>
  PA_ALWAYS_INLINE static bool IsPtrOOB(T* ptr) {
    return (reinterpret_cast<uintptr_t>(ptr) & OOB_POISON_BIT) ==
           OOB_POISON_BIT;
  }

  template <typename T>
  PA_ALWAYS_INLINE static T* PoisonOOBPtr(T* ptr) {
    return reinterpret_cast<T*>(reinterpret_cast<uintptr_t>(ptr) |
                                OOB_POISON_BIT);
  }
#else   // BUILDFLAG(BACKUP_REF_PTR_POISON_OOB_PTR)
  template <typename T>
  PA_ALWAYS_INLINE static T* UnpoisonPtr(T* ptr) {
    return ptr;
  }
#endif  // BUILDFLAG(BACKUP_REF_PTR_POISON_OOB_PTR)

 public:
  // Wraps a pointer.
  template <typename T>
  PA_ALWAYS_INLINE static constexpr T* WrapRawPtr(T* ptr) {
    if (partition_alloc::internal::base::is_constant_evaluated()) {
      return ptr;
    }
    uintptr_t address = partition_alloc::UntagPtr(UnpoisonPtr(ptr));
    if (IsSupportedAndNotNull(address)) {
#if BUILDFLAG(PA_DCHECK_IS_ON) || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
      PA_BASE_CHECK(ptr != nullptr);
#endif
      AcquireInternal(address);
    } else {
#if !BUILDFLAG(HAS_64_BIT_POINTERS)
#if PA_HAS_BUILTIN(__builtin_constant_p)
      // Similarly to `IsSupportedAndNotNull` above, elide the
      // `BanSuperPageFromBRPPool` call if the compiler can prove that `address`
      // is zero since PA won't be able to map anything at that address anyway.
      bool known_constant_zero =
          __builtin_constant_p(address == 0) && (address == 0);
#else   // PA_HAS_BUILTIN(__builtin_constant_p)
      bool known_constant_zero = false;
#endif  // PA_HAS_BUILTIN(__builtin_constant_p)

      if (!known_constant_zero) {
        partition_alloc::internal::AddressPoolManagerBitmap::
            BanSuperPageFromBRPPool(address);
      }
#endif  // !BUILDFLAG(HAS_64_BIT_POINTERS)
    }

    return ptr;
  }

  // Notifies the allocator when a wrapped pointer is being removed or
  // replaced.
  template <typename T>
  PA_ALWAYS_INLINE static constexpr void ReleaseWrappedPtr(T* wrapped_ptr) {
    if (partition_alloc::internal::base::is_constant_evaluated()) {
      return;
    }
    uintptr_t address = partition_alloc::UntagPtr(UnpoisonPtr(wrapped_ptr));
    if (IsSupportedAndNotNull(address)) {
#if BUILDFLAG(PA_DCHECK_IS_ON) || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
      PA_BASE_CHECK(wrapped_ptr != nullptr);
#endif
      ReleaseInternal(address);
    }
    // We are unable to counteract BanSuperPageFromBRPPool(), called from
    // WrapRawPtr(). We only use one bit per super-page and thus can't tell if
    // there's more than one associated raw_ptr<T> at a given time. The risk of
    // exhausting the entire address space is minuscule, so we opted for the
    // perf gain of a single relaxed store (in the above-mentioned function)
    // over the two much more expensive CAS operations we'd need in order to
    // un-ban a super-page.
  }

  // Unwraps the pointer, while asserting that memory hasn't been freed. The
  // function is allowed to crash on nullptr.
  template <typename T>
  PA_ALWAYS_INLINE static constexpr T* SafelyUnwrapPtrForDereference(
      T* wrapped_ptr) {
    if (partition_alloc::internal::base::is_constant_evaluated()) {
      return wrapped_ptr;
    }
#if BUILDFLAG(PA_DCHECK_IS_ON) || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
#if BUILDFLAG(BACKUP_REF_PTR_POISON_OOB_PTR)
    PA_BASE_CHECK(!IsPtrOOB(wrapped_ptr));
#endif
    uintptr_t address = partition_alloc::UntagPtr(wrapped_ptr);
    if (IsSupportedAndNotNull(address)) {
      PA_BASE_CHECK(wrapped_ptr != nullptr);
      PA_BASE_CHECK(IsPointeeAlive(address));
    }
#endif  // BUILDFLAG(PA_DCHECK_IS_ON) ||
        // BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
    return wrapped_ptr;
  }

  // Unwraps the pointer, while asserting that memory hasn't been freed. The
  // function must handle nullptr gracefully.
  template <typename T>
  PA_ALWAYS_INLINE static constexpr T* SafelyUnwrapPtrForExtraction(
      T* wrapped_ptr) {
    if (partition_alloc::internal::base::is_constant_evaluated()) {
      return wrapped_ptr;
    }
    T* unpoisoned_ptr = UnpoisonPtr(wrapped_ptr);
#if BUILDFLAG(BACKUP_REF_PTR_POISON_OOB_PTR)
    // Some code uses invalid pointer values as indicators, so those values must
    // be passed through unchanged during extraction. The following check will
    // pass invalid values through if those values do not fall within the BRP
    // pool after being unpoisoned.
    if (!IsSupportedAndNotNull(partition_alloc::UntagPtr(unpoisoned_ptr))) {
      return wrapped_ptr;
    }
    // Poison-based OOB checks do not extend to extracted pointers. The
    // alternative of retaining poison on extracted pointers could introduce new
    // OOB conditions, e.g., in code that extracts an end-of-allocation pointer
    // for use in a loop termination condition. The poison bit would make that
    // pointer appear to reference a very high address.
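    //
    // An illustrative sketch (hypothetical caller code, where `end_ptr` is a
    // raw_ptr already advanced to the end of the allocation, i.e. poisoned):
    //
    //   int* end = end_ptr.get();   // extraction strips the poison bit
    //   for (int* it = begin; it != end; ++it) { ... }
    //
    // If `end` kept its poison bit, `it != end` would never become false and
    // the loop would not terminate.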
#endif  // BUILDFLAG(BACKUP_REF_PTR_POISON_OOB_PTR)
    return unpoisoned_ptr;
  }

  // Unwraps the pointer, without making an assertion on whether memory was
  // freed or not.
  template <typename T>
  PA_ALWAYS_INLINE static constexpr T* UnsafelyUnwrapPtrForComparison(
      T* wrapped_ptr) {
    if (partition_alloc::internal::base::is_constant_evaluated()) {
      return wrapped_ptr;
    }
    // This may be used for unwrapping an end-of-allocation pointer to be used
    // as an endpoint in an iterative algorithm, so this removes the OOB poison
    // bit.
    return UnpoisonPtr(wrapped_ptr);
  }

  // Upcasts the wrapped pointer.
  template <typename To, typename From>
  PA_ALWAYS_INLINE static constexpr To* Upcast(From* wrapped_ptr) {
    static_assert(std::is_convertible<From*, To*>::value,
                  "From must be convertible to To.");
    // Note, this cast may change the address if upcasting to a base that lies
    // in the middle of the derived object.
    return wrapped_ptr;
  }

  // Verify the pointer stayed in the same slot, and return the poisoned
  // version of `new_ptr` if OOB poisoning is enabled.
  template <typename T>
  PA_ALWAYS_INLINE static T* VerifyAndPoisonPointerAfterAdvanceOrRetreat(
      T* unpoisoned_ptr,
      T* new_ptr) {
    // In the "before allocation" mode, on 32-bit, we can run into a problem
    // that the end-of-allocation address could fall outside of
    // PartitionAlloc's pools, if this is the last slot of the super page,
    // thus pointing to the guard page. This means the ref-count won't be
    // decreased when the pointer is released (leak).
    //
    // We could possibly solve it in a few different ways:
    // - Add the trailing guard page to the pool, but we'd have to think very
    //   hard if this doesn't create another hole.
    // - Add an address adjustment to the "is in pool?" check, similar to the
    //   one in PartitionAllocGetSlotStartInBRPPool(), but that seems fragile,
    //   not to mention adding an extra instruction to an inlined hot path.
    // - Let the leak happen, since it should be a very rare condition.
    // - Go back to the previous solution of rewrapping the pointer, but that
    //   had an issue of losing BRP protection in case the pointer ever gets
    //   shifted back before the end of allocation.
    //
    // We decided to cross that bridge once we get there... if we ever get
    // there. Currently there are no plans to switch back to the "before
    // allocation" mode.
    //
    // This problem doesn't exist in the "previous slot" mode, or any mode that
    // involves putting extras after the allocation, because the
    // end-of-allocation address belongs to the same slot.
    static_assert(BUILDFLAG(PUT_REF_COUNT_IN_PREVIOUS_SLOT));

    // First check that the new address didn't migrate in/out of the BRP pool,
    // and that it lands within the same allocation. An end-of-allocation
    // address is ok, too, and that may lead to the pointer being poisoned if
    // the relevant feature is enabled. These checks add a non-trivial cost,
    // but they're cheaper and more secure than the previous implementation
    // that rewrapped the pointer (wrapped the new pointer and unwrapped the
    // old one).
    //
    // Note, the value of these checks goes beyond OOB protection. They're
    // important for the integrity of the BRP algorithm. Without these, an
    // attacker could make the pointer point to another allocation, and cause
    // its ref-count to go to 0 upon this pointer's destruction, even though
    // there may be another pointer still pointing to it, thus making it lose
    // the BRP protection prematurely.
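    //
    // A sketch of the scenario these checks prevent (hypothetical,
    // attacker-controlled `evil_delta`):
    //
    //   raw_ptr<T> p = object_in_slot_a;
    //   p += evil_delta;   // would land inside slot B; CHECKs here instead
    //   p = nullptr;       // would otherwise decrement slot B's ref-count
    //
    // Without the same-allocation check, slot B could lose BRP protection
    // while still referenced elsewhere.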
    const uintptr_t before_addr = partition_alloc::UntagPtr(unpoisoned_ptr);
    const uintptr_t after_addr = partition_alloc::UntagPtr(new_ptr);
    // TODO(bartekn): Consider adding support for non-BRP pools too (without
    // removing the cross-pool migration check).
    if (IsSupportedAndNotNull(before_addr)) {
      constexpr size_t size = sizeof(T);
      [[maybe_unused]] const bool is_end =
          CheckPointerWithinSameAlloc(before_addr, after_addr, size);
#if BUILDFLAG(BACKUP_REF_PTR_POISON_OOB_PTR)
      if (is_end) {
        new_ptr = PoisonOOBPtr(new_ptr);
      }
#endif  // BUILDFLAG(BACKUP_REF_PTR_POISON_OOB_PTR)
    } else {
      // Check that the new address didn't migrate into the BRP pool, as it
      // would result in more pointers pointing to an allocation than its
      // ref-count reflects.
      PA_BASE_CHECK(!IsSupportedAndNotNull(after_addr));
    }
    return new_ptr;
  }

  // Advance the wrapped pointer by `delta_elems`.
  template <
      typename T,
      typename Z,
      typename =
          std::enable_if_t<partition_alloc::internal::is_offset_type<Z>, void>>
  PA_ALWAYS_INLINE static constexpr T* Advance(T* wrapped_ptr, Z delta_elems) {
    if (partition_alloc::internal::base::is_constant_evaluated()) {
      return wrapped_ptr + delta_elems;
    }
    T* unpoisoned_ptr = UnpoisonPtr(wrapped_ptr);
    return VerifyAndPoisonPointerAfterAdvanceOrRetreat(
        unpoisoned_ptr, unpoisoned_ptr + delta_elems);
  }

  // Retreat the wrapped pointer by `delta_elems`.
  template <
      typename T,
      typename Z,
      typename =
          std::enable_if_t<partition_alloc::internal::is_offset_type<Z>, void>>
  PA_ALWAYS_INLINE static constexpr T* Retreat(T* wrapped_ptr, Z delta_elems) {
    if (partition_alloc::internal::base::is_constant_evaluated()) {
      return wrapped_ptr - delta_elems;
    }
    T* unpoisoned_ptr = UnpoisonPtr(wrapped_ptr);
    return VerifyAndPoisonPointerAfterAdvanceOrRetreat(
        unpoisoned_ptr, unpoisoned_ptr - delta_elems);
  }

  template <typename T>
  PA_ALWAYS_INLINE static constexpr ptrdiff_t GetDeltaElems(T* wrapped_ptr1,
                                                            T* wrapped_ptr2) {
    if (partition_alloc::internal::base::is_constant_evaluated()) {
      return wrapped_ptr1 - wrapped_ptr2;
    }

    T* unpoisoned_ptr1 = UnpoisonPtr(wrapped_ptr1);
    T* unpoisoned_ptr2 = UnpoisonPtr(wrapped_ptr2);
#if BUILDFLAG(ENABLE_POINTER_SUBTRACTION_CHECK)
    uintptr_t address1 = partition_alloc::UntagPtr(unpoisoned_ptr1);
    uintptr_t address2 = partition_alloc::UntagPtr(unpoisoned_ptr2);
    // Ensure that both pointers are within the same slot, and pool!
    // TODO(bartekn): Consider adding support for non-BRP pool too.
    if (IsSupportedAndNotNull(address1)) {
      PA_BASE_CHECK(IsSupportedAndNotNull(address2));
      PA_BASE_CHECK(partition_alloc::internal::IsPtrWithinSameAlloc(
                        address2, address1, sizeof(T)) !=
                    partition_alloc::internal::PtrPosWithinAlloc::kFarOOB);
    } else {
      PA_BASE_CHECK(!IsSupportedAndNotNull(address2));
    }
#endif  // BUILDFLAG(ENABLE_POINTER_SUBTRACTION_CHECK)
    return unpoisoned_ptr1 - unpoisoned_ptr2;
  }
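
  // Illustrative use of the arithmetic hooks above (hypothetical caller code;
  // `arr` is an allocation in the BRP pool and `q` is another raw_ptr<int>
  // into the same allocation):
  //
  //   raw_ptr<int> p = arr;        // WrapRawPtr acquires a ref
  //   p += 4;                      // Advance: must stay within the same slot
  //   p -= 2;                      // Retreat: same requirement
  //   ptrdiff_t d = p - q;         // GetDeltaElems: with
  //                                // ENABLE_POINTER_SUBTRACTION_CHECK, p and
  //                                // q must lie in the same slot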

  // Returns a copy of a wrapped pointer, without making an assertion on
  // whether memory was freed or not.
  // This method increments the reference count of the allocation slot.
  template <typename T>
  PA_ALWAYS_INLINE static constexpr T* Duplicate(T* wrapped_ptr) {
    if (partition_alloc::internal::base::is_constant_evaluated()) {
      return wrapped_ptr;
    }
    return WrapRawPtr(wrapped_ptr);
  }

  // Report the current wrapped pointer if the pointee isn't alive anymore.
  template <typename T>
  PA_ALWAYS_INLINE static void ReportIfDangling(T* wrapped_ptr) {
    ReportIfDanglingInternal(partition_alloc::UntagPtr(wrapped_ptr));
  }

  // `WrapRawPtrForDuplication` and `UnsafelyUnwrapPtrForDuplication` are used
  // to create a new raw_ptr<T> from another raw_ptr<T> of a different flavor.
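  //
  // An illustrative sketch (hypothetical impl aliases `SrcImpl` and `DstImpl`
  // standing for the two flavors' impl types):
  //
  //   T* raw = SrcImpl::UnsafelyUnwrapPtrForDuplication(wrapped);
  //   T* rewrapped = DstImpl::WrapRawPtrForDuplication(raw);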
  template <typename T>
  PA_ALWAYS_INLINE static constexpr T* WrapRawPtrForDuplication(T* ptr) {
    if (partition_alloc::internal::base::is_constant_evaluated()) {
      return ptr;
    } else {
      return WrapRawPtr(ptr);
    }
  }

  template <typename T>
  PA_ALWAYS_INLINE static constexpr T* UnsafelyUnwrapPtrForDuplication(
      T* wrapped_ptr) {
    if (partition_alloc::internal::base::is_constant_evaluated()) {
      return wrapped_ptr;
    } else {
      return UnpoisonPtr(wrapped_ptr);
    }
  }

  // This is for accounting only, used by unit tests.
  PA_ALWAYS_INLINE static constexpr void IncrementSwapCountForTest() {}
  PA_ALWAYS_INLINE static constexpr void IncrementLessCountForTest() {}
  PA_ALWAYS_INLINE static constexpr void
  IncrementPointerToMemberOperatorCountForTest() {}

 private:
  // We've evaluated several strategies (inline nothing, various parts, or
  // everything in |Wrap()| and |Release()|) using the Speedometer2 benchmark
  // to measure performance. The best results were obtained when only the
  // lightweight |IsManagedByPartitionAllocBRPPool()| check was inlined.
  // Therefore, we've extracted the rest into the functions below and marked
  // them as PA_NOINLINE to prevent unintended LTO effects.
  PA_NOINLINE static PA_COMPONENT_EXPORT(RAW_PTR) void AcquireInternal(
      uintptr_t address);
  PA_NOINLINE static PA_COMPONENT_EXPORT(RAW_PTR) void ReleaseInternal(
      uintptr_t address);
  PA_NOINLINE static PA_COMPONENT_EXPORT(RAW_PTR) bool IsPointeeAlive(
      uintptr_t address);
  PA_NOINLINE static PA_COMPONENT_EXPORT(RAW_PTR) void ReportIfDanglingInternal(
      uintptr_t address);

  // CHECK if `before_addr` and `after_addr` are in the same allocation, for a
  // given `type_size`.
  // If BACKUP_REF_PTR_POISON_OOB_PTR is enabled, return whether `after_addr`
  // points to the end of the allocation.
  // If BACKUP_REF_PTR_POISON_OOB_PTR is disabled, return false.
  PA_NOINLINE static PA_COMPONENT_EXPORT(
      RAW_PTR) bool CheckPointerWithinSameAlloc(uintptr_t before_addr,
                                                uintptr_t after_addr,
                                                size_t type_size);
};

}  // namespace base::internal

#endif  // BASE_ALLOCATOR_PARTITION_ALLOCATOR_POINTERS_RAW_PTR_BACKUP_REF_IMPL_H_