1 // Copyright 2020 The Chromium Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "partition_alloc/pointers/raw_ptr.h"
6
7 #include <climits>
8 #include <cstddef>
9 #include <cstdint>
10 #include <memory>
11 #include <string>
12 #include <thread>
13 #include <type_traits>
14 #include <utility>
15
16 #include "base/allocator/partition_alloc_features.h"
17 #include "base/allocator/partition_alloc_support.h"
18 #include "base/cpu.h"
19 #include "base/logging.h"
20 #include "base/memory/raw_ptr_asan_service.h"
21 #include "base/task/thread_pool.h"
22 #include "base/test/bind.h"
23 #include "base/test/gtest_util.h"
24 #include "base/test/memory/dangling_ptr_instrumentation.h"
25 #include "base/test/scoped_feature_list.h"
26 #include "base/test/task_environment.h"
27 #include "build/build_config.h"
28 #include "build/buildflag.h"
29 #include "partition_alloc/chromeos_buildflags.h"
30 #include "partition_alloc/dangling_raw_ptr_checks.h"
31 #include "partition_alloc/partition_alloc-inl.h"
32 #include "partition_alloc/partition_alloc.h"
33 #include "partition_alloc/partition_alloc_base/numerics/checked_math.h"
34 #include "partition_alloc/partition_alloc_buildflags.h"
35 #include "partition_alloc/partition_alloc_config.h"
36 #include "partition_alloc/partition_alloc_constants.h"
37 #include "partition_alloc/partition_alloc_hooks.h"
38 #include "partition_alloc/partition_root.h"
39 #include "partition_alloc/pointers/raw_ptr_counting_impl_for_test.h"
40 #include "partition_alloc/pointers/raw_ptr_test_support.h"
41 #include "partition_alloc/pointers/raw_ref.h"
42 #include "partition_alloc/tagging.h"
43 #include "testing/gmock/include/gmock/gmock.h"
44 #include "testing/gtest/include/gtest/gtest.h"
45 #include "third_party/abseil-cpp/absl/types/optional.h"
46 #include "third_party/abseil-cpp/absl/types/variant.h"
47
48 #if BUILDFLAG(USE_ASAN_BACKUP_REF_PTR)
49 #include <sanitizer/asan_interface.h>
50 #include "base/debug/asan_service.h"
51 #endif
52
53 using testing::AllOf;
54 using testing::HasSubstr;
55 using testing::Test;
56
// raw_ptr must stay pointer-sized: it is meant as a drop-in replacement for
// raw pointer fields, so any per-instance overhead would bloat every
// containing object.
static_assert(sizeof(raw_ptr<void>) == sizeof(void*),
              "raw_ptr shouldn't add memory overhead");
static_assert(sizeof(raw_ptr<int>) == sizeof(int*),
              "raw_ptr shouldn't add memory overhead");
static_assert(sizeof(raw_ptr<std::string>) == sizeof(std::string*),
              "raw_ptr shouldn't add memory overhead");
63
#if !BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) && \
    !BUILDFLAG(USE_ASAN_UNOWNED_PTR) && !BUILDFLAG(USE_HOOKABLE_RAW_PTR) && \
    !BUILDFLAG(RAW_PTR_ZERO_ON_MOVE) && !BUILDFLAG(RAW_PTR_ZERO_ON_DESTRUCT)
// |is_trivially_copyable| assertion means that arrays/vectors of raw_ptr can
// be copied by memcpy.
// (Guarded out for implementations whose copy/move/destruction must run
// nontrivial code, e.g. ref-count bookkeeping or zeroing.)
static_assert(std::is_trivially_copyable_v<raw_ptr<void>>,
              "raw_ptr should be trivially copyable");
static_assert(std::is_trivially_copyable_v<raw_ptr<int>>,
              "raw_ptr should be trivially copyable");
static_assert(std::is_trivially_copyable_v<raw_ptr<std::string>>,
              "raw_ptr should be trivially copyable");
#endif  // !BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) &&
        // !BUILDFLAG(USE_ASAN_UNOWNED_PTR) &&
        // !BUILDFLAG(USE_HOOKABLE_RAW_PTR) &&
        // !BUILDFLAG(RAW_PTR_ZERO_ON_MOVE) &&
        // !BUILDFLAG(RAW_PTR_ZERO_ON_DESTRUCT)
80
#if !BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) && \
    !BUILDFLAG(USE_ASAN_UNOWNED_PTR) && !BUILDFLAG(USE_HOOKABLE_RAW_PTR) && \
    !BUILDFLAG(RAW_PTR_ZERO_ON_CONSTRUCT) && \
    !BUILDFLAG(RAW_PTR_ZERO_ON_DESTRUCT)
// |is_trivially_default_constructible| assertion helps retain implicit default
// constructors when raw_ptr is used as a union field. Example of an error
// if this assertion didn't hold:
//
//     ../../base/trace_event/trace_arguments.h:249:16: error: call to
//     implicitly-deleted default constructor of 'base::trace_event::TraceValue'
//         TraceValue ret;
//                    ^
//     ../../base/trace_event/trace_arguments.h:211:26: note: default
//     constructor of 'TraceValue' is implicitly deleted because variant field
//     'as_pointer' has a non-trivial default constructor
//       raw_ptr<const void> as_pointer;
// (Guarded out for implementations that must zero or register the pointer on
// construction/destruction.)
static_assert(std::is_trivially_default_constructible_v<raw_ptr<void>>,
              "raw_ptr should be trivially default constructible");
static_assert(std::is_trivially_default_constructible_v<raw_ptr<int>>,
              "raw_ptr should be trivially default constructible");
static_assert(std::is_trivially_default_constructible_v<raw_ptr<std::string>>,
              "raw_ptr should be trivially default constructible");
#endif  // !BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) &&
        // !BUILDFLAG(USE_ASAN_UNOWNED_PTR) &&
        // !BUILDFLAG(USE_HOOKABLE_RAW_PTR) &&
        // !BUILDFLAG(RAW_PTR_ZERO_ON_CONSTRUCT) &&
        // !BUILDFLAG(RAW_PTR_ZERO_ON_DESTRUCT)
108
// Verify that raw_ptr is a literal type, and its entire interface is constexpr.
//
// Constexpr destructors were introduced in C++20. PartitionAlloc's minimum
// supported C++ version is C++17, so raw_ptr is not a literal type in C++17.
// Thus we only test for constexpr in C++20.
//
// The lambda below is evaluated entirely at compile time; the static_assert
// fails to compile if any exercised operation is non-constexpr (or exhibits
// UB, which is ill-formed in constant evaluation).
#if defined(__cpp_constexpr) && __cpp_constexpr >= 201907L
static_assert([]() constexpr {
  struct IntBase {};
  struct Int : public IntBase {
    int i = 0;
  };

  Int* i = new Int();
  {
    raw_ptr<Int> r(i);              // raw_ptr(T*)
    raw_ptr<Int> r2(r);             // raw_ptr(const raw_ptr&)
    raw_ptr<Int> r3(std::move(r));  // raw_ptr(raw_ptr&&)
    r = r2;                         // operator=(const raw_ptr&)
    r = std::move(r3);              // operator=(raw_ptr&&)
    raw_ptr<Int, base::RawPtrTraits::kMayDangle> r4(
        r);   // raw_ptr(const raw_ptr<DifferentTraits>&)
    r4 = r2;  // operator=(const raw_ptr<DifferentTraits>&)
    // (There is no move-version of DifferentTraits.)
    [[maybe_unused]] raw_ptr<IntBase> r5(
        r2);  // raw_ptr(const raw_ptr<Convertible>&)
    [[maybe_unused]] raw_ptr<IntBase> r6(
        std::move(r2));  // raw_ptr(raw_ptr<Convertible>&&)
    r2 = r;              // Reset after move...
    r5 = r2;             // operator=(const raw_ptr<Convertible>&)
    r5 = std::move(r2);  // operator=(raw_ptr<Convertible>&&)
    [[maybe_unused]] raw_ptr<Int> r7(nullptr);  // raw_ptr(nullptr)
    r4 = nullptr;                               // operator=(nullptr)
    r4 = i;                                     // operator=(T*)
    r5 = r4;                                    // operator=(const Upcast&)
    r5 = std::move(r4);                         // operator=(Upcast&&)
    r.get()->i += 1;                            // get()
    [[maybe_unused]] bool b = r;                // operator bool
    (*r).i += 1;                                // operator*()
    r->i += 1;                                  // operator->()
    [[maybe_unused]] Int* i2 = r;               // operator T*()
    [[maybe_unused]] IntBase* i3 = r;           // operator Convertible*()

    [[maybe_unused]] Int** i4 = &r.AsEphemeralRawAddr();
    [[maybe_unused]] Int*& i5 = r.AsEphemeralRawAddr();

    // Pointer arithmetic requires the kAllowPtrArithmetic trait.
    Int* array = new Int[3]();
    {
      raw_ptr<Int, base::RawPtrTraits::kAllowPtrArithmetic> ra(array);
      ++ra;      // operator++()
      --ra;      // operator--()
      ra++;      // operator++(int)
      ra--;      // operator--(int)
      ra += 1u;  // operator+=()
      ra -= 1u;  // operator-=()
    }
    delete[] array;
  }
  delete i;
  return true;
}());
#endif
170
// Types used to exercise per-type raw_ptr traits: the kTypeTraits
// specialization below attaches kDummyForTest to BaseWithTypeBasedTraits and
// (via is_base_of) to anything derived from it, but not to the first struct.
struct StructWithoutTypeBasedTraits {};
struct BaseWithTypeBasedTraits {};
struct DerivedWithTypeBasedTraits : BaseWithTypeBasedTraits {};
174
namespace base::raw_ptr_traits {
// `BaseWithTypeBasedTraits` and any derived classes have
// `RawPtrTraits::kDummyForTest`.
// Partial specialization of the `kTypeTraits` variable template, selected via
// SFINAE on the is_base_of condition.
template <typename T>
constexpr auto kTypeTraits<
    T,
    std::enable_if_t<std::is_base_of_v<BaseWithTypeBasedTraits, T>>> =
    RawPtrTraits::kDummyForTest;
}  // namespace base::raw_ptr_traits
184
// `raw_ptr<T>` should have traits based on specialization of `kTypeTraits<T>`.
// Only the types covered by the specialization above pick up kDummyForTest.
static_assert(!ContainsFlags(raw_ptr<StructWithoutTypeBasedTraits>::Traits,
                             base::RawPtrTraits::kDummyForTest));
static_assert(ContainsFlags(raw_ptr<BaseWithTypeBasedTraits>::Traits,
                            base::RawPtrTraits::kDummyForTest));
static_assert(ContainsFlags(raw_ptr<DerivedWithTypeBasedTraits>::Traits,
                            base::RawPtrTraits::kDummyForTest));
192
// Don't use base::internal for testing raw_ptr API, to test if code outside
// this namespace calls the correct functions from this namespace.
namespace {

// Shorter name for expected test impl.
using RawPtrCountingImpl = base::test::RawPtrCountingImplForTest;

// raw_ptr alias whose impl counts every wrap/unwrap/comparison, so tests can
// assert exactly which internal code paths were taken (see CountersMatch()).
template <typename T>
using CountingRawPtr = raw_ptr<T,
                               base::RawPtrTraits::kUseCountingImplForTest |
                                   base::RawPtrTraits::kAllowPtrArithmetic>;

// Ensure that the `kUseCountingImplForTest` flag selects the test impl.
static_assert(std::is_same_v<CountingRawPtr<int>::Impl, RawPtrCountingImpl>);

// As above, but additionally marked kMayDangle (tolerates dangling pointees).
template <typename T>
using CountingRawPtrMayDangle =
    raw_ptr<T,
            base::RawPtrTraits::kMayDangle |
                base::RawPtrTraits::kUseCountingImplForTest |
                base::RawPtrTraits::kAllowPtrArithmetic>;

// Ensure that the `kUseCountingImplForTest` flag selects the test impl.
static_assert(
    std::is_same_v<CountingRawPtrMayDangle<int>::Impl, RawPtrCountingImpl>);

// As above, but allowed to be left uninitialized.
template <typename T>
using CountingRawPtrUninitialized =
    raw_ptr<T,
            base::RawPtrTraits::kUseCountingImplForTest |
                base::RawPtrTraits::kAllowUninitialized>;

// Ensure that the `kUseCountingImplForTest` flag selects the test impl.
static_assert(
    std::is_same_v<CountingRawPtrUninitialized<int>::Impl, RawPtrCountingImpl>);
228
// Trivial aggregate used by operator->() tests.
struct MyStruct {
  int x;
};
232
// First base of Derived; shares Derived's start address.
struct Base1 {
  explicit Base1(int b1) : b1(b1) {}
  int b1;
};
237
// Second base of Derived; lives at a non-zero offset inside Derived, which
// the hierarchy-cast tests below rely on.
struct Base2 {
  explicit Base2(int b2) : b2(b2) {}
  int b2;
};
242
// Multiply-inherits from Base1 and Base2 so upcasts to Base2 adjust the
// pointer value.
struct Derived : Base1, Base2 {
  Derived(int b1, int b2, int d) : Base1(b1), Base2(b2), d(d) {}
  int d;
};
247
// Test fixture: resets the counting impl's counters before each test so that
// every CountersMatch() expectation starts from zero.
class RawPtrTest : public Test {
 protected:
  void SetUp() override {
    RawPtrCountingImpl::ClearCounters();
  }
};
254
// Use this instead of std::ignore, to prevent the instruction from getting
// optimized out by the compiler.
volatile int g_volatile_int_to_ignore;
258
// Dereferencing a null raw_ptr through operator*() must terminate the process
// (where death tests are supported) instead of silently reading through null.
TEST_F(RawPtrTest, NullStarDereference) {
  raw_ptr<int> null_ptr{nullptr};
  EXPECT_DEATH_IF_SUPPORTED(g_volatile_int_to_ignore = *null_ptr, "");
}
263
// Member access through a null raw_ptr via operator->() must likewise crash
// (where death tests are supported) rather than dereference null.
TEST_F(RawPtrTest, NullArrowDereference) {
  raw_ptr<MyStruct> null_ptr{nullptr};
  EXPECT_DEATH_IF_SUPPORTED(g_volatile_int_to_ignore = null_ptr->x, "");
}
268
// Extracting (converting to T*) a null raw_ptr must not crash — only a
// dereference of null should. Verifies the extraction path is hit exactly
// once and the dereference path not at all.
TEST_F(RawPtrTest, NullExtractNoDereference) {
  CountingRawPtr<int> ptr = nullptr;
  // No dereference hence shouldn't crash.
  int* raw = ptr;
  std::ignore = raw;
  EXPECT_THAT((CountingRawPtrExpectations{.get_for_dereference_cnt = 0,
                                          .get_for_extraction_cnt = 1,
                                          .get_for_comparison_cnt = 0}),
              CountersMatch());
}
279
// An all-ones sentinel address must round-trip through raw_ptr unchanged.
TEST_F(RawPtrTest, InvalidExtractNoDereference) {
  // Some code uses invalid pointer values as indicators, so those values must
  // be accepted by raw_ptr and passed through unchanged during extraction.
  int* inv_ptr = reinterpret_cast<int*>(~static_cast<uintptr_t>(0));
  CountingRawPtr<int> ptr = inv_ptr;
  int* raw = ptr;
  EXPECT_EQ(raw, inv_ptr);
  EXPECT_THAT((CountingRawPtrExpectations{.get_for_dereference_cnt = 0,
                                          .get_for_extraction_cnt = 1,
                                          .get_for_comparison_cnt = 0}),
              CountersMatch());
}
292
// Comparing a raw_ptr against nullptr must use the dedicated operator==/!=
// overloads, which never unwrap the pointer.
TEST_F(RawPtrTest, NullCmpExplicit) {
  CountingRawPtr<int> ptr = nullptr;
  EXPECT_TRUE(ptr == nullptr);
  EXPECT_TRUE(nullptr == ptr);
  EXPECT_FALSE(ptr != nullptr);
  EXPECT_FALSE(nullptr != ptr);
  // No need to unwrap pointer, just compare against 0.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
307
// Boolean tests (`!ptr`, contextual conversion) must use operator bool, which
// never unwraps the pointer.
TEST_F(RawPtrTest, NullCmpBool) {
  CountingRawPtr<int> ptr = nullptr;
  EXPECT_FALSE(ptr);
  EXPECT_TRUE(!ptr);
  // No need to unwrap pointer, just compare against 0.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
320
// Sink used to force a conversion to bool at a call boundary (either directly
// via operator bool, or via operator T* followed by pointer-to-bool).
void FuncThatAcceptsBool(bool b) {}
322
// Null checks written so they hit |operator bool| directly, without going
// through the (counted) |operator T*| extraction path. The exact expressions
// are load-bearing for the counter expectations in BoolOpNotCast.
bool IsValidNoCast(CountingRawPtr<int> ptr) {
  return !!ptr;  // !! to avoid implicit cast
}
bool IsValidNoCast2(CountingRawPtr<int> ptr) {
  return ptr && true;
}
329
// Enumerates boolean-context uses of raw_ptr that must all resolve to
// |operator bool| — none of them may trigger extraction, dereference, or
// comparison counters.
TEST_F(RawPtrTest, BoolOpNotCast) {
  CountingRawPtr<int> ptr = nullptr;
  volatile bool is_valid = !!ptr;  // !! to avoid implicit cast
  is_valid = ptr || is_valid;      // volatile, so won't be optimized
  if (ptr) {
    is_valid = true;
  }
  [[maybe_unused]] bool is_not_valid = !ptr;
  if (!ptr) {
    is_not_valid = true;
  }
  std::ignore = IsValidNoCast(ptr);
  std::ignore = IsValidNoCast2(ptr);
  FuncThatAcceptsBool(!ptr);
  // No need to unwrap pointer, just compare against 0.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
352
// Returns via an implicit conversion chain that calls |operator T*| first and
// then converts T* to bool — the costlier path documented in CastNotBoolOp.
bool IsValidWithCast(CountingRawPtr<int> ptr) {
  return ptr;
}
356
// This test is mostly for documentation purposes. It demonstrates cases where
// |operator T*| is called first and then the pointer is converted to bool,
// as opposed to calling |operator bool| directly. The former may be more
// costly, so the caller has to be careful not to trigger this path.
TEST_F(RawPtrTest, CastNotBoolOp) {
  CountingRawPtr<int> ptr = nullptr;
  [[maybe_unused]] bool is_valid = ptr;
  is_valid = IsValidWithCast(ptr);
  FuncThatAcceptsBool(ptr);
  // One extraction per bool-via-T* conversion above.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 3,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
373
// operator*() must route through the dereference path exactly once.
TEST_F(RawPtrTest, StarDereference) {
  int foo = 42;
  CountingRawPtr<int> ptr = &foo;
  EXPECT_EQ(*ptr, 42);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 1,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
385
// operator->() must route through the dereference path exactly once.
TEST_F(RawPtrTest, ArrowDereference) {
  MyStruct foo = {42};
  CountingRawPtr<MyStruct> ptr = &foo;
  EXPECT_EQ(ptr->x, 42);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 1,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
397
// Deleting via ExtractAsDangling(): the value is duplicated into a MayDangle
// raw_ptr, then extracted once for the |delete| expression.
TEST_F(RawPtrTest, Delete) {
  CountingRawPtr<int> ptr = new int(42);
  delete ptr.ExtractAsDangling();
  // The pointer is first internally converted to MayDangle kind, then extracted
  // using implicit cast before passing to |delete|.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 1,
                  .get_for_comparison_cnt = 0,
                  .wrap_raw_ptr_for_dup_cnt = 1,
                  .get_for_duplication_cnt = 1,
              }),
              CountersMatch());
}
412
// ClearAndDelete() must release the wrapped pointer, free the pointee, and
// leave the raw_ptr null.
TEST_F(RawPtrTest, ClearAndDelete) {
  CountingRawPtr<int> ptr(new int);
  ptr.ClearAndDelete();

  // TODO(crbug.com/1346513): clang-format has a difficult time making
  // sense of preprocessor arms mixed with designated initializers.
  //
  // clang-format off
  EXPECT_THAT((CountingRawPtrExpectations{
                .wrap_raw_ptr_cnt = 1,
                .release_wrapped_ptr_cnt = 1,
                .get_for_dereference_cnt = 0,
                .get_for_extraction_cnt = 1,
                .wrapped_ptr_swap_cnt = 0,
              }),
              CountersMatch());
  // clang-format on
  EXPECT_EQ(ptr.get(), nullptr);
}
432
// Same as ClearAndDelete, but for array pointees (delete[] semantics).
TEST_F(RawPtrTest, ClearAndDeleteArray) {
  CountingRawPtr<int> ptr(new int[8]);
  ptr.ClearAndDeleteArray();

  // TODO(crbug.com/1346513): clang-format has a difficult time making
  // sense of preprocessor arms mixed with designated initializers.
  //
  // clang-format off
  EXPECT_THAT((CountingRawPtrExpectations{
                .wrap_raw_ptr_cnt = 1,
                .release_wrapped_ptr_cnt = 1,
                .get_for_dereference_cnt = 0,
                .get_for_extraction_cnt = 1,
                .wrapped_ptr_swap_cnt = 0,
              }),
              CountersMatch());
  // clang-format on
  EXPECT_EQ(ptr.get(), nullptr);
}
452
// ExtractAsDangling() on a non-dangling raw_ptr must release the source,
// duplicate the value into the MayDangle result, and null out the source.
TEST_F(RawPtrTest, ExtractAsDangling) {
  CountingRawPtr<int> ptr(new int);

  // Baseline: only the initial wrap has happened.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 1,
                  .release_wrapped_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .wrapped_ptr_swap_cnt = 0,
                  .wrap_raw_ptr_for_dup_cnt = 0,
                  .get_for_duplication_cnt = 0,
              }),
              CountersMatch());

  EXPECT_TRUE(ptr.get());

  CountingRawPtrMayDangle<int> dangling = ptr.ExtractAsDangling();

  // The extraction released the source and duplicated into `dangling`.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 1,
                  .release_wrapped_ptr_cnt = 1,
                  .get_for_dereference_cnt = 0,
                  .wrapped_ptr_swap_cnt = 0,
                  .wrap_raw_ptr_for_dup_cnt = 1,
                  .get_for_duplication_cnt = 1,
              }),
              CountersMatch());

  EXPECT_FALSE(ptr.get());
  EXPECT_TRUE(dangling.get());

  dangling.ClearAndDelete();
}
485
// ExtractAsDangling() on an already-MayDangle raw_ptr takes the cheaper
// move path: no duplication counters fire.
TEST_F(RawPtrTest, ExtractAsDanglingFromDangling) {
  CountingRawPtrMayDangle<int> ptr(new int);

  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 1,
                  .release_wrapped_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .wrapped_ptr_swap_cnt = 0,
                  .wrap_raw_ptr_for_dup_cnt = 0,
                  .get_for_duplication_cnt = 0,
              }),
              CountersMatch());

  CountingRawPtrMayDangle<int> dangling = ptr.ExtractAsDangling();

  // wrap_raw_ptr_cnt remains `1` because, as `ptr` is already a dangling
  // pointer, we are only moving `ptr` to `dangling` here to avoid extra cost.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 1,
                  .release_wrapped_ptr_cnt = 1,
                  .get_for_dereference_cnt = 0,
                  .wrapped_ptr_swap_cnt = 0,
                  .wrap_raw_ptr_for_dup_cnt = 0,
                  .get_for_duplication_cnt = 0,
              }),
              CountersMatch());

  dangling.ClearAndDelete();
}
515
// raw_ptr must support cv-qualified void pointees; a static_cast back to a
// concrete type goes through the extraction path.
TEST_F(RawPtrTest, ConstVolatileVoidPtr) {
  int32_t foo[] = {1234567890};
  CountingRawPtr<const volatile void> ptr = foo;
  EXPECT_EQ(*static_cast<const volatile int32_t*>(ptr), 1234567890);
  // Because we're using a cast, the extraction API kicks in, which doesn't
  // know if the extracted pointer will be dereferenced or not.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 1,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
529
// Same as ConstVolatileVoidPtr, for a plain void pointee.
TEST_F(RawPtrTest, VoidPtr) {
  int32_t foo[] = {1234567890};
  CountingRawPtr<void> ptr = foo;
  EXPECT_EQ(*static_cast<int32_t*>(ptr), 1234567890);
  // Because we're using a cast, the extraction API kicks in, which doesn't
  // know if the extracted pointer will be dereferenced or not.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 1,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
543
// operator== must go through the comparison path, never extraction.
// Expected count: one get_for_comparison per raw_ptr operand — 7 comparisons
// below with 12 raw_ptr operands in total.
TEST_F(RawPtrTest, OperatorEQ) {
  int foo;
  CountingRawPtr<int> ptr1 = nullptr;
  EXPECT_TRUE(ptr1 == ptr1);

  CountingRawPtr<int> ptr2 = nullptr;
  EXPECT_TRUE(ptr1 == ptr2);

  CountingRawPtr<int> ptr3 = &foo;
  EXPECT_TRUE(&foo == ptr3);
  EXPECT_TRUE(ptr3 == &foo);
  EXPECT_FALSE(ptr1 == ptr3);

  ptr1 = &foo;
  EXPECT_TRUE(ptr1 == ptr3);
  EXPECT_TRUE(ptr3 == ptr1);

  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 12,
              }),
              CountersMatch());
}
568
// operator!= must go through the comparison path, never extraction.
// Mirrors OperatorEQ: 7 comparisons with 12 raw_ptr operands in total.
TEST_F(RawPtrTest, OperatorNE) {
  int foo;
  CountingRawPtr<int> ptr1 = nullptr;
  EXPECT_FALSE(ptr1 != ptr1);

  CountingRawPtr<int> ptr2 = nullptr;
  EXPECT_FALSE(ptr1 != ptr2);

  CountingRawPtr<int> ptr3 = &foo;
  EXPECT_FALSE(&foo != ptr3);
  EXPECT_FALSE(ptr3 != &foo);
  EXPECT_TRUE(ptr1 != ptr3);

  ptr1 = &foo;
  EXPECT_FALSE(ptr1 != ptr3);
  EXPECT_FALSE(ptr3 != ptr1);

  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 12,
              }),
              CountersMatch());
}
593
// Mixed-cv and void/non-void comparisons must all resolve to operator==
// rather than falling back to the cast operator. 12 comparisons with 16
// raw_ptr operands in total.
TEST_F(RawPtrTest, OperatorEQCast) {
  int foo = 42;
  const int* raw_int_ptr = &foo;
  volatile void* raw_void_ptr = &foo;
  CountingRawPtr<volatile int> checked_int_ptr = &foo;
  CountingRawPtr<const void> checked_void_ptr = &foo;
  EXPECT_TRUE(checked_int_ptr == checked_int_ptr);
  EXPECT_TRUE(checked_int_ptr == raw_int_ptr);
  EXPECT_TRUE(raw_int_ptr == checked_int_ptr);
  EXPECT_TRUE(checked_void_ptr == checked_void_ptr);
  EXPECT_TRUE(checked_void_ptr == raw_void_ptr);
  EXPECT_TRUE(raw_void_ptr == checked_void_ptr);
  EXPECT_TRUE(checked_int_ptr == checked_void_ptr);
  EXPECT_TRUE(checked_int_ptr == raw_void_ptr);
  EXPECT_TRUE(raw_int_ptr == checked_void_ptr);
  EXPECT_TRUE(checked_void_ptr == checked_int_ptr);
  EXPECT_TRUE(checked_void_ptr == raw_int_ptr);
  EXPECT_TRUE(raw_void_ptr == checked_int_ptr);
  // Make sure that all cases are handled by operator== (faster) and none by the
  // cast operator (slower).
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 16,
              }),
              CountersMatch());
}
621
// Cross-hierarchy comparisons (Derived vs its two bases) must resolve to
// operator== and correctly apply the Base2 pointer-adjustment.
TEST_F(RawPtrTest, OperatorEQCastHierarchy) {
  Derived derived_val(42, 84, 1024);
  Derived* raw_derived_ptr = &derived_val;
  const Base1* raw_base1_ptr = &derived_val;
  volatile Base2* raw_base2_ptr = &derived_val;
  // Double check the basic understanding of pointers: Even though the numeric
  // value (i.e. the address) isn't equal, the pointers are still equal. That's
  // because from derived to base adjusts the address.
  // raw_ptr must behave the same, which is checked below.
  ASSERT_NE(reinterpret_cast<uintptr_t>(raw_base2_ptr),
            reinterpret_cast<uintptr_t>(raw_derived_ptr));
  ASSERT_TRUE(raw_base2_ptr == raw_derived_ptr);

  CountingRawPtr<const volatile Derived> checked_derived_ptr = &derived_val;
  CountingRawPtr<volatile Base1> checked_base1_ptr = &derived_val;
  CountingRawPtr<const Base2> checked_base2_ptr = &derived_val;
  EXPECT_TRUE(checked_derived_ptr == checked_derived_ptr);
  EXPECT_TRUE(checked_derived_ptr == raw_derived_ptr);
  EXPECT_TRUE(raw_derived_ptr == checked_derived_ptr);
  EXPECT_TRUE(checked_derived_ptr == checked_base1_ptr);
  EXPECT_TRUE(checked_derived_ptr == raw_base1_ptr);
  EXPECT_TRUE(raw_derived_ptr == checked_base1_ptr);
  EXPECT_TRUE(checked_base1_ptr == checked_derived_ptr);
  EXPECT_TRUE(checked_base1_ptr == raw_derived_ptr);
  EXPECT_TRUE(raw_base1_ptr == checked_derived_ptr);
  // |base2_ptr| points to the second base class of |derived|, so will be
  // located at an offset. While the stored raw uintptr_t values shouldn't
  // match, ensure that the internal pointer manipulation correctly offsets
  // when casting up and down the class hierarchy.
  EXPECT_NE(reinterpret_cast<uintptr_t>(checked_base2_ptr.get()),
            reinterpret_cast<uintptr_t>(checked_derived_ptr.get()));
  EXPECT_NE(reinterpret_cast<uintptr_t>(raw_base2_ptr),
            reinterpret_cast<uintptr_t>(checked_derived_ptr.get()));
  EXPECT_NE(reinterpret_cast<uintptr_t>(checked_base2_ptr.get()),
            reinterpret_cast<uintptr_t>(raw_derived_ptr));
  EXPECT_TRUE(checked_derived_ptr == checked_base2_ptr);
  EXPECT_TRUE(checked_derived_ptr == raw_base2_ptr);
  EXPECT_TRUE(raw_derived_ptr == checked_base2_ptr);
  EXPECT_TRUE(checked_base2_ptr == checked_derived_ptr);
  EXPECT_TRUE(checked_base2_ptr == raw_derived_ptr);
  EXPECT_TRUE(raw_base2_ptr == checked_derived_ptr);
  // Make sure that all cases are handled by operator== (faster) and none by the
  // cast operator (slower).
  // The 4 extractions come from .get() checks, that compare raw addresses.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 4,
                  .get_for_comparison_cnt = 20,
              }),
              CountersMatch());
}
673
// Mirror of OperatorEQCast for operator!=.
TEST_F(RawPtrTest, OperatorNECast) {
  int foo = 42;
  volatile int* raw_int_ptr = &foo;
  const void* raw_void_ptr = &foo;
  CountingRawPtr<const int> checked_int_ptr = &foo;
  CountingRawPtr<volatile void> checked_void_ptr = &foo;
  EXPECT_FALSE(checked_int_ptr != checked_int_ptr);
  EXPECT_FALSE(checked_int_ptr != raw_int_ptr);
  EXPECT_FALSE(raw_int_ptr != checked_int_ptr);
  EXPECT_FALSE(checked_void_ptr != checked_void_ptr);
  EXPECT_FALSE(checked_void_ptr != raw_void_ptr);
  EXPECT_FALSE(raw_void_ptr != checked_void_ptr);
  EXPECT_FALSE(checked_int_ptr != checked_void_ptr);
  EXPECT_FALSE(checked_int_ptr != raw_void_ptr);
  EXPECT_FALSE(raw_int_ptr != checked_void_ptr);
  EXPECT_FALSE(checked_void_ptr != checked_int_ptr);
  EXPECT_FALSE(checked_void_ptr != raw_int_ptr);
  EXPECT_FALSE(raw_void_ptr != checked_int_ptr);
  // Make sure that all cases are handled by operator!= (faster) and none by the
  // cast operator (slower).
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 16,
              }),
              CountersMatch());
}
701
// Mirror of OperatorEQCastHierarchy for operator!=.
TEST_F(RawPtrTest, OperatorNECastHierarchy) {
  Derived derived_val(42, 84, 1024);
  const Derived* raw_derived_ptr = &derived_val;
  volatile Base1* raw_base1_ptr = &derived_val;
  const Base2* raw_base2_ptr = &derived_val;
  CountingRawPtr<volatile Derived> checked_derived_ptr = &derived_val;
  CountingRawPtr<const Base1> checked_base1_ptr = &derived_val;
  CountingRawPtr<const volatile Base2> checked_base2_ptr = &derived_val;
  EXPECT_FALSE(checked_derived_ptr != checked_derived_ptr);
  EXPECT_FALSE(checked_derived_ptr != raw_derived_ptr);
  EXPECT_FALSE(raw_derived_ptr != checked_derived_ptr);
  EXPECT_FALSE(checked_derived_ptr != checked_base1_ptr);
  EXPECT_FALSE(checked_derived_ptr != raw_base1_ptr);
  EXPECT_FALSE(raw_derived_ptr != checked_base1_ptr);
  EXPECT_FALSE(checked_base1_ptr != checked_derived_ptr);
  EXPECT_FALSE(checked_base1_ptr != raw_derived_ptr);
  EXPECT_FALSE(raw_base1_ptr != checked_derived_ptr);
  // |base2_ptr| points to the second base class of |derived|, so will be
  // located at an offset. While the stored raw uintptr_t values shouldn't
  // match, ensure that the internal pointer manipulation correctly offsets
  // when casting up and down the class hierarchy.
  EXPECT_NE(reinterpret_cast<uintptr_t>(checked_base2_ptr.get()),
            reinterpret_cast<uintptr_t>(checked_derived_ptr.get()));
  EXPECT_NE(reinterpret_cast<uintptr_t>(raw_base2_ptr),
            reinterpret_cast<uintptr_t>(checked_derived_ptr.get()));
  EXPECT_NE(reinterpret_cast<uintptr_t>(checked_base2_ptr.get()),
            reinterpret_cast<uintptr_t>(raw_derived_ptr));
  EXPECT_FALSE(checked_derived_ptr != checked_base2_ptr);
  EXPECT_FALSE(checked_derived_ptr != raw_base2_ptr);
  EXPECT_FALSE(raw_derived_ptr != checked_base2_ptr);
  EXPECT_FALSE(checked_base2_ptr != checked_derived_ptr);
  EXPECT_FALSE(checked_base2_ptr != raw_derived_ptr);
  EXPECT_FALSE(raw_base2_ptr != checked_derived_ptr);
  // Make sure that all cases are handled by operator!= (faster) and none by the
  // cast operator (slower).
  // The 4 extractions come from .get() checks, that compare raw addresses.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 4,
                  .get_for_comparison_cnt = 20,
              }),
              CountersMatch());
}
745
// Exercises the full matrix of implicit and static_cast conversions between
// raw_ptr and raw pointers: up/down the class hierarchy, adding const and
// volatile qualifiers, and through void*.
TEST_F(RawPtrTest, Cast) {
  Derived derived_val(42, 84, 1024);
  raw_ptr<Derived> checked_derived_ptr = &derived_val;
  // Implicit upcasts raw_ptr<Derived> -> Base1* / Base2*.
  Base1* raw_base1_ptr = checked_derived_ptr;
  EXPECT_EQ(raw_base1_ptr->b1, 42);
  Base2* raw_base2_ptr = checked_derived_ptr;
  EXPECT_EQ(raw_base2_ptr->b2, 84);

  // Raw downcasts back to Derived* (both bases).
  Derived* raw_derived_ptr = static_cast<Derived*>(raw_base1_ptr);
  EXPECT_EQ(raw_derived_ptr->b1, 42);
  EXPECT_EQ(raw_derived_ptr->b2, 84);
  EXPECT_EQ(raw_derived_ptr->d, 1024);
  raw_derived_ptr = static_cast<Derived*>(raw_base2_ptr);
  EXPECT_EQ(raw_derived_ptr->b1, 42);
  EXPECT_EQ(raw_derived_ptr->b2, 84);
  EXPECT_EQ(raw_derived_ptr->d, 1024);

  // Upcasts into raw_ptr<Base*> from a raw Derived*.
  raw_ptr<Base1> checked_base1_ptr = raw_derived_ptr;
  EXPECT_EQ(checked_base1_ptr->b1, 42);
  raw_ptr<Base2> checked_base2_ptr = raw_derived_ptr;
  EXPECT_EQ(checked_base2_ptr->b2, 84);

  // Downcasts from raw_ptr<Base> back into raw_ptr<Derived>.
  raw_ptr<Derived> checked_derived_ptr2 =
      static_cast<Derived*>(checked_base1_ptr);
  EXPECT_EQ(checked_derived_ptr2->b1, 42);
  EXPECT_EQ(checked_derived_ptr2->b2, 84);
  EXPECT_EQ(checked_derived_ptr2->d, 1024);
  checked_derived_ptr2 = static_cast<Derived*>(checked_base2_ptr);
  EXPECT_EQ(checked_derived_ptr2->b1, 42);
  EXPECT_EQ(checked_derived_ptr2->b2, 84);
  EXPECT_EQ(checked_derived_ptr2->d, 1024);

  // Adding const, in both directions.
  const Derived* raw_const_derived_ptr = checked_derived_ptr2;
  EXPECT_EQ(raw_const_derived_ptr->b1, 42);
  EXPECT_EQ(raw_const_derived_ptr->b2, 84);
  EXPECT_EQ(raw_const_derived_ptr->d, 1024);

  raw_ptr<const Derived> checked_const_derived_ptr = raw_const_derived_ptr;
  EXPECT_EQ(checked_const_derived_ptr->b1, 42);
  EXPECT_EQ(checked_const_derived_ptr->b2, 84);
  EXPECT_EQ(checked_const_derived_ptr->d, 1024);

  const Derived* raw_const_derived_ptr2 = checked_const_derived_ptr;
  EXPECT_EQ(raw_const_derived_ptr2->b1, 42);
  EXPECT_EQ(raw_const_derived_ptr2->b2, 84);
  EXPECT_EQ(raw_const_derived_ptr2->d, 1024);

  raw_ptr<const Derived> checked_const_derived_ptr2 = raw_derived_ptr;
  EXPECT_EQ(checked_const_derived_ptr2->b1, 42);
  EXPECT_EQ(checked_const_derived_ptr2->b2, 84);
  EXPECT_EQ(checked_const_derived_ptr2->d, 1024);

  raw_ptr<const Derived> checked_const_derived_ptr3 = checked_derived_ptr2;
  EXPECT_EQ(checked_const_derived_ptr3->b1, 42);
  EXPECT_EQ(checked_const_derived_ptr3->b2, 84);
  EXPECT_EQ(checked_const_derived_ptr3->d, 1024);

  // Adding volatile.
  volatile Derived* raw_volatile_derived_ptr = checked_derived_ptr2;
  EXPECT_EQ(raw_volatile_derived_ptr->b1, 42);
  EXPECT_EQ(raw_volatile_derived_ptr->b2, 84);
  EXPECT_EQ(raw_volatile_derived_ptr->d, 1024);

  raw_ptr<volatile Derived> checked_volatile_derived_ptr =
      raw_volatile_derived_ptr;
  EXPECT_EQ(checked_volatile_derived_ptr->b1, 42);
  EXPECT_EQ(checked_volatile_derived_ptr->b2, 84);
  EXPECT_EQ(checked_volatile_derived_ptr->d, 1024);

  // Round-trip through void* in all four raw/checked combinations.
  void* raw_void_ptr = checked_derived_ptr;
  raw_ptr<void> checked_void_ptr = raw_derived_ptr;
  raw_ptr<Derived> checked_derived_ptr3 = static_cast<Derived*>(raw_void_ptr);
  raw_ptr<Derived> checked_derived_ptr4 =
      static_cast<Derived*>(checked_void_ptr);
  EXPECT_EQ(checked_derived_ptr3->b1, 42);
  EXPECT_EQ(checked_derived_ptr3->b2, 84);
  EXPECT_EQ(checked_derived_ptr3->d, 1024);
  EXPECT_EQ(checked_derived_ptr4->b1, 42);
  EXPECT_EQ(checked_derived_ptr4->b2, 84);
  EXPECT_EQ(checked_derived_ptr4->d, 1024);
}
826
// raw_ptr<Derived> must be usable wherever a pointer to an accessible base
// class is expected, via copy and move construction as well as assignment,
// and the upcast result must compare equal to the source pointer.
TEST_F(RawPtrTest, UpcastConvertible) {
  {
    Derived instance(42, 84, 1024);
    raw_ptr<Derived> derived = &instance;

    // Copy-constructed upcasts.
    raw_ptr<Base1> base1(derived);
    EXPECT_EQ(base1->b1, 42);
    raw_ptr<Base2> base2(derived);
    EXPECT_EQ(base2->b2, 84);

    // Copy-assigned upcasts.
    base1 = derived;
    EXPECT_EQ(base1->b1, 42);
    base2 = derived;
    EXPECT_EQ(base2->b2, 84);

    // Upcasting must not change equality.
    EXPECT_EQ(base1, derived);
    EXPECT_EQ(base2, derived);
  }

  {
    Derived instance(42, 84, 1024);
    raw_ptr<Derived> source1 = &instance;
    raw_ptr<Derived> source2 = &instance;
    raw_ptr<Derived> source3 = &instance;
    raw_ptr<Derived> source4 = &instance;

    // Move-constructed upcasts.
    raw_ptr<Base1> base1(std::move(source1));
    EXPECT_EQ(base1->b1, 42);
    raw_ptr<Base2> base2(std::move(source2));
    EXPECT_EQ(base2->b2, 84);

    // Move-assigned upcasts.
    base1 = std::move(source3);
    EXPECT_EQ(base1->b1, 42);
    base2 = std::move(source4);
    EXPECT_EQ(base2->b2, 84);
  }
}
864
// Conversions that are ill-formed for plain pointers (private base,
// unrelated types, void* downcast, distinct integral pointee types) must be
// equally rejected for raw_ptr.
TEST_F(RawPtrTest, UpcastNotConvertible) {
  class LocalBase {};
  class PrivatelyDerived : private LocalBase {};
  class Stranger {};
  EXPECT_FALSE(
      (std::is_convertible_v<raw_ptr<PrivatelyDerived>, raw_ptr<LocalBase>>));
  EXPECT_FALSE((std::is_convertible_v<raw_ptr<Stranger>, raw_ptr<LocalBase>>));
  EXPECT_FALSE((std::is_convertible_v<raw_ptr<Stranger>, raw_ptr<void>>));
  EXPECT_FALSE((std::is_convertible_v<raw_ptr<void>, raw_ptr<Stranger>>));
  EXPECT_FALSE((std::is_convertible_v<raw_ptr<int64_t>, raw_ptr<int32_t>>));
  EXPECT_FALSE((std::is_convertible_v<raw_ptr<int16_t>, raw_ptr<int32_t>>));
}
876
// Upcasts — copy and move, construction and assignment — must be free of any
// Get*() traffic on the underlying pointer.
TEST_F(RawPtrTest, UpcastPerformance) {
  {
    // Copy-based upcasts.
    Derived derived_val(42, 84, 1024);
    CountingRawPtr<Derived> checked_derived_ptr = &derived_val;
    CountingRawPtr<Base1> checked_base1_ptr(checked_derived_ptr);
    CountingRawPtr<Base2> checked_base2_ptr(checked_derived_ptr);
    checked_base1_ptr = checked_derived_ptr;
    checked_base2_ptr = checked_derived_ptr;
  }

  {
    // Move-based upcasts.
    Derived derived_val(42, 84, 1024);
    CountingRawPtr<Derived> checked_derived_ptr = &derived_val;
    CountingRawPtr<Base1> checked_base1_ptr(std::move(checked_derived_ptr));
    CountingRawPtr<Base2> checked_base2_ptr(std::move(checked_derived_ptr));
    checked_base1_ptr = std::move(checked_derived_ptr);
    checked_base2_ptr = std::move(checked_derived_ptr);
  }

  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
903
// swap() found via ADL must dispatch to raw_ptr's custom swap, which the
// counting impl records exactly once.
TEST_F(RawPtrTest, CustomSwap) {
  int foo1, foo2;
  CountingRawPtr<int> ptr1(&foo1);
  CountingRawPtr<int> ptr2(&foo2);
  // Recommended use pattern.
  using std::swap;
  swap(ptr1, ptr2);
  EXPECT_EQ(ptr1.get(), &foo2);
  EXPECT_EQ(ptr2.get(), &foo1);
  // Exactly one call to the custom swap.
  EXPECT_EQ(RawPtrCountingImpl::wrapped_ptr_swap_cnt, 1);
}
915
// Fully-qualified std::swap does not find raw_ptr's custom swap; it falls
// back to the generic move-based version, so the custom-swap counter stays 0.
TEST_F(RawPtrTest, StdSwap) {
  int foo1, foo2;
  CountingRawPtr<int> ptr1(&foo1);
  CountingRawPtr<int> ptr2(&foo2);
  std::swap(ptr1, ptr2);
  EXPECT_EQ(ptr1.get(), &foo2);
  EXPECT_EQ(ptr2.get(), &foo1);
  EXPECT_EQ(RawPtrCountingImpl::wrapped_ptr_swap_cnt, 0);
}
925
// Post-increment must walk the array; each iteration dereferences once
// (4 total), with no extraction or comparison traffic.
TEST_F(RawPtrTest, PostIncrementOperator) {
  std::vector<int> foo({42, 43, 44, 45});
  CountingRawPtr<int> ptr = &foo[0];
  for (int i = 0; i < 4; ++i) {
    ASSERT_EQ(*ptr++, 42 + i);
  }
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 4,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
939
// Post-decrement counterpart: 3 dereferences in the loop plus the final
// check make 4 in total.
TEST_F(RawPtrTest, PostDecrementOperator) {
  std::vector<int> foo({42, 43, 44, 45});
  CountingRawPtr<int> ptr = &foo[3];
  // Avoid decrementing out of the slot holding the vector's backing store.
  for (int i = 3; i > 0; --i) {
    ASSERT_EQ(*ptr--, 42 + i);
  }
  ASSERT_EQ(*ptr, 42);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 4,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
955
// Pre-increment must walk the array: one dereference per element (4 total),
// nothing else.
TEST_F(RawPtrTest, PreIncrementOperator) {
  std::vector<int> foo({42, 43, 44, 45});
  CountingRawPtr<int> ptr = &foo[0];
  for (int i = 0; i < 4; ++i, ++ptr) {
    ASSERT_EQ(*ptr, 42 + i);
  }
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 4,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
969
// Pre-decrement counterpart: 3 dereferences in the loop plus the final
// check make 4 in total.
TEST_F(RawPtrTest, PreDecrementOperator) {
  std::vector<int> foo({42, 43, 44, 45});
  CountingRawPtr<int> ptr = &foo[3];
  // Avoid decrementing out of the slot holding the vector's backing store.
  for (int i = 3; i > 0; --i, --ptr) {
    ASSERT_EQ(*ptr, 42 + i);
  }
  ASSERT_EQ(*ptr, 42);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 4,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
985
// operator+= must step by the given element count; the two loop iterations
// (i = 0, 2) produce exactly two dereferences.
TEST_F(RawPtrTest, PlusEqualOperator) {
  std::vector<int> foo({42, 43, 44, 45});
  CountingRawPtr<int> ptr = &foo[0];
  for (int i = 0; i < 4; i += 2, ptr += 2) {
    ASSERT_EQ(*ptr, 42 + i);
  }
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 2,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
999
// operator+= and operator-= must accept a variety of integral operand
// types, signed and unsigned alike.
TEST_F(RawPtrTest, PlusEqualOperatorTypes) {
  std::vector<int> values{42, 43, 44, 45};
  CountingRawPtr<int> cursor = &values[0];
  ASSERT_EQ(*cursor, 42);
  cursor += 2;  // Positive int literal.
  ASSERT_EQ(*cursor, 44);
  cursor -= 2;  // Negative direction via int literal.
  ASSERT_EQ(*cursor, 42);
  cursor += ptrdiff_t{1};  // Signed standard type.
  ASSERT_EQ(*cursor, 43);
  cursor += size_t{2};  // Unsigned standard type.
  ASSERT_EQ(*cursor, 45);
}
1013
// operator-= must step back by the given element count; the two ASSERTs
// produce exactly two dereferences.
TEST_F(RawPtrTest, MinusEqualOperator) {
  std::vector<int> foo({42, 43, 44, 45});
  CountingRawPtr<int> ptr = &foo[3];
  ASSERT_EQ(*ptr, 45);
  ptr -= 2;
  ASSERT_EQ(*ptr, 43);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 2,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
1027
// operator-= must accept a variety of integral operand types; subtracting a
// negative amount moves forward.
TEST_F(RawPtrTest, MinusEqualOperatorTypes) {
  int values[] = {42, 43, 44, 45};
  CountingRawPtr<int> cursor = &values[3];
  ASSERT_EQ(*cursor, 45);
  cursor -= 2;  // Positive int literal.
  ASSERT_EQ(*cursor, 43);
  cursor -= -2;  // Negative int literal (moves forward).
  ASSERT_EQ(*cursor, 45);
  cursor -= ptrdiff_t{2};  // Signed standard type.
  ASSERT_EQ(*cursor, 43);
  cursor -= size_t{1};  // Unsigned standard type.
  ASSERT_EQ(*cursor, 42);
}
1041
// Binary operator+ must produce the i-th element; 4 dereferences, no
// extraction or comparison traffic.
TEST_F(RawPtrTest, PlusOperator) {
  int foo[] = {42, 43, 44, 45};
  CountingRawPtr<int> ptr = foo;
  for (int i = 0; i < 4; ++i) {
    ASSERT_EQ(*(ptr + i), 42 + i);
  }
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 4,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
1055
// Binary operator- starting from the one-past-the-end address must yield
// the right elements; 4 dereferences, nothing else.
TEST_F(RawPtrTest, MinusOperator) {
  int foo[] = {42, 43, 44, 45};
  // &foo[4] is the one-past-the-end address; it is only used as an
  // arithmetic base, never dereferenced directly.
  CountingRawPtr<int> ptr = &foo[4];
  for (int i = 1; i <= 4; ++i) {
    ASSERT_EQ(*(ptr - i), 46 - i);
  }
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 4,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
1069
// Pointer difference — raw_ptr vs raw_ptr and mixed with plain pointers —
// must return element distances without any Get*() traffic at all.
TEST_F(RawPtrTest, MinusDeltaOperator) {
  int foo[] = {42, 43, 44, 45};
  // The last entry holds the one-past-the-end address; valid for arithmetic.
  CountingRawPtr<int> ptrs[] = {&foo[0], &foo[1], &foo[2], &foo[3], &foo[4]};
  for (int i = 0; i <= 4; ++i) {
    for (int j = 0; j <= 4; ++j) {
      ASSERT_EQ(ptrs[i] - ptrs[j], i - j);
      ASSERT_EQ(ptrs[i] - &foo[j], i - j);
      ASSERT_EQ(&foo[i] - ptrs[j], i - j);
    }
  }
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
1087
// Pre-increment must walk a C string one character at a time: one
// dereference per character of "Hello" (5 total).
TEST_F(RawPtrTest, AdvanceString) {
  const char kChars[] = "Hello";
  std::string str = kChars;
  CountingRawPtr<const char> ptr = str.c_str();
  for (size_t i = 0; i < str.size(); ++i, ++ptr) {
    ASSERT_EQ(*ptr, kChars[i]);
  }
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 5,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
1102
// Assigning nullptr must not go through WrapRawPtr (or any Get*() path).
TEST_F(RawPtrTest, AssignmentFromNullptr) {
  CountingRawPtr<int> wrapped_ptr;
  wrapped_ptr = nullptr;
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
1114
// Helper that takes a raw_ptr by value and verifies it holds (and
// dereferences to) the same value as the expected plain pointer.
void FunctionWithRawPtrParameter(raw_ptr<int> actual_ptr, int* expected_ptr) {
  EXPECT_EQ(actual_ptr.get(), expected_ptr);
  EXPECT_EQ(*actual_ptr, *expected_ptr);
}
1119
1120 // This test checks that raw_ptr<T> can be passed by value into function
1121 // parameters. This is mostly a smoke test for TRIVIAL_ABI attribute.
// Passing a temporary raw_ptr by value: the temporary is moved into the
// callee's parameter (smoke test for the TRIVIAL_ABI attribute).
TEST_F(RawPtrTest, FunctionParameters_ImplicitlyMovedTemporary) {
  int value = 123;
  FunctionWithRawPtrParameter(raw_ptr<int>(&value), &value);
}
1128
1129 // This test checks that raw_ptr<T> can be passed by value into function
1130 // parameters. This is mostly a smoke test for TRIVIAL_ABI attribute.
// Passing an explicitly std::move()d lvalue raw_ptr by value (smoke test
// for the TRIVIAL_ABI attribute).
TEST_F(RawPtrTest, FunctionParameters_ExplicitlyMovedLValue) {
  int value = 123;
  raw_ptr<int> wrapped(&value);
  FunctionWithRawPtrParameter(std::move(wrapped), &value);
}
1136
1137 // This test checks that raw_ptr<T> can be passed by value into function
1138 // parameters. This is mostly a smoke test for TRIVIAL_ABI attribute.
// Passing an lvalue raw_ptr by value: the argument is copied into the
// callee's parameter (smoke test for the TRIVIAL_ABI attribute).
TEST_F(RawPtrTest, FunctionParameters_Copy) {
  int value = 123;
  raw_ptr<int> wrapped(&value);
  FunctionWithRawPtrParameter(wrapped, &value);
}
1145
// std::set insertion and lookup must compare elements via GetForComparison()
// (and the custom std::less hook), never via dereference or extraction.
TEST_F(RawPtrTest, SetLookupUsesGetForComparison) {
  int x = 123;
  CountingRawPtr<int> ptr(&x);
  std::set<CountingRawPtr<int>> set;

  RawPtrCountingImpl::ClearCounters();
  set.emplace(&x);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 1,
                  // Nothing to compare to yet.
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
                  .wrapped_ptr_less_cnt = 0,
              }),
              CountersMatch());

  RawPtrCountingImpl::ClearCounters();
  set.emplace(ptr);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  // 2 items to compare to => 4 calls.
                  .get_for_comparison_cnt = 4,
                  // 1 element to compare to => 2 calls.
                  .wrapped_ptr_less_cnt = 2,
              }),
              CountersMatch());

  RawPtrCountingImpl::ClearCounters();
  set.count(&x);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  // 2 comparisons => 2 extractions. Less than before, because
                  // this time a raw pointer is one side of the comparison.
                  .get_for_comparison_cnt = 2,
                  // 2 items to compare to => 4 calls.
                  .wrapped_ptr_less_cnt = 2,
              }),
              CountersMatch());

  RawPtrCountingImpl::ClearCounters();
  set.count(ptr);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  // 2 comparisons => 4 extractions.
                  .get_for_comparison_cnt = 4,
                  // 2 items to compare to => 4 calls.
                  .wrapped_ptr_less_cnt = 2,
              }),
              CountersMatch());
}
1203
// Relational operators must use GetForComparison() once per raw_ptr operand:
// 8 calls when both sides are raw_ptr (4 ops x 2 sides), 4 calls when one
// side is a plain pointer.
TEST_F(RawPtrTest, ComparisonOperatorUsesGetForComparison) {
  int x = 123;
  CountingRawPtr<int> ptr(&x);

  RawPtrCountingImpl::ClearCounters();
  EXPECT_FALSE(ptr < ptr);
  EXPECT_FALSE(ptr > ptr);
  EXPECT_TRUE(ptr <= ptr);
  EXPECT_TRUE(ptr >= ptr);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 8,
                  // < is used directly, not std::less().
                  .wrapped_ptr_less_cnt = 0,
              }),
              CountersMatch());

  RawPtrCountingImpl::ClearCounters();
  EXPECT_FALSE(ptr < &x);
  EXPECT_FALSE(ptr > &x);
  EXPECT_TRUE(ptr <= &x);
  EXPECT_TRUE(ptr >= &x);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 4,
                  .wrapped_ptr_less_cnt = 0,
              }),
              CountersMatch());

  RawPtrCountingImpl::ClearCounters();
  EXPECT_FALSE(&x < ptr);
  EXPECT_FALSE(&x > ptr);
  EXPECT_TRUE(&x <= ptr);
  EXPECT_TRUE(&x >= ptr);
  EXPECT_THAT((CountingRawPtrExpectations{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 4,
                  .wrapped_ptr_less_cnt = 0,
              }),
              CountersMatch());
}
1251
1252 // Two `raw_ptr`s with different Traits should still hit `GetForComparison()`
1253 // (as opposed to `GetForExtraction()`) in their comparison operators. We use
1254 // `CountingRawPtr` and `CountingRawPtrMayDangle` to contrast two different
1255 // Traits.
TEST_F(RawPtrTest, OperatorsUseGetForComparison) {
  int x = 123;
  CountingRawPtr<int> ptr1 = &x;
  CountingRawPtrMayDangle<int> ptr2 = &x;

  RawPtrCountingImpl::ClearCounters();

  EXPECT_TRUE(ptr1 == ptr2);
  EXPECT_FALSE(ptr1 != ptr2);
  // 2 equality ops x 2 raw_ptr sides => 4 comparison calls, 0 extractions.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 4,
              }),
              CountersMatch());

  EXPECT_FALSE(ptr1 < ptr2);
  EXPECT_FALSE(ptr1 > ptr2);
  EXPECT_TRUE(ptr1 <= ptr2);
  EXPECT_TRUE(ptr1 >= ptr2);
  // Counters are cumulative here: 4 relational ops x 2 sides => 8 more
  // comparison calls on top of the 4 above.
  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 12,
              }),
              CountersMatch());
}
1281
1282 // This test checks how the std library handles collections like
1283 // std::vector<raw_ptr<T>>.
1284 //
1285 // When this test is written, reallocating std::vector's storage (e.g.
1286 // when growing the vector) requires calling raw_ptr's destructor on the
1287 // old storage (after std::move-ing the data to the new storage). In
1288 // the future we hope that TRIVIAL_ABI (or [trivially_relocatable]]
1289 // proposed by P1144 [1]) will allow memcpy-ing the elements into the
1290 // new storage (without invoking destructors and move constructors
1291 // and/or move assignment operators). At that point, the assert in the
1292 // test should be modified to capture the new, better behavior.
1293 //
1294 // In the meantime, this test serves as a basic correctness test that
1295 // ensures that raw_ptr<T> stored in a std::vector passes basic smoke
1296 // tests.
1297 //
1298 // [1]
1299 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2020/p1144r5.html#wording-attribute
TEST_F(RawPtrTest, TrivialRelocability) {
  std::vector<CountingRawPtr<int>> vector;
  int x = 123;

  // See how many times raw_ptr's destructor is called when std::vector
  // needs to increase its capacity and reallocate the internal vector
  // storage (moving the raw_ptr elements).
  RawPtrCountingImpl::ClearCounters();
  size_t number_of_capacity_changes = 0;
  do {
    size_t previous_capacity = vector.capacity();
    while (vector.capacity() == previous_capacity) {
      vector.emplace_back(&x);
    }
    number_of_capacity_changes++;
  } while (number_of_capacity_changes < 10);
#if BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) || \
    BUILDFLAG(USE_ASAN_UNOWNED_PTR) || BUILDFLAG(USE_HOOKABLE_RAW_PTR) || \
    BUILDFLAG(RAW_PTR_ZERO_ON_DESTRUCT)
  // TODO(lukasza): In the future (once C++ language and std library
  // support custom trivially relocatable objects) this #if branch can
  // be removed (keeping only the right long-term expectation from the
  // #else branch).
  EXPECT_NE(0, RawPtrCountingImpl::release_wrapped_ptr_cnt);
#else
  // This is the right long-term expectation.
  //
  // (This EXPECT_EQ assertion is slightly misleading when NoOpImpl is used,
  // because, unless zeroing is requested, it forces raw_ptr<> to use a default
  // destructor that doesn't go through RawPtrCountingImpl::ReleaseWrappedPtr,
  // so we can't really depend on `g_release_wrapped_ptr_cnt`. Nevertheless, the
  // spirit of the EXPECT_EQ is correct + the assertion should be true in the
  // long-term.)
  EXPECT_EQ(0, RawPtrCountingImpl::release_wrapped_ptr_cnt);
#endif  // BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) ||
        // BUILDFLAG(USE_ASAN_UNOWNED_PTR) ||
        // BUILDFLAG(USE_HOOKABLE_RAW_PTR) ||
        // BUILDFLAG(RAW_PTR_ZERO_ON_DESTRUCT)

  // Basic smoke test that raw_ptr elements in a vector work okay.
  for (const auto& elem : vector) {
    EXPECT_EQ(elem.get(), &x);
    EXPECT_EQ(*elem, x);
  }

  // Verification that release_wrapped_ptr_cnt does capture how many times the
  // destructors are called (e.g. that it is not always zero).
  RawPtrCountingImpl::ClearCounters();
  size_t number_of_cleared_elements = vector.size();
  vector.clear();
#if BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) || \
    BUILDFLAG(USE_ASAN_UNOWNED_PTR) || BUILDFLAG(USE_HOOKABLE_RAW_PTR) || \
    BUILDFLAG(RAW_PTR_ZERO_ON_DESTRUCT)
  EXPECT_EQ((int)number_of_cleared_elements,
            RawPtrCountingImpl::release_wrapped_ptr_cnt);
#else
  // TODO(lukasza): NoOpImpl has a default destructor that, unless zeroing is
  // requested, doesn't go through RawPtrCountingImpl::ReleaseWrappedPtr. So we
  // can't really depend on `g_release_wrapped_ptr_cnt`. This #else branch
  // should be deleted once USE_BACKUP_REF_PTR is removed (e.g. once
  // BackupRefPtr ships to the Stable channel).
  EXPECT_EQ(0, RawPtrCountingImpl::release_wrapped_ptr_cnt);
  std::ignore = number_of_cleared_elements;
#endif  // BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) ||
        // BUILDFLAG(USE_ASAN_UNOWNED_PTR) ||
        // BUILDFLAG(USE_HOOKABLE_RAW_PTR) ||
        // BUILDFLAG(RAW_PTR_ZERO_ON_DESTRUCT)
}
1366
// Polymorphic base used by the DerivedStructsComparison test below.
struct BaseStruct {
  explicit BaseStruct(int a) : a(a) {}
  virtual ~BaseStruct() = default;

  int a;
};
1373
// First sibling derived type; adds field |b|.
struct DerivedType1 : public BaseStruct {
  explicit DerivedType1(int a, int b) : BaseStruct(a), b(b) {}
  int b;
};
1378
// Second sibling derived type; adds field |c|.
struct DerivedType2 : public BaseStruct {
  explicit DerivedType2(int a, int c) : BaseStruct(a), c(c) {}
  int c;
};
1383
// Comparing raw_ptrs of sibling derived types through a common base must be
// well-defined — and in particular must not trigger CFI errors.
TEST_F(RawPtrTest, DerivedStructsComparison) {
  DerivedType1 first(42, 84);
  raw_ptr<DerivedType1> first_ptr = &first;
  DerivedType2 second(21, 10);
  raw_ptr<DerivedType2> second_ptr = &second;

  // Each raw_ptr is compared against the other object's address casted to
  // |BaseStruct*|; the objects are distinct, so both comparisons say so.
  EXPECT_NE(first_ptr, static_cast<BaseStruct*>(second_ptr.get()));
  EXPECT_NE(static_cast<BaseStruct*>(first_ptr.get()), second_ptr);
}
1397
// Base class providing a const member function, for the operator->*
// (pointer-to-member-function) test below.
class PmfTestBase {
 public:
  int MemFunc(char, double) const { return 11; }
};
1402
// Derived class that re-exports the base overload and adds a non-const
// overload of MemFunc with a different signature.
class PmfTestDerived : public PmfTestBase {
 public:
  using PmfTestBase::MemFunc;
  int MemFunc(float, double) { return 22; }
};
1408
// operator->* must work with pointers-to-member-functions — inherited,
// re-declared, and overloaded — through const and non-const raw_ptrs as well
// as raw_ptrs to const.
TEST_F(RawPtrTest, PointerToMemberFunction) {
  PmfTestDerived object;
  int (PmfTestBase::*pmf_base_base)(char, double) const = &PmfTestBase::MemFunc;
  int (PmfTestDerived::*pmf_derived_base)(char, double) const =
      &PmfTestDerived::MemFunc;
  int (PmfTestDerived::*pmf_derived_derived)(float, double) =
      &PmfTestDerived::MemFunc;

  // Test for `derived_ptr`
  CountingRawPtr<PmfTestDerived> derived_ptr = &object;

  EXPECT_EQ((derived_ptr->*pmf_base_base)(0, 0), 11);
  EXPECT_EQ((derived_ptr->*pmf_derived_base)(0, 0), 11);
  EXPECT_EQ((derived_ptr->*pmf_derived_derived)(0, 0), 22);

  // Test for `derived_ptr_const`
  const CountingRawPtr<PmfTestDerived> derived_ptr_const = &object;

  EXPECT_EQ((derived_ptr_const->*pmf_base_base)(0, 0), 11);
  EXPECT_EQ((derived_ptr_const->*pmf_derived_base)(0, 0), 11);
  EXPECT_EQ((derived_ptr_const->*pmf_derived_derived)(0, 0), 22);

  // Test for `const_derived_ptr`
  CountingRawPtr<const PmfTestDerived> const_derived_ptr = &object;

  EXPECT_EQ((const_derived_ptr->*pmf_base_base)(0, 0), 11);
  EXPECT_EQ((const_derived_ptr->*pmf_derived_base)(0, 0), 11);
  // const_derived_ptr->*pmf_derived_derived is not a const member function,
  // so it's not possible to test it.
}
1439
// raw_ptr<T> must be storable in absl::optional and assignable through it;
// an optional engaged with nullptr is distinct from an empty optional.
TEST_F(RawPtrTest, WorksWithOptional) {
  int value = 0;
  absl::optional<raw_ptr<int>> maybe;
  EXPECT_FALSE(maybe.has_value());

  maybe = nullptr;
  ASSERT_TRUE(maybe.has_value());
  EXPECT_EQ(nullptr, maybe.value());

  maybe = &value;
  ASSERT_TRUE(maybe.has_value());
  EXPECT_EQ(&value, maybe.value());
}
1453
// raw_ptr<T> must be usable as an absl::variant alternative; assigning
// nullptr or a plain pointer selects the raw_ptr alternative.
TEST_F(RawPtrTest, WorksWithVariant) {
  int value = 100;
  absl::variant<int, raw_ptr<int>> var;
  ASSERT_EQ(0u, var.index());
  EXPECT_EQ(0, absl::get<int>(var));

  var = value;
  ASSERT_EQ(0u, var.index());
  EXPECT_EQ(100, absl::get<int>(var));

  var = nullptr;
  ASSERT_EQ(1u, var.index());
  EXPECT_EQ(nullptr, absl::get<raw_ptr<int>>(var));

  var = &value;
  ASSERT_EQ(1u, var.index());
  EXPECT_EQ(&value, absl::get<raw_ptr<int>>(var));
}
1472
// Constructing a raw_ptr with different Traits (here: MayDangle) must go
// through the duplication path (WrapRawPtrForDuplication/GetForDuplication),
// not plain wrapping or extraction; cross-kind move falls back to copy.
TEST_F(RawPtrTest, CrossKindConversion) {
  int x = 123;
  CountingRawPtr<int> ptr1 = &x;

  RawPtrCountingImpl::ClearCounters();

  CountingRawPtrMayDangle<int> ptr2(ptr1);
  CountingRawPtrMayDangle<int> ptr3(std::move(ptr1));  // Falls back to copy.

  EXPECT_THAT((CountingRawPtrExpectations{.wrap_raw_ptr_cnt = 0,
                                          .get_for_dereference_cnt = 0,
                                          .get_for_extraction_cnt = 0,
                                          .wrap_raw_ptr_for_dup_cnt = 2,
                                          .get_for_duplication_cnt = 2}),
              CountersMatch());
}
1489
// Assignment counterpart of CrossKindConversion: cross-kind copy and move
// assignment must both use the duplication path (move falls back to copy).
TEST_F(RawPtrTest, CrossKindAssignment) {
  int x = 123;
  CountingRawPtr<int> ptr1 = &x;

  RawPtrCountingImpl::ClearCounters();

  CountingRawPtrMayDangle<int> ptr2;
  CountingRawPtrMayDangle<int> ptr3;
  ptr2 = ptr1;
  ptr3 = std::move(ptr1);  // Falls back to copy.

  EXPECT_THAT((CountingRawPtrExpectations{.wrap_raw_ptr_cnt = 0,
                                          .get_for_dereference_cnt = 0,
                                          .get_for_extraction_cnt = 0,
                                          .wrap_raw_ptr_for_dup_cnt = 2,
                                          .get_for_duplication_cnt = 2}),
              CountersMatch());
}
1508
1509 // Without the explicitly customized `raw_ptr::to_address()`,
1510 // `std::to_address()` will use the dereference operator. This is not
1511 // what we want; this test enforces extraction semantics for
1512 // `to_address()`.
TEST_F(RawPtrTest, ToAddressDoesNotDereference) {
  CountingRawPtr<int> ptr = nullptr;
  int* raw = std::to_address(ptr);
  std::ignore = raw;
  // Exactly one extraction; no dereference, comparison, or duplication.
  EXPECT_THAT((CountingRawPtrExpectations{.get_for_dereference_cnt = 0,
                                          .get_for_extraction_cnt = 1,
                                          .get_for_comparison_cnt = 0,
                                          .get_for_duplication_cnt = 0}),
              CountersMatch());
}
1523
// std::to_address() on a raw_ptr must yield the same address as
// std::to_address() on the plain pointer it wraps.
TEST_F(RawPtrTest, ToAddressGivesBackRawAddress) {
  int* native = nullptr;
  raw_ptr<int> wrapped = native;
  EXPECT_EQ(std::to_address(native), std::to_address(wrapped));
}
1529
// Helper simulating a legacy out-parameter API: writes |in| through |out|.
void InOutParamFuncWithPointer(int* in, int** out) {
  *out = in;
}
1533
// AsEphemeralRawAddr() must yield a temporary int** usable as an
// out-parameter, backed by storage distinct from the raw_ptr itself, with
// writes through it propagated back into the raw_ptr.
TEST_F(RawPtrTest, EphemeralRawAddrPointerPointer) {
  int v1 = 123;
  int v2 = 456;
  raw_ptr<int> ptr = &v1;
  // Pointer pointer should point to a pointer other than one inside raw_ptr.
  EXPECT_NE(&ptr.AsEphemeralRawAddr(),
            reinterpret_cast<int**>(std::addressof(ptr)));
  // But inner pointer should point to the same address.
  EXPECT_EQ(*&ptr.AsEphemeralRawAddr(), &v1);

  // Inner pointer can be rewritten via the pointer pointer.
  *&ptr.AsEphemeralRawAddr() = &v2;
  EXPECT_EQ(ptr.get(), &v2);
  InOutParamFuncWithPointer(&v1, &ptr.AsEphemeralRawAddr());
  EXPECT_EQ(ptr.get(), &v1);
}
1550
// Helper simulating an out-parameter API taking |int*&|: assigns |in| to
// |out|.
void InOutParamFuncWithReference(int* in, int*& out) {
  out = in;
}
1554
// Reference counterpart of EphemeralRawAddrPointerPointer: the ephemeral
// value must also convert to an int*& out-parameter, again backed by storage
// distinct from the raw_ptr itself.
TEST_F(RawPtrTest, EphemeralRawAddrPointerReference) {
  int v1 = 123;
  int v2 = 456;
  raw_ptr<int> ptr = &v1;
  // Pointer reference should refer to a pointer other than one inside raw_ptr.
  EXPECT_NE(&static_cast<int*&>(ptr.AsEphemeralRawAddr()),
            reinterpret_cast<int**>(std::addressof(ptr)));
  // But inner pointer should point to the same address.
  EXPECT_EQ(static_cast<int*&>(ptr.AsEphemeralRawAddr()), &v1);

  // Inner pointer can be rewritten via the pointer pointer.
  static_cast<int*&>(ptr.AsEphemeralRawAddr()) = &v2;
  EXPECT_EQ(ptr.get(), &v2);
  InOutParamFuncWithReference(&v1, ptr.AsEphemeralRawAddr());
  EXPECT_EQ(ptr.get(), &v1);
}
1571
1572 #if defined(COMPILER_GCC) && !defined(__clang__)
1573 // In GCC this test will optimize the return value of the constructor, so
1574 // assert fails. Disable optimizations to verify uninitialized attribute works
1575 // as expected.
1576 #pragma GCC push_options
1577 #pragma GCC optimize("O0")
1578 #endif
// The "uninitialized" raw_ptr variant must not zero its storage on
// construction: placement-new over a known bit pattern leaves it intact.
// (Surrounding GCC pragmas disable optimizations so the pattern survives.)
TEST_F(RawPtrTest, AllowUninitialized) {
  constexpr uintptr_t kPattern = 0x12345678;
  uintptr_t storage = kPattern;
  // Placement new over stored pattern must not change it.
  new (&storage) CountingRawPtrUninitialized<int>;
  EXPECT_EQ(storage, kPattern);
}
1586 #if defined(COMPILER_GCC) && !defined(__clang__)
1587 #pragma GCC pop_options
1588 #endif
1589
1590 } // namespace
1591
1592 namespace base::internal {
1593
1594 #if BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) && \
1595 !defined(MEMORY_TOOL_REPLACES_ALLOCATOR)
1596
// Out-of-memory handler installed for the test partition; aborts the test.
void HandleOOM(size_t unused_size) {
  LOG(FATAL) << "Out of memory";
}
1600
// Fixture owning a PartitionAlloc partition with BackupRefPtr (BRP) enabled,
// plus memory tagging when the CPU supports MTE.
class BackupRefPtrTest : public testing::Test {
 protected:
  void SetUp() override {
    // TODO(bartekn): Avoid using PartitionAlloc API directly. Switch to
    // new/delete once PartitionAlloc Everywhere is fully enabled.
    partition_alloc::PartitionAllocGlobalInit(HandleOOM);
  }

  // Dedicated BRP-enabled allocator; tests allocate/free via allocator_.root().
  partition_alloc::PartitionAllocator allocator_ =
      partition_alloc::PartitionAllocator([]() {
        partition_alloc::PartitionOptions opts;
        opts.backup_ref_ptr = partition_alloc::PartitionOptions::kEnabled;
        // Enable memory tagging only where the hardware has MTE.
        opts.memory_tagging = {
            .enabled = base::CPU::GetInstanceNoAllocation().has_mte()
                           ? partition_alloc::PartitionOptions::kEnabled
                           : partition_alloc::PartitionOptions::kDisabled};
        return opts;
      }());
};
1620
// End-to-end BRP behavior: dereferencing a freed-but-still-referenced slot
// either crashes (debug/slow-checks/MTE) or observes a poisoned value, and
// the slot is not reused until the last raw_ptr lets go of it.
TEST_F(BackupRefPtrTest, Basic) {
  base::CPU cpu;

  int* raw_ptr1 =
      reinterpret_cast<int*>(allocator_.root()->Alloc(sizeof(int), ""));
  // Use the actual raw_ptr implementation, not a test substitute, to
  // exercise real PartitionAlloc paths.
  raw_ptr<int, DisableDanglingPtrDetection> wrapped_ptr1 = raw_ptr1;

  *raw_ptr1 = 42;
  EXPECT_EQ(*raw_ptr1, *wrapped_ptr1);

  allocator_.root()->Free(raw_ptr1);
#if DCHECK_IS_ON() || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
  // In debug builds, the use-after-free should be caught immediately.
  EXPECT_DEATH_IF_SUPPORTED(g_volatile_int_to_ignore = *wrapped_ptr1, "");
#else   // DCHECK_IS_ON() || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
  if (cpu.has_mte()) {
    // If the hardware supports MTE, the use-after-free should also be caught.
    EXPECT_DEATH_IF_SUPPORTED(g_volatile_int_to_ignore = *wrapped_ptr1, "");
  } else {
    // The allocation should be poisoned since there's a raw_ptr alive.
    EXPECT_NE(*wrapped_ptr1, 42);
  }

  // The allocator should not be able to reuse the slot at this point.
  void* raw_ptr2 = allocator_.root()->Alloc(sizeof(int), "");
  EXPECT_NE(partition_alloc::UntagPtr(raw_ptr1),
            partition_alloc::UntagPtr(raw_ptr2));
  allocator_.root()->Free(raw_ptr2);

  // When the last reference is released, the slot should become reusable.
  wrapped_ptr1 = nullptr;
  void* raw_ptr3 = allocator_.root()->Alloc(sizeof(int), "");
  EXPECT_EQ(partition_alloc::UntagPtr(raw_ptr1),
            partition_alloc::UntagPtr(raw_ptr3));
  allocator_.root()->Free(raw_ptr3);
#endif  // DCHECK_IS_ON() || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
}
1660
// Wrapping zero-sized allocations in raw_ptr must not crash, even for a
// large number of them.
TEST_F(BackupRefPtrTest, ZeroSized) {
  std::vector<raw_ptr<void>> ptrs;
  // Use a reasonable number of elements to fill up the slot span.
  for (int i = 0; i < 128 * 1024; ++i) {
    // Constructing a raw_ptr instance from a zero-sized allocation should
    // not result in a crash.
    ptrs.emplace_back(allocator_.root()->Alloc(0));
  }
}
1670
// Creating (and re-pointing) a raw_ptr at the one-past-the-end address of an
// allocation must neither crash nor corrupt the partition's free list.
TEST_F(BackupRefPtrTest, EndPointer) {
  // This test requires a fresh partition with an empty free list.
  // Check multiple size buckets and levels of slot filling.
  for (int size = 0; size < 1024; size += sizeof(void*)) {
    // Creating a raw_ptr from an address right past the end of an allocation
    // should not result in a crash or corrupt the free list.
    char* raw_ptr1 = reinterpret_cast<char*>(allocator_.root()->Alloc(size));
    raw_ptr<char, AllowPtrArithmetic> wrapped_ptr = raw_ptr1 + size;
    wrapped_ptr = nullptr;
    // We need to make two more allocations to turn the possible free list
    // corruption into an observable crash.
    char* raw_ptr2 = reinterpret_cast<char*>(allocator_.root()->Alloc(size));
    char* raw_ptr3 = reinterpret_cast<char*>(allocator_.root()->Alloc(size));

    // Similarly for operator+=.
    char* raw_ptr4 = reinterpret_cast<char*>(allocator_.root()->Alloc(size));
    wrapped_ptr = raw_ptr4;
    wrapped_ptr += size;
    wrapped_ptr = nullptr;
    char* raw_ptr5 = reinterpret_cast<char*>(allocator_.root()->Alloc(size));
    char* raw_ptr6 = reinterpret_cast<char*>(allocator_.root()->Alloc(size));

    allocator_.root()->Free(raw_ptr1);
    allocator_.root()->Free(raw_ptr2);
    allocator_.root()->Free(raw_ptr3);
    allocator_.root()->Free(raw_ptr4);
    allocator_.root()->Free(raw_ptr5);
    allocator_.root()->Free(raw_ptr6);
  }
}
1701
// Tracks the BRP quarantine counters through a slot's lifecycle: zero while
// the allocation is alive, non-zero after it's freed with a raw_ptr still
// outstanding, and back to zero once the last raw_ptr is released.
TEST_F(BackupRefPtrTest, QuarantinedBytes) {
  uint64_t* raw_ptr1 = reinterpret_cast<uint64_t*>(
      allocator_.root()->Alloc(sizeof(uint64_t), ""));
  raw_ptr<uint64_t, DisableDanglingPtrDetection> wrapped_ptr1 = raw_ptr1;
  // Nothing has been freed yet, so nothing is quarantined.
  EXPECT_EQ(allocator_.root()->total_size_of_brp_quarantined_bytes.load(
                std::memory_order_relaxed),
            0U);
  EXPECT_EQ(allocator_.root()->total_count_of_brp_quarantined_slots.load(
                std::memory_order_relaxed),
            0U);

  // Memory should get quarantined, because |wrapped_ptr1| still points to it.
  allocator_.root()->Free(raw_ptr1);
  EXPECT_GT(allocator_.root()->total_size_of_brp_quarantined_bytes.load(
                std::memory_order_relaxed),
            0U);
  EXPECT_EQ(allocator_.root()->total_count_of_brp_quarantined_slots.load(
                std::memory_order_relaxed),
            1U);

  // Non quarantined free (no raw_ptr outstanding) should not affect
  // total_size_of_brp_quarantined_bytes.
  void* raw_ptr2 = allocator_.root()->Alloc(sizeof(uint64_t), "");
  allocator_.root()->Free(raw_ptr2);

  // Freeing quarantined memory should bring the size back down to zero.
  wrapped_ptr1 = nullptr;
  EXPECT_EQ(allocator_.root()->total_size_of_brp_quarantined_bytes.load(
                std::memory_order_relaxed),
            0U);
  EXPECT_EQ(allocator_.root()->total_count_of_brp_quarantined_slots.load(
                std::memory_order_relaxed),
            0U);
}
1735
// Exercises raw_ptr arithmetic (+, -, +=, -=, ++, --) against a single
// allocation of |requested_size| bytes: in-bounds moves must succeed, the
// one-past-the-end address must be tolerated (possibly poisoned), and any
// step beyond either bound must die. Shared by the regular slot-span,
// single-slot-span, and direct-map cases of the Advance test.
void RunBackupRefPtrImplAdvanceTest(
    partition_alloc::PartitionAllocator& allocator,
    size_t requested_size) {
  char* ptr = static_cast<char*>(allocator.root()->Alloc(requested_size));
  raw_ptr<char, AllowPtrArithmetic> protected_ptr = ptr;
  // In-bounds round trips, in both the compound and the binary-operator form.
  protected_ptr += 123;
  protected_ptr -= 123;
  protected_ptr = protected_ptr + 123;
  protected_ptr = protected_ptr - 123;
  protected_ptr += requested_size / 2;
  // end-of-allocation address should not cause an error immediately, but it may
  // result in the pointer being poisoned.
  protected_ptr = protected_ptr + (requested_size + 1) / 2;
#if BUILDFLAG(BACKUP_REF_PTR_POISON_OOB_PTR)
  // Dereferencing the poisoned one-past-the-end pointer must die.
  EXPECT_DEATH_IF_SUPPORTED(*protected_ptr = ' ', "");
  protected_ptr -= 1;  // This brings the pointer back within
                       // bounds, which causes the poison to be removed.
  *protected_ptr = ' ';
  protected_ptr += 1;  // Reposition pointer back past end of allocation.
#endif
  // Any advancement past the end of the allocation must die.
  EXPECT_CHECK_DEATH(protected_ptr = protected_ptr + 1);
  EXPECT_CHECK_DEATH(protected_ptr += 1);
  EXPECT_CHECK_DEATH(++protected_ptr);

  // Even though |protected_ptr| is already pointing to the end of the
  // allocation, assign it explicitly to make sure the underlying implementation
  // doesn't "switch" to the next slot.
  protected_ptr = ptr + requested_size;
  protected_ptr -= (requested_size + 1) / 2;
  protected_ptr = protected_ptr - requested_size / 2;
  // Back at the start of the allocation; retreating before it must die.
  EXPECT_CHECK_DEATH(protected_ptr = protected_ptr - 1);
  EXPECT_CHECK_DEATH(protected_ptr -= 1);
  EXPECT_CHECK_DEATH(--protected_ptr);

#if BUILDFLAG(BACKUP_REF_PTR_POISON_OOB_PTR)
  // An array type that should be more than a third the size of the available
  // memory for the allocation such that incrementing a pointer to this type
  // twice causes it to point to a memory location that is too small to fit a
  // complete element of this type.
  typedef int OverThirdArray[200 / sizeof(int)];
  raw_ptr<OverThirdArray> protected_arr_ptr =
      reinterpret_cast<OverThirdArray*>(ptr);

  protected_arr_ptr++;
  **protected_arr_ptr = 4;
  protected_arr_ptr++;
  EXPECT_DEATH_IF_SUPPORTED(**protected_arr_ptr = 4, "");
#endif  // BUILDFLAG(BACKUP_REF_PTR_POISON_OOB_PTR)

  protected_ptr = nullptr;
  allocator.root()->Free(ptr);
}
1788
// Runs RunBackupRefPtrImplAdvanceTest against all three allocation
// categories: regular slot spans, single-slot spans, and direct map.
TEST_F(BackupRefPtrTest, Advance) {
  // This requires some internal PartitionAlloc knowledge, but for the test to
  // work well the allocation + extras have to fill out the entire slot. That's
  // because PartitionAlloc doesn't know exact allocation size and bases the
  // guards on the slot size.
  //
  // A power of two is a safe choice for a slot size, then adjust it for extras.
  size_t slot_size = 512;
  size_t requested_size =
      allocator_.root()->AdjustSizeForExtrasSubtract(slot_size);
  // Verify that we're indeed filling up the slot.
  ASSERT_EQ(
      requested_size,
      allocator_.root()->AllocationCapacityFromRequestedSize(requested_size));
  RunBackupRefPtrImplAdvanceTest(allocator_, requested_size);

  // We don't have the same worry for single-slot spans, as PartitionAlloc knows
  // exactly where the allocation ends.
  size_t raw_size = 300003;
  ASSERT_GT(raw_size, partition_alloc::internal::MaxRegularSlotSpanSize());
  ASSERT_LE(raw_size, partition_alloc::internal::kMaxBucketed);
  requested_size = allocator_.root()->AdjustSizeForExtrasSubtract(raw_size);
  RunBackupRefPtrImplAdvanceTest(allocator_, requested_size);

  // Same for direct map.
  raw_size = 1001001;
  ASSERT_GT(raw_size, partition_alloc::internal::kMaxBucketed);
  requested_size = allocator_.root()->AdjustSizeForExtrasSubtract(raw_size);
  RunBackupRefPtrImplAdvanceTest(allocator_, requested_size);
}
1819
// Pointer arithmetic that keeps a raw_ptr entirely outside the BRP pool is
// unchecked; arithmetic that would move it into, or out of, the pool must
// die, since the result would bypass the ref-counting protection.
TEST_F(BackupRefPtrTest, AdvanceAcrossPools) {
  char stack_buffer1[1000];
  char stack_buffer2[1000];

  char* in_pool_ptr = static_cast<char*>(allocator_.root()->Alloc(123));

  raw_ptr<char, AllowPtrArithmetic> protected_ptr = stack_buffer1;
  // Nothing bad happens. Both pointers are outside of the BRP pool, so no
  // checks are triggered.
  protected_ptr += (stack_buffer2 - stack_buffer1);
  // Shifting an out-of-pool pointer into the BRP pool must trigger death.
  EXPECT_CHECK_DEATH(protected_ptr += (in_pool_ptr - stack_buffer2));

  protected_ptr = in_pool_ptr;
  // Same when a pointer is shifted from inside the BRP pool out of it.
  EXPECT_CHECK_DEATH(protected_ptr += (stack_buffer1 - in_pool_ptr));

  protected_ptr = nullptr;
  allocator_.root()->Free(in_pool_ptr);
}
1841
// Subtraction between raw_ptrs is allowed within a single allocation
// (including its one-past-the-end address); with
// ENABLE_POINTER_SUBTRACTION_CHECK, subtracting pointers that belong to
// different allocations must die.
TEST_F(BackupRefPtrTest, GetDeltaElems) {
  size_t requested_size = allocator_.root()->AdjustSizeForExtrasSubtract(512);
  char* ptr1 = static_cast<char*>(allocator_.root()->Alloc(requested_size));
  char* ptr2 = static_cast<char*>(allocator_.root()->Alloc(requested_size));
  ASSERT_LT(ptr1, ptr2);  // There should be a ref-count between slots.
  raw_ptr<char> protected_ptr1 = ptr1;
  raw_ptr<char> protected_ptr1_2 = ptr1 + 1;
  raw_ptr<char> protected_ptr1_3 = ptr1 + requested_size - 1;
  raw_ptr<char> protected_ptr1_4 = ptr1 + requested_size;
  raw_ptr<char> protected_ptr2 = ptr2;
  raw_ptr<char> protected_ptr2_2 = ptr2 + 1;

  // Deltas within the first allocation, in both directions.
  EXPECT_EQ(protected_ptr1_2 - protected_ptr1, 1);
  EXPECT_EQ(protected_ptr1 - protected_ptr1_2, -1);
  EXPECT_EQ(protected_ptr1_3 - protected_ptr1,
            checked_cast<ptrdiff_t>(requested_size) - 1);
  EXPECT_EQ(protected_ptr1 - protected_ptr1_3,
            -checked_cast<ptrdiff_t>(requested_size) + 1);
  EXPECT_EQ(protected_ptr1_4 - protected_ptr1,
            checked_cast<ptrdiff_t>(requested_size));
  EXPECT_EQ(protected_ptr1 - protected_ptr1_4,
            -checked_cast<ptrdiff_t>(requested_size));
#if BUILDFLAG(ENABLE_POINTER_SUBTRACTION_CHECK)
  // Cross-allocation subtraction must die, in every pairing and direction.
  EXPECT_CHECK_DEATH(protected_ptr2 - protected_ptr1);
  EXPECT_CHECK_DEATH(protected_ptr1 - protected_ptr2);
  EXPECT_CHECK_DEATH(protected_ptr2 - protected_ptr1_4);
  EXPECT_CHECK_DEATH(protected_ptr1_4 - protected_ptr2);
  EXPECT_CHECK_DEATH(protected_ptr2_2 - protected_ptr1);
  EXPECT_CHECK_DEATH(protected_ptr1 - protected_ptr2_2);
  EXPECT_CHECK_DEATH(protected_ptr2_2 - protected_ptr1_4);
  EXPECT_CHECK_DEATH(protected_ptr1_4 - protected_ptr2_2);
#endif  // BUILDFLAG(ENABLE_POINTER_SUBTRACTION_CHECK)
  EXPECT_EQ(protected_ptr2_2 - protected_ptr2, 1);
  EXPECT_EQ(protected_ptr2 - protected_ptr2_2, -1);

  // Release every raw_ptr before freeing, to avoid dangling-ptr reports.
  protected_ptr1 = nullptr;
  protected_ptr1_2 = nullptr;
  protected_ptr1_3 = nullptr;
  protected_ptr1_4 = nullptr;
  protected_ptr2 = nullptr;
  protected_ptr2_2 = nullptr;

  allocator_.root()->Free(ptr1);
  allocator_.root()->Free(ptr2);
}
1887
IsQuarantineEmpty(partition_alloc::PartitionAllocator & allocator)1888 bool IsQuarantineEmpty(partition_alloc::PartitionAllocator& allocator) {
1889 return allocator.root()->total_size_of_brp_quarantined_bytes.load(
1890 std::memory_order_relaxed) == 0;
1891 }
1892
// Helper that deletes itself from inside a callback bound with Unretained(),
// to verify that the raw_ptr argument held by the running callback keeps the
// freed allocation quarantined for the rest of the invocation.
struct BoundRawPtrTestHelper {
  // Placement-allocates a helper from |allocator| (a non-default partition),
  // so quarantine activity can be observed via IsQuarantineEmpty().
  static BoundRawPtrTestHelper* Create(
      partition_alloc::PartitionAllocator& allocator) {
    return new (allocator.root()->Alloc(sizeof(BoundRawPtrTestHelper), ""))
        BoundRawPtrTestHelper(allocator);
  }

  explicit BoundRawPtrTestHelper(partition_alloc::PartitionAllocator& allocator)
      : owning_allocator(allocator),
        once_callback(
            BindOnce(&BoundRawPtrTestHelper::DeleteItselfAndCheckIfInQuarantine,
                     Unretained(this))),
        repeating_callback(BindRepeating(
            &BoundRawPtrTestHelper::DeleteItselfAndCheckIfInQuarantine,
            Unretained(this))) {}

  // Destroys this object mid-callback. The quarantine must be empty before
  // the free and non-empty afterwards, proving the bound raw_ptr kept the
  // slot alive in quarantine.
  void DeleteItselfAndCheckIfInQuarantine() {
    auto& allocator = *owning_allocator;
    EXPECT_TRUE(IsQuarantineEmpty(allocator));

    // Since we use a non-default partition, `delete` has to be simulated.
    this->~BoundRawPtrTestHelper();
    allocator.root()->Free(this);

    EXPECT_FALSE(IsQuarantineEmpty(allocator));
  }

  const raw_ref<partition_alloc::PartitionAllocator> owning_allocator;
  OnceClosure once_callback;
  RepeatingClosure repeating_callback;
};
1924
1925 // Check that bound callback arguments remain protected by BRP for the
1926 // entire duration of a callback invocation.
TEST_F(BackupRefPtrTest, Bind) {
  // This test requires a separate partition; otherwise, unrelated allocations
  // might interfere with `IsQuarantineEmpty`.
  auto* once_helper = BoundRawPtrTestHelper::Create(allocator_);
  std::move(once_helper->once_callback).Run();
  EXPECT_TRUE(IsQuarantineEmpty(allocator_));

  auto* repeating_helper = BoundRawPtrTestHelper::Create(allocator_);
  std::move(repeating_helper->repeating_callback).Run();
  EXPECT_TRUE(IsQuarantineEmpty(allocator_));

  // `RepeatingCallback` has both lvalue and rvalue versions of `Run`;
  // exercise the lvalue one as well.
  auto* lvalue_run_helper = BoundRawPtrTestHelper::Create(allocator_);
  lvalue_run_helper->repeating_callback.Run();
  EXPECT_TRUE(IsQuarantineEmpty(allocator_));
}
1945
1946 #if PA_CONFIG(REF_COUNT_CHECK_COOKIE)
// Reinterpreting raw memory as a raw_ptr and assigning through it must trip
// the ref-count cookie check when the pointee allocation was already freed.
TEST_F(BackupRefPtrTest, ReinterpretCast) {
  void* ptr = allocator_.root()->Alloc(16);
  allocator_.root()->Free(ptr);

  raw_ptr<void>* wrapped_ptr = reinterpret_cast<raw_ptr<void>*>(&ptr);
  // The reference count cookie check should detect that the allocation has
  // been already freed.
  BASE_EXPECT_DEATH(*wrapped_ptr = nullptr, "");
}
1956 #endif
1957
1958 namespace {
1959
1960 // Install dangling raw_ptr handlers and restore them when going out of scope.
1961 class ScopedInstallDanglingRawPtrChecks {
1962 public:
ScopedInstallDanglingRawPtrChecks()1963 ScopedInstallDanglingRawPtrChecks() {
1964 enabled_feature_list_.InitWithFeaturesAndParameters(
1965 {{features::kPartitionAllocDanglingPtr, {{"mode", "crash"}}}},
1966 {/* disabled_features */});
1967 old_detected_fn_ = partition_alloc::GetDanglingRawPtrDetectedFn();
1968 old_dereferenced_fn_ = partition_alloc::GetDanglingRawPtrReleasedFn();
1969 allocator::InstallDanglingRawPtrChecks();
1970 }
~ScopedInstallDanglingRawPtrChecks()1971 ~ScopedInstallDanglingRawPtrChecks() {
1972 partition_alloc::SetDanglingRawPtrDetectedFn(old_detected_fn_);
1973 partition_alloc::SetDanglingRawPtrReleasedFn(old_dereferenced_fn_);
1974 }
1975
1976 private:
1977 test::ScopedFeatureList enabled_feature_list_;
1978 partition_alloc::DanglingRawPtrDetectedFn* old_detected_fn_;
1979 partition_alloc::DanglingRawPtrReleasedFn* old_dereferenced_fn_;
1980 };
1981
1982 } // namespace
1983
TEST_F(BackupRefPtrTest, RawPtrMayDangle) {
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  void* allocation = allocator_.root()->Alloc(16);
  raw_ptr<void, DisableDanglingPtrDetection> may_dangle_ptr = allocation;
  // DisableDanglingPtrDetection opts the pointer out of reporting: neither
  // the free nor the release below raises a dangling raw_ptr report.
  allocator_.root()->Free(allocation);
  may_dangle_ptr = nullptr;
}
1992
// Freeing an allocation while a default (dangling-checked) raw_ptr still
// points to it must be reported when dangling raw_ptr checks are compiled in
// and the perf experiment is off.
TEST_F(BackupRefPtrTest, RawPtrNotDangling) {
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  void* ptr = allocator_.root()->Alloc(16);
  raw_ptr<void> dangling_ptr = ptr;
#if BUILDFLAG(ENABLE_DANGLING_RAW_PTR_CHECKS) && \
    !BUILDFLAG(ENABLE_DANGLING_RAW_PTR_PERF_EXPERIMENT)
  BASE_EXPECT_DEATH(
      {
        allocator_.root()->Free(ptr);  // Dangling raw_ptr detected.
        dangling_ptr = nullptr;        // Dangling raw_ptr released.
      },
      AllOf(HasSubstr("Detected dangling raw_ptr"),
            HasSubstr("The memory was freed at:"),
            HasSubstr("The dangling raw_ptr was released at:")));
#else
  // Without the checks, the same sequence is benign.
  allocator_.root()->Free(ptr);
  dangling_ptr = nullptr;
#endif
}
2013
2014 // Check the comparator operators work, even across raw_ptr with different
2015 // dangling policies.
// Check the comparator operators work, even across raw_ptr with different
// dangling policies.
TEST_F(BackupRefPtrTest, DanglingPtrComparison) {
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  void* alloc_low = allocator_.root()->Alloc(16);
  void* alloc_high = allocator_.root()->Alloc(16);

  // Order the two allocations so the LT/GT expectations below hold.
  if (alloc_low > alloc_high) {
    std::swap(alloc_low, alloc_high);
  }

  raw_ptr<void, DisableDanglingPtrDetection> may_dangle_low = alloc_low;
  raw_ptr<void, DisableDanglingPtrDetection> may_dangle_high = alloc_high;
  raw_ptr<void> checked_low = alloc_low;
  raw_ptr<void> checked_high = alloc_high;

  EXPECT_EQ(may_dangle_low, checked_low);
  EXPECT_EQ(may_dangle_high, checked_high);
  EXPECT_NE(may_dangle_low, checked_high);
  EXPECT_NE(may_dangle_high, checked_low);
  EXPECT_LT(may_dangle_low, checked_high);
  EXPECT_GT(may_dangle_high, checked_low);
  EXPECT_LT(checked_low, may_dangle_high);
  EXPECT_GT(checked_high, may_dangle_low);

  // Release the checked pointers before freeing, to avoid dangling reports.
  checked_low = nullptr;
  checked_high = nullptr;

  allocator_.root()->Free(alloc_low);
  allocator_.root()->Free(alloc_high);
}
2046
2047 // Check the assignment operator works, even across raw_ptr with different
2048 // dangling policies (only `not dangling` -> `dangling` direction is supported).
// Check the assignment operator works, even across raw_ptr with different
// dangling policies (only `not dangling` -> `dangling` direction is supported).
TEST_F(BackupRefPtrTest, DanglingPtrAssignment) {
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  void* allocation = allocator_.root()->Alloc(16);

  raw_ptr<void, DisableDanglingPtrDetection> may_dangle_ptr;
  raw_ptr<void> checked_ptr;

  // Hand the value from the checked pointer to the may-dangle pointer, then
  // release the checked one before the free so nothing is reported.
  checked_ptr = allocation;
  may_dangle_ptr = checked_ptr;
  checked_ptr = nullptr;

  allocator_.root()->Free(allocation);

  may_dangle_ptr = nullptr;
}
2065
2066 // Check the copy constructor works, even across raw_ptr with different dangling
2067 // policies (only `not dangling` -> `dangling` direction is supported).
// Check the copy constructor works, even across raw_ptr with different dangling
// policies (only `not dangling` -> `dangling` direction is supported).
TEST_F(BackupRefPtrTest, DanglingPtrCopyContructor) {
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  void* allocation = allocator_.root()->Alloc(16);

  raw_ptr<void> checked_ptr(allocation);
  raw_ptr<void, DisableDanglingPtrDetection> may_dangle_ptr(checked_ptr);

  // Release both before the free; the checked one must not outlive it.
  checked_ptr = nullptr;
  may_dangle_ptr = nullptr;

  allocator_.root()->Free(allocation);
}
2081
TEST_F(BackupRefPtrTest, RawPtrExtractAsDangling) {
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  raw_ptr<int> int_ptr =
      static_cast<int*>(allocator_.root()->Alloc(sizeof(int), ""));
  // ExtractAsDangling() hands out the underlying pointer while clearing the
  // raw_ptr, so freeing through it reports no dangling raw_ptr.
  allocator_.root()->Free(int_ptr.ExtractAsDangling());
  EXPECT_EQ(int_ptr, nullptr);
}
2091
// Counterpart to RawPtrExtractAsDangling: freeing via .get() while the
// raw_ptr is still set must be reported as dangling (when checks are active).
TEST_F(BackupRefPtrTest, RawPtrDeleteWithoutExtractAsDangling) {
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  raw_ptr<int> ptr =
      static_cast<int*>(allocator_.root()->Alloc(sizeof(int), ""));
#if BUILDFLAG(ENABLE_DANGLING_RAW_PTR_CHECKS) && \
    !BUILDFLAG(ENABLE_DANGLING_RAW_PTR_PERF_EXPERIMENT)
  BASE_EXPECT_DEATH(
      {
        allocator_.root()->Free(ptr.get());  // Dangling raw_ptr detected.
        ptr = nullptr;                       // Dangling raw_ptr released.
      },
      AllOf(HasSubstr("Detected dangling raw_ptr"),
            HasSubstr("The memory was freed at:"),
            HasSubstr("The dangling raw_ptr was released at:")));
#else
  allocator_.root()->Free(ptr.get());
  ptr = nullptr;
#endif  // BUILDFLAG(ENABLE_DANGLING_RAW_PTR_CHECKS) && \
        // !BUILDFLAG(ENABLE_DANGLING_RAW_PTR_PERF_EXPERIMENT)
}
2113
// Verifies that raw_ptr works as an iterator with STL-style algorithms and
// loops, and counts exactly how many dereference / extraction / comparison
// unwraps each iteration style performs (via CountingRawPtr).
TEST_F(BackupRefPtrTest, SpatialAlgoCompat) {
  size_t slot_size = 512;
  size_t requested_size =
      allocator_.root()->AdjustSizeForExtrasSubtract(slot_size);
  // Verify that we're indeed filling up the slot.
  ASSERT_EQ(
      requested_size,
      allocator_.root()->AllocationCapacityFromRequestedSize(requested_size));
  size_t requested_elements = requested_size / sizeof(uint32_t);

  uint32_t* ptr =
      reinterpret_cast<uint32_t*>(allocator_.root()->Alloc(requested_size));
  uint32_t* ptr_end = ptr + requested_elements;

  CountingRawPtr<uint32_t> protected_ptr = ptr;
  CountingRawPtr<uint32_t> protected_ptr_end =
      protected_ptr + requested_elements;

#if BUILDFLAG(BACKUP_REF_PTR_POISON_OOB_PTR)
  // The end iterator is one past the allocation; writes through it must die.
  EXPECT_DEATH_IF_SUPPORTED(*protected_ptr_end = 1, "");
#endif

  RawPtrCountingImpl::ClearCounters();

  // std::generate over a [protected, protected) range: one dereference per
  // element, comparisons for the loop condition.
  uint32_t gen_val = 1;
  std::generate(protected_ptr, protected_ptr_end, [&gen_val]() {
    gen_val ^= gen_val + 1;
    return gen_val;
  });

  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = requested_elements,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = (requested_elements + 1) * 2,
              }),
              CountersMatch());

  RawPtrCountingImpl::ClearCounters();

  // Manual loop with protected iterator and protected end: read+write means
  // two dereferences per element.
  for (CountingRawPtr<uint32_t> protected_ptr_i = protected_ptr;
       protected_ptr_i < protected_ptr_end; protected_ptr_i++) {
    *protected_ptr_i ^= *protected_ptr_i + 1;
  }

  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = requested_elements * 2,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = (requested_elements + 1) * 2,
              }),
              CountersMatch());

  RawPtrCountingImpl::ClearCounters();

  // Protected iterator against a plain end pointer: only one side of each
  // comparison is counted.
  for (CountingRawPtr<uint32_t> protected_ptr_i = protected_ptr;
       protected_ptr_i < ptr_end; protected_ptr_i++) {
    *protected_ptr_i ^= *protected_ptr_i + 1;
  }

  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = requested_elements * 2,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = requested_elements + 1,
              }),
              CountersMatch());

  RawPtrCountingImpl::ClearCounters();

  // Plain iterator against a protected end pointer: no protected
  // dereferences, comparisons only.
  for (uint32_t* ptr_i = ptr; ptr_i < protected_ptr_end; ptr_i++) {
    *ptr_i ^= *ptr_i + 1;
  }

  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = requested_elements + 1,
              }),
              CountersMatch());

  RawPtrCountingImpl::ClearCounters();

  // Extracting both bounds up front costs exactly two extractions; the loop
  // itself then runs entirely on plain pointers.
  size_t iter_cnt = 0;
  for (uint32_t *ptr_i = protected_ptr, *ptr_i_end = protected_ptr_end;
       ptr_i < ptr_i_end; ptr_i++) {
    *ptr_i ^= *ptr_i + 1;
    iter_cnt++;
  }
  EXPECT_EQ(iter_cnt, requested_elements);

  EXPECT_THAT((CountingRawPtrExpectations{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 2,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());

  protected_ptr = nullptr;
  protected_ptr_end = nullptr;
  allocator_.root()->Free(ptr);
}
2213
2214 #if BUILDFLAG(BACKUP_REF_PTR_POISON_OOB_PTR)
// The OOB poison bit must survive both copy construction and copy assignment
// of a raw_ptr, so a duplicated one-past-the-end pointer stays
// non-dereferenceable.
TEST_F(BackupRefPtrTest, Duplicate) {
  size_t requested_size = allocator_.root()->AdjustSizeForExtrasSubtract(512);
  char* ptr = static_cast<char*>(allocator_.root()->Alloc(requested_size));
  raw_ptr<char> protected_ptr1 = ptr;
  protected_ptr1 += requested_size;  // Pointer should now be poisoned.

  // Duplicating a poisoned pointer should be allowed.
  raw_ptr<char> protected_ptr2 = protected_ptr1;

  // The poison bit should be propagated to the duplicate such that the OOB
  // access is disallowed:
  EXPECT_DEATH_IF_SUPPORTED(*protected_ptr2 = ' ', "");

  // Assignment from a poisoned pointer should be allowed.
  raw_ptr<char> protected_ptr3;
  protected_ptr3 = protected_ptr1;

  // The poison bit should be propagated via the assignment such that the OOB
  // access is disallowed:
  EXPECT_DEATH_IF_SUPPORTED(*protected_ptr3 = ' ', "");

  allocator_.root()->Free(ptr);
}
2238 #endif // BUILDFLAG(BACKUP_REF_PTR_POISON_OOB_PTR)
2239
2240 #if BUILDFLAG(PA_EXPENSIVE_DCHECKS_ARE_ON)
// A write into a quarantined (freed-but-referenced) slot must be caught when
// the slot is finally released: the free-for-ref-counting path checks the
// quarantine fill pattern.
TEST_F(BackupRefPtrTest, WriteAfterFree) {
  constexpr uint64_t kPayload = 0x1234567890ABCDEF;

  raw_ptr<uint64_t, DisableDanglingPtrDetection> ptr =
      static_cast<uint64_t*>(allocator_.root()->Alloc(sizeof(uint64_t), ""));

  // Now |ptr| should be quarantined.
  allocator_.root()->Free(ptr);

  EXPECT_DEATH_IF_SUPPORTED(
      {
        // Write something different from |kQuarantinedByte|.
        *ptr = kPayload;
        // Write-after-Free should lead to crash
        // on |PartitionAllocFreeForRefCounting|.
        ptr = nullptr;
      },
      "");
}
2260 #endif // BUILDFLAG(PA_EXPENSIVE_DCHECKS_ARE_ON)
2261
2262 namespace {
// Fill byte used by CustomQuarantineHook; must differ from the default
// quarantine fill so tests can tell which one ran.
constexpr uint8_t kCustomQuarantineByte = 0xff;
static_assert(kCustomQuarantineByte !=
              partition_alloc::internal::kQuarantinedByte);
2266
CustomQuarantineHook(void * address,size_t size)2267 void CustomQuarantineHook(void* address, size_t size) {
2268 partition_alloc::internal::SecureMemset(address, kCustomQuarantineByte, size);
2269 }
2270 } // namespace
2271
// Installing a quarantine override hook must make freed-but-referenced slots
// be filled by the hook instead of the default quarantine pattern.
TEST_F(BackupRefPtrTest, QuarantineHook) {
  partition_alloc::PartitionAllocHooks::SetQuarantineOverrideHook(
      CustomQuarantineHook);
  uint8_t* native_ptr =
      static_cast<uint8_t*>(allocator_.root()->Alloc(sizeof(uint8_t), ""));
  *native_ptr = 0;
  {
    raw_ptr<uint8_t, DisableDanglingPtrDetection> smart_ptr = native_ptr;

    allocator_.root()->Free(smart_ptr);
    // Access the allocation through the native pointer to avoid triggering
    // dereference checks in debug builds.
    EXPECT_EQ(*partition_alloc::internal::TagPtr(native_ptr),
              kCustomQuarantineByte);

    // Leaving |smart_ptr| filled with |kCustomQuarantineByte| can
    // cause a crash because we have a DCHECK that expects it to be filled with
    // |kQuarantinedByte|. We need to ensure it is unquarantined before
    // unregistering the hook.
  }  // <- unquarantined here

  partition_alloc::PartitionAllocHooks::SetQuarantineOverrideHook(nullptr);
}
2295
2296 #if BUILDFLAG(PA_IS_CHROMEOS_ASH)
// On ChromeOS Ash, BRP protection for pointers marked ExperimentalAsh is
// gated by BackupRefPtrGlobalSettings: with the experiment disabled only
// plain raw_ptrs quarantine their slot; once enabled, both kinds do.
// Quarantine is detected by probing the freed slot for the quarantine fill
// bytes (or by the death check in DCHECK/slow-check builds).
TEST_F(BackupRefPtrTest, ExperimentalAsh) {
  const bool feature_enabled_by_default =
      BackupRefPtrGlobalSettings::IsExperimentalAshEnabled();
  if (feature_enabled_by_default) {
    BackupRefPtrGlobalSettings::DisableExperimentalAshForTest();
  }

  // Allocate a slot so that a slot span doesn't get decommitted from memory,
  // while we allocate/deallocate/access the tested slot below.
  void* sentinel = allocator_.root()->Alloc(sizeof(unsigned int), "");

  // Four quarantine bytes packed into a uint32_t, for probing the freed slot.
  constexpr uint32_t kQuarantined2Bytes =
      partition_alloc::internal::kQuarantinedByte |
      (partition_alloc::internal::kQuarantinedByte << 8);
  constexpr uint32_t kQuarantined4Bytes =
      kQuarantined2Bytes | (kQuarantined2Bytes << 16);

  // Plain raw_ptr, with BRP for ExperimentalAsh pointer disabled.
  {
    raw_ptr<unsigned int, DanglingUntriaged> ptr = static_cast<unsigned int*>(
        allocator_.root()->Alloc(sizeof(unsigned int), ""));
    *ptr = 0;
    allocator_.root()->Free(ptr);
#if BUILDFLAG(PA_DCHECK_IS_ON) || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
    EXPECT_DEATH_IF_SUPPORTED(*ptr = 0, "");
#else
    EXPECT_EQ(kQuarantined4Bytes, *ptr);
#endif
  }
  // raw_ptr with ExperimentalAsh, BRP is expected to be off, as it is enabled
  // independently for these pointers.
  {
    raw_ptr<unsigned int, DanglingUntriaged | ExperimentalAsh> ptr =
        static_cast<unsigned int*>(
            allocator_.root()->Alloc(sizeof(unsigned int), ""));
    *ptr = 0;
    allocator_.root()->Free(ptr);
    // A tad fragile as a new allocation or free-list pointer may be there, but
    // highly unlikely it'll match 4 quarantine bytes in a row.
    EXPECT_NE(kQuarantined4Bytes, *ptr);
  }

  BackupRefPtrGlobalSettings::EnableExperimentalAsh();
  // BRP should be on for both types of pointers.
  {
    raw_ptr<unsigned int, DanglingUntriaged> ptr = static_cast<unsigned int*>(
        allocator_.root()->Alloc(sizeof(unsigned int), ""));
    *ptr = 0;
    allocator_.root()->Free(ptr);
#if BUILDFLAG(PA_DCHECK_IS_ON) || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
    EXPECT_DEATH_IF_SUPPORTED(*ptr = 0, "");
#else
    EXPECT_EQ(kQuarantined4Bytes, *ptr);
#endif
  }
  {
    raw_ptr<unsigned int, DanglingUntriaged | ExperimentalAsh> ptr =
        static_cast<unsigned int*>(
            allocator_.root()->Alloc(sizeof(unsigned int), ""));
    *ptr = 0;
    allocator_.root()->Free(ptr);
#if BUILDFLAG(PA_DCHECK_IS_ON) || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
    EXPECT_DEATH_IF_SUPPORTED(*ptr = 0, "");
#else
    EXPECT_EQ(kQuarantined4Bytes, *ptr);
#endif
  }

  allocator_.root()->Free(sentinel);

  // Restore the feature state to avoid one test to "leak" into the next one.
  if (!feature_enabled_by_default) {
    BackupRefPtrGlobalSettings::DisableExperimentalAshForTest();
  }
}
2372 #endif // BUILDFLAG(PA_IS_CHROMEOS_ASH)
2373
2374 #endif // BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) &&
2375 // !defined(MEMORY_TOOL_REPLACES_ALLOCATOR)
2376
2377 #if BUILDFLAG(USE_HOOKABLE_RAW_PTR)
2378
2379 namespace {
// X-macro listing every raw_ptr hook tracked by CountingHooks. F(name) is
// expanded once per hook to generate counters, adapters, and hook-table
// entries.
#define FOR_EACH_RAW_PTR_OPERATION(F)  \
  F(wrap_ptr)                          \
  F(release_wrapped_ptr)               \
  F(safely_unwrap_for_dereference)     \
  F(safely_unwrap_for_extraction)      \
  F(unsafely_unwrap_for_comparison)    \
  F(advance)                           \
  F(duplicate)                         \
  F(wrap_ptr_for_duplication)          \
  F(unsafely_unwrap_for_duplication)
2390
2391 // Can't use gMock to count the number of invocations because
2392 // gMock itself triggers raw_ptr<T> operations.
// Per-thread counters for each raw_ptr hook invocation, plus static adapter
// functions suitable for installing into a RawPtrHooks table.
struct CountingHooks {
  // Zeroes every per-operation counter.
  void ResetCounts() {
#define F(name) name##_count = 0;
    FOR_EACH_RAW_PTR_OPERATION(F)
#undef F
  }

  // Returns the thread-local singleton whose counters the adapters bump.
  static CountingHooks* Get() {
    static thread_local CountingHooks instance;
    return &instance;
  }

// The adapter method is templated to accept any number of arguments.
#define F(name)                      \
  template <typename... T>           \
  static void name##_adapter(T...) { \
    Get()->name##_count++;           \
  }                                  \
  size_t name##_count = 0;
  FOR_EACH_RAW_PTR_OPERATION(F)
#undef F
};
2415
// Hook table routing every raw_ptr operation to its CountingHooks adapter.
constexpr RawPtrHooks raw_ptr_hooks{
#define F(name) .name = CountingHooks::name##_adapter,
    FOR_EACH_RAW_PTR_OPERATION(F)
#undef F
};
2421 } // namespace
2422
2423 class HookableRawPtrImplTest : public testing::Test {
2424 protected:
SetUp()2425 void SetUp() override { InstallRawPtrHooks(&raw_ptr_hooks); }
TearDown()2426 void TearDown() override { ResetRawPtrHooks(); }
2427 };
2428
// Wrapping a raw pointer in a raw_ptr must invoke the wrap_ptr hook once.
TEST_F(HookableRawPtrImplTest, WrapPtr) {
  // Can't call `ResetCounts` in `SetUp` because gTest triggers
  // raw_ptr<T> operations between `SetUp` and the test body.
  CountingHooks::Get()->ResetCounts();
  {
    int* raw = new int;
    [[maybe_unused]] raw_ptr<int> hooked_ptr = raw;
    delete raw;
  }
  EXPECT_EQ(CountingHooks::Get()->wrap_ptr_count, 1u);
}
2440
// By the time the raw_ptr is destroyed, the release_wrapped_ptr hook must
// have fired exactly once.
TEST_F(HookableRawPtrImplTest, ReleaseWrappedPtr) {
  CountingHooks::Get()->ResetCounts();
  {
    int* raw = new int;
    [[maybe_unused]] raw_ptr<int> hooked_ptr = raw;
    delete raw;
  }
  EXPECT_EQ(CountingHooks::Get()->release_wrapped_ptr_count, 1u);
}
2450
// Writing through a raw_ptr must invoke the dereference-unwrap hook once.
TEST_F(HookableRawPtrImplTest, SafelyUnwrapForDereference) {
  CountingHooks::Get()->ResetCounts();
  {
    int* raw = new int;
    raw_ptr<int> hooked_ptr = raw;
    *hooked_ptr = 1;
    delete raw;
  }
  EXPECT_EQ(CountingHooks::Get()->safely_unwrap_for_dereference_count, 1u);
}
2461
// Converting a raw_ptr back to a plain pointer must invoke the
// extraction-unwrap hook once.
TEST_F(HookableRawPtrImplTest, SafelyUnwrapForExtraction) {
  CountingHooks::Get()->ResetCounts();
  {
    int* raw = new int;
    raw_ptr<int> hooked_ptr = raw;
    raw = hooked_ptr;
    delete raw;
  }
  EXPECT_EQ(CountingHooks::Get()->safely_unwrap_for_extraction_count, 1u);
}
2472
// Comparing a raw_ptr against a plain pointer must invoke the
// comparison-unwrap hook once.
TEST_F(HookableRawPtrImplTest, UnsafelyUnwrapForComparison) {
  CountingHooks::Get()->ResetCounts();
  {
    int* raw = new int;
    raw_ptr<int> hooked_ptr = raw;
    EXPECT_EQ(hooked_ptr, raw);
    delete raw;
  }
  EXPECT_EQ(CountingHooks::Get()->unsafely_unwrap_for_comparison_count, 1u);
}
2483
TEST_F(HookableRawPtrImplTest, Advance) {
  // Counters are reset here rather than in `SetUp`; see `WrapPtr`.
  CountingHooks::Get()->ResetCounts();
  {
    int* array = new int[10];
    raw_ptr<int, AllowPtrArithmetic> wrapped = array;
    // Pointer arithmetic on the raw_ptr fires the advance hook once.
    wrapped += 1;
    delete[] array;
  }
  EXPECT_EQ(CountingHooks::Get()->advance_count, 1u);
}
2494
TEST_F(HookableRawPtrImplTest, Duplicate) {
  // Counters are reset here rather than in `SetUp`; see `WrapPtr`.
  CountingHooks::Get()->ResetCounts();
  {
    int* allocation = new int;
    raw_ptr<int> original = allocation;
    // Same-kind copy construction is a single "duplicate" operation.
    raw_ptr<int> copy = original;
    delete allocation;
  }
  EXPECT_EQ(CountingHooks::Get()->duplicate_count, 1u);
}
2505
TEST_F(HookableRawPtrImplTest, CrossKindCopyConstruction) {
  // Counters are reset here rather than in `SetUp`; see `WrapPtr`.
  CountingHooks::Get()->ResetCounts();
  {
    int* allocation = new int;
    raw_ptr<int> checked_ptr = allocation;
    // Copying across traits kinds goes through the duplication-specific
    // wrap/unwrap hooks instead of the plain duplicate hook.
    raw_ptr<int, RawPtrTraits::kMayDangle> may_dangle_ptr(checked_ptr);
    delete allocation;
  }
  EXPECT_EQ(CountingHooks::Get()->duplicate_count, 0u);
  EXPECT_EQ(CountingHooks::Get()->wrap_ptr_for_duplication_count, 1u);
  EXPECT_EQ(CountingHooks::Get()->unsafely_unwrap_for_duplication_count, 1u);
}
2518
2519 #endif // BUILDFLAG(USE_HOOKABLE_RAW_PTR)
2520
TEST(DanglingPtrTest, DetectAndReset) {
  auto instrumentation = test::DanglingPtrInstrumentation::Create();
  if (!instrumentation.has_value()) {
    GTEST_SKIP() << instrumentation.error();
  }

  // Deleting the pointee while a raw_ptr still references it is "detected";
  // clearing that raw_ptr afterwards is "released".
  std::unique_ptr<int> owner = std::make_unique<int>(42);
  raw_ptr<int> dangling = owner.get();
  EXPECT_EQ(instrumentation->dangling_ptr_detected(), 0u);
  EXPECT_EQ(instrumentation->dangling_ptr_released(), 0u);
  owner.reset();
  EXPECT_EQ(instrumentation->dangling_ptr_detected(), 1u);
  EXPECT_EQ(instrumentation->dangling_ptr_released(), 0u);
  dangling = nullptr;
  EXPECT_EQ(instrumentation->dangling_ptr_detected(), 1u);
  EXPECT_EQ(instrumentation->dangling_ptr_released(), 1u);
}
2538
TEST(DanglingPtrTest, DetectAndDestructor) {
  auto instrumentation = test::DanglingPtrInstrumentation::Create();
  if (!instrumentation.has_value()) {
    GTEST_SKIP() << instrumentation.error();
  }

  // Here the dangling raw_ptr is released by its destructor (scope exit)
  // rather than by explicit reassignment.
  std::unique_ptr<int> owner = std::make_unique<int>(42);
  {
    [[maybe_unused]] raw_ptr<int> dangling = owner.get();
    EXPECT_EQ(instrumentation->dangling_ptr_detected(), 0u);
    EXPECT_EQ(instrumentation->dangling_ptr_released(), 0u);
    owner.reset();
    EXPECT_EQ(instrumentation->dangling_ptr_detected(), 1u);
    EXPECT_EQ(instrumentation->dangling_ptr_released(), 0u);
  }
  EXPECT_EQ(instrumentation->dangling_ptr_detected(), 1u);
  EXPECT_EQ(instrumentation->dangling_ptr_released(), 1u);
}
2557
TEST(DanglingPtrTest, DetectResetAndDestructor) {
  auto instrumentation = test::DanglingPtrInstrumentation::Create();
  if (!instrumentation.has_value()) {
    GTEST_SKIP() << instrumentation.error();
  }

  // The raw_ptr is nulled out explicitly and then destroyed; the release
  // must be reported only once, not again by the destructor.
  std::unique_ptr<int> owner = std::make_unique<int>(42);
  {
    raw_ptr<int> dangling = owner.get();
    EXPECT_EQ(instrumentation->dangling_ptr_detected(), 0u);
    EXPECT_EQ(instrumentation->dangling_ptr_released(), 0u);
    owner.reset();
    EXPECT_EQ(instrumentation->dangling_ptr_detected(), 1u);
    EXPECT_EQ(instrumentation->dangling_ptr_released(), 0u);
    dangling = nullptr;
    EXPECT_EQ(instrumentation->dangling_ptr_detected(), 1u);
    EXPECT_EQ(instrumentation->dangling_ptr_released(), 1u);
  }
  EXPECT_EQ(instrumentation->dangling_ptr_detected(), 1u);
  EXPECT_EQ(instrumentation->dangling_ptr_released(), 1u);
}
2579
2580 } // namespace base::internal
2581