1 // Copyright 2020 The Chromium Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "base/allocator/partition_allocator/pointers/raw_ptr.h"
6
7 #include <climits>
8 #include <cstddef>
9 #include <cstdint>
10 #include <string>
11 #include <thread>
12 #include <type_traits>
13 #include <utility>
14
15 #include "base/allocator/partition_alloc_features.h"
16 #include "base/allocator/partition_alloc_support.h"
17 #include "base/allocator/partition_allocator/dangling_raw_ptr_checks.h"
18 #include "base/allocator/partition_allocator/partition_alloc-inl.h"
19 #include "base/allocator/partition_allocator/partition_alloc.h"
20 #include "base/allocator/partition_allocator/partition_alloc_base/numerics/checked_math.h"
21 #include "base/allocator/partition_allocator/partition_alloc_buildflags.h"
22 #include "base/allocator/partition_allocator/partition_alloc_config.h"
23 #include "base/allocator/partition_allocator/partition_alloc_constants.h"
24 #include "base/allocator/partition_allocator/partition_alloc_hooks.h"
25 #include "base/allocator/partition_allocator/pointers/raw_ptr_test_support.h"
26 #include "base/allocator/partition_allocator/pointers/raw_ref.h"
27 #include "base/allocator/partition_allocator/tagging.h"
28 #include "base/cpu.h"
29 #include "base/cxx20_to_address.h"
30 #include "base/logging.h"
31 #include "base/memory/raw_ptr_asan_service.h"
32 #include "base/task/thread_pool.h"
33 #include "base/test/bind.h"
34 #include "base/test/gtest_util.h"
35 #include "base/test/memory/dangling_ptr_instrumentation.h"
36 #include "base/test/scoped_feature_list.h"
37 #include "base/test/task_environment.h"
38 #include "build/build_config.h"
39 #include "build/buildflag.h"
40 #include "testing/gmock/include/gmock/gmock.h"
41 #include "testing/gtest/include/gtest/gtest.h"
42 #include "third_party/abseil-cpp/absl/types/optional.h"
43 #include "third_party/abseil-cpp/absl/types/variant.h"
44
45 #if BUILDFLAG(USE_ASAN_BACKUP_REF_PTR)
46 #include <sanitizer/asan_interface.h>
47 #include "base/debug/asan_service.h"
48 #endif
49
50 using testing::AllOf;
51 using testing::HasSubstr;
52 using testing::Test;
53
// raw_ptr is meant to be a drop-in replacement for T*, so it must be exactly
// pointer-sized; any overhead would bloat every struct embedding one.
static_assert(sizeof(raw_ptr<void>) == sizeof(void*),
              "raw_ptr shouldn't add memory overhead");
static_assert(sizeof(raw_ptr<int>) == sizeof(int*),
              "raw_ptr shouldn't add memory overhead");
static_assert(sizeof(raw_ptr<std::string>) == sizeof(std::string*),
              "raw_ptr shouldn't add memory overhead");
60
61 #if !BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) && \
62 !BUILDFLAG(USE_ASAN_UNOWNED_PTR) && !BUILDFLAG(USE_HOOKABLE_RAW_PTR)
63 // |is_trivially_copyable| assertion means that arrays/vectors of raw_ptr can
64 // be copied by memcpy.
65 static_assert(std::is_trivially_copyable<raw_ptr<void>>::value,
66 "raw_ptr should be trivially copyable");
67 static_assert(std::is_trivially_copyable<raw_ptr<int>>::value,
68 "raw_ptr should be trivially copyable");
69 static_assert(std::is_trivially_copyable<raw_ptr<std::string>>::value,
70 "raw_ptr should be trivially copyable");
71
72 // |is_trivially_default_constructible| assertion helps retain implicit default
73 // constructors when raw_ptr is used as a union field. Example of an error
74 // if this assertion didn't hold:
75 //
76 // ../../base/trace_event/trace_arguments.h:249:16: error: call to
77 // implicitly-deleted default constructor of 'base::trace_event::TraceValue'
78 // TraceValue ret;
79 // ^
80 // ../../base/trace_event/trace_arguments.h:211:26: note: default
81 // constructor of 'TraceValue' is implicitly deleted because variant field
82 // 'as_pointer' has a non-trivial default constructor
83 // raw_ptr<const void> as_pointer;
84 static_assert(std::is_trivially_default_constructible<raw_ptr<void>>::value,
85 "raw_ptr should be trivially default constructible");
86 static_assert(std::is_trivially_default_constructible<raw_ptr<int>>::value,
87 "raw_ptr should be trivially default constructible");
88 static_assert(
89 std::is_trivially_default_constructible<raw_ptr<std::string>>::value,
90 "raw_ptr should be trivially default constructible");
91 #endif // !BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) &&
92 // !BUILDFLAG(USE_ASAN_UNOWNED_PTR) && !BUILDFLAG(USE_HOOKABLE_RAW_PTR)
93
// Verify that raw_ptr is a literal type, and its entire interface is constexpr.
//
// Constexpr destructors were introduced in C++20. PartitionAlloc's minimum
// supported C++ version is C++17, so raw_ptr is not a literal type in C++17.
// Thus we only test for constexpr in C++20.
#if defined(__cpp_constexpr) && __cpp_constexpr >= 201907L
// The lambda is evaluated at compile time via static_assert; if any of the
// raw_ptr operations below were not constexpr, this would fail to compile.
static_assert([]() constexpr {
  struct IntBase {};
  struct Int : public IntBase {
    int i = 0;
  };

  Int* i = new Int();
  {
    raw_ptr<Int> r(i);              // raw_ptr(T*)
    raw_ptr<Int> r2(r);             // raw_ptr(const raw_ptr&)
    raw_ptr<Int> r3(std::move(r));  // raw_ptr(raw_ptr&&)
    r = r2;                         // operator=(const raw_ptr&)
    r = std::move(r3);              // operator=(raw_ptr&&)
    raw_ptr<Int, base::RawPtrTraits::kMayDangle> r4(
        r);   // raw_ptr(const raw_ptr<DifferentTraits>&)
    r4 = r2;  // operator=(const raw_ptr<DifferentTraits>&)
    // (There is no move-version of DifferentTraits.)
    [[maybe_unused]] raw_ptr<IntBase> r5(
        r2);  // raw_ptr(const raw_ptr<Convertible>&)
    [[maybe_unused]] raw_ptr<IntBase> r6(
        std::move(r2));  // raw_ptr(raw_ptr<Convertible>&&)
    r2 = r;              // Reset after move...
    r5 = r2;             // operator=(const raw_ptr<Convertible>&)
    r5 = std::move(r2);  // operator=(raw_ptr<Convertible>&&)
    [[maybe_unused]] raw_ptr<Int> r7(nullptr);  // raw_ptr(nullptr)
    r4 = nullptr;                               // operator=(nullptr)
    r4 = i;                                     // operator=(T*)
    r5 = r4;                                    // operator=(const Upcast&)
    r5 = std::move(r4);                         // operator=(Upcast&&)
    r.get()->i += 1;                            // get()
    [[maybe_unused]] bool b = r;                // operator bool
    (*r).i += 1;                                // operator*()
    r->i += 1;                                  // operator->()
    [[maybe_unused]] Int* i2 = r;               // operator T*()
    [[maybe_unused]] IntBase* i3 = r;           // operator Convertible*()

    // Pointer arithmetic requires the kAllowPtrArithmetic trait.
    Int* array = new Int[3]();
    {
      raw_ptr<Int, base::RawPtrTraits::kAllowPtrArithmetic> ra(array);
      ++ra;      // operator++()
      --ra;      // operator--()
      ra++;      // operator++(int)
      ra--;      // operator--(int)
      ra += 1u;  // operator+=()
      ra -= 1u;  // operator-=()
    }
    delete[] array;
  }
  delete i;
  return true;
}());
#endif
152
// Don't use base::internal for testing raw_ptr API, to test if code outside
// this namespace calls the correct functions from this namespace.
namespace {

// `kAllowPtrArithmetic` matches what `CountingRawPtr` does internally.
// `kUseCountingWrapperForTest` is removed.
using RawPtrCountingImpl = base::internal::RawPtrCountingImplWrapperForTest<
    base::RawPtrTraits::kAllowPtrArithmetic>;

// `kMayDangle | kAllowPtrArithmetic` matches what `CountingRawPtrMayDangle`
// does internally. `kUseCountingWrapperForTest` is removed, and `kMayDangle`
// and `kAllowPtrArithmetic` are kept.
using RawPtrCountingMayDangleImpl =
    base::internal::RawPtrCountingImplWrapperForTest<
        base::RawPtrTraits::kMayDangle |
        base::RawPtrTraits::kAllowPtrArithmetic>;

// raw_ptr whose implementation counts every wrap/unwrap/comparison, letting
// tests assert exactly which internal operations an expression performs.
template <typename T>
using CountingRawPtr = raw_ptr<T,
                               base::RawPtrTraits::kUseCountingWrapperForTest |
                                   base::RawPtrTraits::kAllowPtrArithmetic>;
// Sanity check: the counting trait must resolve to the impl alias above.
static_assert(std::is_same_v<CountingRawPtr<int>::Impl, RawPtrCountingImpl>);

// Same as CountingRawPtr, but additionally marked kMayDangle.
template <typename T>
using CountingRawPtrMayDangle =
    raw_ptr<T,
            base::RawPtrTraits::kMayDangle |
                base::RawPtrTraits::kUseCountingWrapperForTest |
                base::RawPtrTraits::kAllowPtrArithmetic>;
static_assert(std::is_same_v<CountingRawPtrMayDangle<int>::Impl,
                             RawPtrCountingMayDangleImpl>);
184
// Simple aggregate used by tests that go through operator->.
struct MyStruct {
  int x;
};

// Two unrelated bases plus a Derived inheriting both, used to exercise
// raw_ptr conversions across a multiple-inheritance hierarchy. Note that the
// Base2 subobject lives at a non-zero offset inside Derived, which the
// comparison/cast tests below rely on.
struct Base1 {
  explicit Base1(int value) : b1(value) {}
  int b1;
};

struct Base2 {
  explicit Base2(int value) : b2(value) {}
  int b2;
};

struct Derived : Base1, Base2 {
  Derived(int base1_value, int base2_value, int derived_value)
      : Base1(base1_value), Base2(base2_value), d(derived_value) {}
  int d;
};
203
// Fixture that resets both counting implementations' counters before each
// test, so every test observes operation counts starting from zero.
class RawPtrTest : public Test {
 protected:
  void SetUp() override {
    RawPtrCountingImpl::ClearCounters();
    RawPtrCountingMayDangleImpl::ClearCounters();
  }
};

// Use this instead of std::ignore, to prevent the instruction from getting
// optimized out by the compiler.
volatile int g_volatile_int_to_ignore;
215
// Dereferencing a null raw_ptr via operator* must crash (where death tests
// are supported) instead of silently reading through null.
TEST_F(RawPtrTest, NullStarDereference) {
  raw_ptr<int> ptr = nullptr;
  EXPECT_DEATH_IF_SUPPORTED(g_volatile_int_to_ignore = *ptr, "");
}

// Same as above, but for member access via operator->.
TEST_F(RawPtrTest, NullArrowDereference) {
  raw_ptr<MyStruct> ptr = nullptr;
  EXPECT_DEATH_IF_SUPPORTED(g_volatile_int_to_ignore = ptr->x, "");
}
225
// Extracting (implicitly casting to T*) a null raw_ptr must not crash, since
// no dereference happens; only the extraction counter should tick.
TEST_F(RawPtrTest, NullExtractNoDereference) {
  CountingRawPtr<int> ptr = nullptr;
  // No dereference hence shouldn't crash.
  int* raw = ptr;
  std::ignore = raw;
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 1,
                  .get_for_comparison_cnt = 0}),
              CountersMatch());
}

TEST_F(RawPtrTest, InvalidExtractNoDereference) {
  // Some code uses invalid pointer values as indicators, so those values must
  // be accepted by raw_ptr and passed through unchanged during extraction.
  int* inv_ptr = reinterpret_cast<int*>(~static_cast<uintptr_t>(0));
  CountingRawPtr<int> ptr = inv_ptr;
  int* raw = ptr;
  EXPECT_EQ(raw, inv_ptr);
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 1,
                  .get_for_comparison_cnt = 0}),
              CountersMatch());
}
251
// Comparing a raw_ptr against nullptr (in either order) must not unwrap the
// pointer at all — all three counters stay zero.
TEST_F(RawPtrTest, NullCmpExplicit) {
  CountingRawPtr<int> ptr = nullptr;
  EXPECT_TRUE(ptr == nullptr);
  EXPECT_TRUE(nullptr == ptr);
  EXPECT_FALSE(ptr != nullptr);
  EXPECT_FALSE(nullptr != ptr);
  // No need to unwrap pointer, just compare against 0.
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}

// Same zero-cost expectation for boolean truthiness tests.
TEST_F(RawPtrTest, NullCmpBool) {
  CountingRawPtr<int> ptr = nullptr;
  EXPECT_FALSE(ptr);
  EXPECT_TRUE(!ptr);
  // No need to unwrap pointer, just compare against 0.
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
279
// Helper that forces its argument into a bool; tests use it to check which
// raw_ptr conversion operator fires when a raw_ptr is passed where a bool is
// expected. The parameter is deliberately unused.
void FuncThatAcceptsBool([[maybe_unused]] bool b) {}
281
// Helpers that convert a raw_ptr to bool in ways that are expected to use
// |operator bool| (cheap) rather than |operator T*| (extraction).
bool IsValidNoCast(CountingRawPtr<int> ptr) {
  return !!ptr;  // !! to avoid implicit cast
}
bool IsValidNoCast2(CountingRawPtr<int> ptr) {
  // && forces a contextual conversion to bool, so no extraction happens.
  return ptr && true;
}
288
// Verifies that every idiomatic truthiness check below goes through
// |operator bool| and therefore performs no unwrap at all.
TEST_F(RawPtrTest, BoolOpNotCast) {
  CountingRawPtr<int> ptr = nullptr;
  volatile bool is_valid = !!ptr;  // !! to avoid implicit cast
  is_valid = ptr || is_valid;      // volatile, so won't be optimized
  if (ptr) {
    is_valid = true;
  }
  [[maybe_unused]] bool is_not_valid = !ptr;
  if (!ptr) {
    is_not_valid = true;
  }
  std::ignore = IsValidNoCast(ptr);
  std::ignore = IsValidNoCast2(ptr);
  FuncThatAcceptsBool(!ptr);
  // No need to unwrap pointer, just compare against 0.
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
311
// Helper whose implicit return-type conversion goes through |operator T*|
// (extraction) rather than |operator bool| — the costly path.
bool IsValidWithCast(CountingRawPtr<int> ptr) {
  return ptr;
}

// This test is mostly for documentation purposes. It demonstrates cases where
// |operator T*| is called first and then the pointer is converted to bool,
// as opposed to calling |operator bool| directly. The former may be more
// costly, so the caller has to be careful not to trigger this path.
TEST_F(RawPtrTest, CastNotBoolOp) {
  CountingRawPtr<int> ptr = nullptr;
  [[maybe_unused]] bool is_valid = ptr;
  is_valid = IsValidWithCast(ptr);
  FuncThatAcceptsBool(ptr);
  // Three conversions above, three extractions.
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 3,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
332
// operator* must be counted as a dereference, not an extraction.
TEST_F(RawPtrTest, StarDereference) {
  int foo = 42;
  CountingRawPtr<int> ptr = &foo;
  EXPECT_EQ(*ptr, 42);
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 1,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}

// Likewise for operator->.
TEST_F(RawPtrTest, ArrowDereference) {
  MyStruct foo = {42};
  CountingRawPtr<MyStruct> ptr = &foo;
  EXPECT_EQ(ptr->x, 42);
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 1,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
356
// ExtractAsDangling() + delete: the non-dangling impl records one duplication
// (handing the value to the MayDangle kind), and the MayDangle impl records
// the wrap plus the extraction that feeds |delete|.
TEST_F(RawPtrTest, Delete) {
  CountingRawPtr<int> ptr = new int(42);
  delete ptr.ExtractAsDangling();
  // The pointer is first internally converted to MayDangle kind, then extracted
  // using implicit cast before passing to |delete|.
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
                  .wrap_raw_ptr_for_dup_cnt = 0,
                  .get_for_duplication_cnt = 1,
              }),
              CountersMatch());
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingMayDangleImpl>{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 1,
                  .get_for_comparison_cnt = 0,
                  .wrap_raw_ptr_for_dup_cnt = 1,
                  .get_for_duplication_cnt = 0,
              }),
              CountersMatch());
}
379
// ClearAndDelete(): one wrap at construction, one extraction to feed delete,
// one release when the pointer is cleared; the raw_ptr ends up null.
TEST_F(RawPtrTest, ClearAndDelete) {
  CountingRawPtr<int> ptr(new int);
  ptr.ClearAndDelete();

  // TODO(crbug.com/1346513): clang-format has a difficult time making
  // sense of preprocessor arms mixed with designated initializers.
  //
  // clang-format off
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl> {
                .wrap_raw_ptr_cnt = 1,
                .release_wrapped_ptr_cnt = 1,
                .get_for_dereference_cnt = 0,
                .get_for_extraction_cnt = 1,
                .wrapped_ptr_swap_cnt = 0,
              }),
              CountersMatch());
  // clang-format on
  EXPECT_EQ(ptr.get(), nullptr);
}

// Array variant: same counter expectations as ClearAndDelete.
TEST_F(RawPtrTest, ClearAndDeleteArray) {
  CountingRawPtr<int> ptr(new int[8]);
  ptr.ClearAndDeleteArray();

  // TODO(crbug.com/1346513): clang-format has a difficult time making
  // sense of preprocessor arms mixed with designated initializers.
  //
  // clang-format off
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl> {
                .wrap_raw_ptr_cnt = 1,
                .release_wrapped_ptr_cnt = 1,
                .get_for_dereference_cnt = 0,
                .get_for_extraction_cnt = 1,
                .wrapped_ptr_swap_cnt = 0,
              }),
              CountersMatch());
  // clang-format on
  EXPECT_EQ(ptr.get(), nullptr);
}
419
// ExtractAsDangling() from a non-dangling pointer: the source impl releases
// its value and records a duplication; the MayDangle impl records the
// corresponding wrap-for-duplication. The source becomes null.
TEST_F(RawPtrTest, ExtractAsDangling) {
  CountingRawPtr<int> ptr(new int);

  // Baseline after construction: exactly one wrap on the non-dangling impl.
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .wrap_raw_ptr_cnt = 1,
                  .release_wrapped_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .wrapped_ptr_swap_cnt = 0,
                  .wrap_raw_ptr_for_dup_cnt = 0,
                  .get_for_duplication_cnt = 0,
              }),
              CountersMatch());
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingMayDangleImpl>{
                  .wrap_raw_ptr_cnt = 0,
                  .release_wrapped_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .wrapped_ptr_swap_cnt = 0,
                  .wrap_raw_ptr_for_dup_cnt = 0,
                  .get_for_duplication_cnt = 0,
              }),
              CountersMatch());

  EXPECT_TRUE(ptr.get());

  CountingRawPtrMayDangle<int> dangling = ptr.ExtractAsDangling();

  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .wrap_raw_ptr_cnt = 1,
                  .release_wrapped_ptr_cnt = 1,
                  .get_for_dereference_cnt = 0,
                  .wrapped_ptr_swap_cnt = 0,
                  .wrap_raw_ptr_for_dup_cnt = 0,
                  .get_for_duplication_cnt = 1,
              }),
              CountersMatch());
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingMayDangleImpl>{
                  .wrap_raw_ptr_cnt = 0,
                  .release_wrapped_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .wrapped_ptr_swap_cnt = 0,
                  .wrap_raw_ptr_for_dup_cnt = 1,
                  .get_for_duplication_cnt = 0,
              }),
              CountersMatch());

  EXPECT_FALSE(ptr.get());
  EXPECT_TRUE(dangling.get());

  dangling.ClearAndDelete();
}
470
// ExtractAsDangling() on an already-dangling pointer should move rather than
// re-wrap: no duplication counters tick, only a release on the source.
TEST_F(RawPtrTest, ExtractAsDanglingFromDangling) {
  CountingRawPtrMayDangle<int> ptr(new int);

  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingMayDangleImpl>{
                  .wrap_raw_ptr_cnt = 1,
                  .release_wrapped_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .wrapped_ptr_swap_cnt = 0,
                  .wrap_raw_ptr_for_dup_cnt = 0,
                  .get_for_duplication_cnt = 0,
              }),
              CountersMatch());

  CountingRawPtrMayDangle<int> dangling = ptr.ExtractAsDangling();

  // wrap_raw_ptr_cnt remains `1` because, as `ptr` is already a dangling
  // pointer, we are only moving `ptr` to `dangling` here to avoid extra cost.
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingMayDangleImpl>{
                  .wrap_raw_ptr_cnt = 1,
                  .release_wrapped_ptr_cnt = 1,
                  .get_for_dereference_cnt = 0,
                  .wrapped_ptr_swap_cnt = 0,
                  .wrap_raw_ptr_for_dup_cnt = 0,
                  .get_for_duplication_cnt = 0,
              }),
              CountersMatch());

  dangling.ClearAndDelete();
}
500
// raw_ptr to cv-qualified void: a static_cast out of it goes through the
// extraction path (the impl can't know whether the result is dereferenced).
TEST_F(RawPtrTest, ConstVolatileVoidPtr) {
  int32_t foo[] = {1234567890};
  CountingRawPtr<const volatile void> ptr = foo;
  EXPECT_EQ(*static_cast<const volatile int32_t*>(ptr), 1234567890);
  // Because we're using a cast, the extraction API kicks in, which doesn't
  // know if the extracted pointer will be dereferenced or not.
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 1,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}

// Same, for plain void.
TEST_F(RawPtrTest, VoidPtr) {
  int32_t foo[] = {1234567890};
  CountingRawPtr<void> ptr = foo;
  EXPECT_EQ(*static_cast<int32_t*>(ptr), 1234567890);
  // Because we're using a cast, the extraction API kicks in, which doesn't
  // know if the extracted pointer will be dereferenced or not.
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 1,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
528
// Equality between raw_ptrs (and against T*) must use only the comparison
// path: 6 comparisons below, each unwrapping up to two raw_ptrs = 12 counts.
TEST_F(RawPtrTest, OperatorEQ) {
  int foo;
  CountingRawPtr<int> ptr1 = nullptr;
  EXPECT_TRUE(ptr1 == ptr1);

  CountingRawPtr<int> ptr2 = nullptr;
  EXPECT_TRUE(ptr1 == ptr2);

  CountingRawPtr<int> ptr3 = &foo;
  EXPECT_TRUE(&foo == ptr3);
  EXPECT_TRUE(ptr3 == &foo);
  EXPECT_FALSE(ptr1 == ptr3);

  ptr1 = &foo;
  EXPECT_TRUE(ptr1 == ptr3);
  EXPECT_TRUE(ptr3 == ptr1);

  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 12,
              }),
              CountersMatch());
}

// Mirror of OperatorEQ using operator!=; same 12 comparison unwraps.
TEST_F(RawPtrTest, OperatorNE) {
  int foo;
  CountingRawPtr<int> ptr1 = nullptr;
  EXPECT_FALSE(ptr1 != ptr1);

  CountingRawPtr<int> ptr2 = nullptr;
  EXPECT_FALSE(ptr1 != ptr2);

  CountingRawPtr<int> ptr3 = &foo;
  EXPECT_FALSE(&foo != ptr3);
  EXPECT_FALSE(ptr3 != &foo);
  EXPECT_TRUE(ptr1 != ptr3);

  ptr1 = &foo;
  EXPECT_FALSE(ptr1 != ptr3);
  EXPECT_FALSE(ptr3 != ptr1);

  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 12,
              }),
              CountersMatch());
}
578
// Equality across differing cv-qualification and void/int types must resolve
// via operator== (comparison path), never via the cast operator (extraction).
TEST_F(RawPtrTest, OperatorEQCast) {
  int foo = 42;
  const int* raw_int_ptr = &foo;
  volatile void* raw_void_ptr = &foo;
  CountingRawPtr<volatile int> checked_int_ptr = &foo;
  CountingRawPtr<const void> checked_void_ptr = &foo;
  EXPECT_TRUE(checked_int_ptr == checked_int_ptr);
  EXPECT_TRUE(checked_int_ptr == raw_int_ptr);
  EXPECT_TRUE(raw_int_ptr == checked_int_ptr);
  EXPECT_TRUE(checked_void_ptr == checked_void_ptr);
  EXPECT_TRUE(checked_void_ptr == raw_void_ptr);
  EXPECT_TRUE(raw_void_ptr == checked_void_ptr);
  EXPECT_TRUE(checked_int_ptr == checked_void_ptr);
  EXPECT_TRUE(checked_int_ptr == raw_void_ptr);
  EXPECT_TRUE(raw_int_ptr == checked_void_ptr);
  EXPECT_TRUE(checked_void_ptr == checked_int_ptr);
  EXPECT_TRUE(checked_void_ptr == raw_int_ptr);
  EXPECT_TRUE(raw_void_ptr == checked_int_ptr);
  // Make sure that all cases are handled by operator== (faster) and none by the
  // cast operator (slower).
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 16,
              }),
              CountersMatch());
}
606
// Equality across a multiple-inheritance hierarchy: comparisons must adjust
// the address for the base-class offset (like plain pointers do) while still
// going through the comparison path, not the cast operator.
TEST_F(RawPtrTest, OperatorEQCastHierarchy) {
  Derived derived_val(42, 84, 1024);
  Derived* raw_derived_ptr = &derived_val;
  const Base1* raw_base1_ptr = &derived_val;
  volatile Base2* raw_base2_ptr = &derived_val;
  // Double check the basic understanding of pointers: Even though the numeric
  // value (i.e. the address) isn't equal, the pointers are still equal. That's
  // because from derived to base adjusts the address.
  // raw_ptr must behave the same, which is checked below.
  ASSERT_NE(reinterpret_cast<uintptr_t>(raw_base2_ptr),
            reinterpret_cast<uintptr_t>(raw_derived_ptr));
  ASSERT_TRUE(raw_base2_ptr == raw_derived_ptr);

  CountingRawPtr<const volatile Derived> checked_derived_ptr = &derived_val;
  CountingRawPtr<volatile Base1> checked_base1_ptr = &derived_val;
  CountingRawPtr<const Base2> checked_base2_ptr = &derived_val;
  EXPECT_TRUE(checked_derived_ptr == checked_derived_ptr);
  EXPECT_TRUE(checked_derived_ptr == raw_derived_ptr);
  EXPECT_TRUE(raw_derived_ptr == checked_derived_ptr);
  EXPECT_TRUE(checked_derived_ptr == checked_base1_ptr);
  EXPECT_TRUE(checked_derived_ptr == raw_base1_ptr);
  EXPECT_TRUE(raw_derived_ptr == checked_base1_ptr);
  EXPECT_TRUE(checked_base1_ptr == checked_derived_ptr);
  EXPECT_TRUE(checked_base1_ptr == raw_derived_ptr);
  EXPECT_TRUE(raw_base1_ptr == checked_derived_ptr);
  // |base2_ptr| points to the second base class of |derived|, so will be
  // located at an offset. While the stored raw uinptr_t values shouldn't match,
  // ensure that the internal pointer manipulation correctly offsets when
  // casting up and down the class hierarchy.
  EXPECT_NE(reinterpret_cast<uintptr_t>(checked_base2_ptr.get()),
            reinterpret_cast<uintptr_t>(checked_derived_ptr.get()));
  EXPECT_NE(reinterpret_cast<uintptr_t>(raw_base2_ptr),
            reinterpret_cast<uintptr_t>(checked_derived_ptr.get()));
  EXPECT_NE(reinterpret_cast<uintptr_t>(checked_base2_ptr.get()),
            reinterpret_cast<uintptr_t>(raw_derived_ptr));
  EXPECT_TRUE(checked_derived_ptr == checked_base2_ptr);
  EXPECT_TRUE(checked_derived_ptr == raw_base2_ptr);
  EXPECT_TRUE(raw_derived_ptr == checked_base2_ptr);
  EXPECT_TRUE(checked_base2_ptr == checked_derived_ptr);
  EXPECT_TRUE(checked_base2_ptr == raw_derived_ptr);
  EXPECT_TRUE(raw_base2_ptr == checked_derived_ptr);
  // Make sure that all cases are handled by operator== (faster) and none by the
  // cast operator (slower).
  // The 4 extractions come from .get() checks, that compare raw addresses.
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 4,
                  .get_for_comparison_cnt = 20,
              }),
              CountersMatch());
}
658
// operator!= mirror of OperatorEQCast: same types, same 16 comparison
// unwraps, zero extractions.
TEST_F(RawPtrTest, OperatorNECast) {
  int foo = 42;
  volatile int* raw_int_ptr = &foo;
  const void* raw_void_ptr = &foo;
  CountingRawPtr<const int> checked_int_ptr = &foo;
  CountingRawPtr<volatile void> checked_void_ptr = &foo;
  EXPECT_FALSE(checked_int_ptr != checked_int_ptr);
  EXPECT_FALSE(checked_int_ptr != raw_int_ptr);
  EXPECT_FALSE(raw_int_ptr != checked_int_ptr);
  EXPECT_FALSE(checked_void_ptr != checked_void_ptr);
  EXPECT_FALSE(checked_void_ptr != raw_void_ptr);
  EXPECT_FALSE(raw_void_ptr != checked_void_ptr);
  EXPECT_FALSE(checked_int_ptr != checked_void_ptr);
  EXPECT_FALSE(checked_int_ptr != raw_void_ptr);
  EXPECT_FALSE(raw_int_ptr != checked_void_ptr);
  EXPECT_FALSE(checked_void_ptr != checked_int_ptr);
  EXPECT_FALSE(checked_void_ptr != raw_int_ptr);
  EXPECT_FALSE(raw_void_ptr != checked_int_ptr);
  // Make sure that all cases are handled by operator== (faster) and none by the
  // cast operator (slower).
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 16,
              }),
              CountersMatch());
}
686
// operator!= mirror of OperatorEQCastHierarchy: base-offset-adjusting
// comparisons across the hierarchy, 20 comparison unwraps + 4 .get() calls.
TEST_F(RawPtrTest, OperatorNECastHierarchy) {
  Derived derived_val(42, 84, 1024);
  const Derived* raw_derived_ptr = &derived_val;
  volatile Base1* raw_base1_ptr = &derived_val;
  const Base2* raw_base2_ptr = &derived_val;
  CountingRawPtr<volatile Derived> checked_derived_ptr = &derived_val;
  CountingRawPtr<const Base1> checked_base1_ptr = &derived_val;
  CountingRawPtr<const volatile Base2> checked_base2_ptr = &derived_val;
  EXPECT_FALSE(checked_derived_ptr != checked_derived_ptr);
  EXPECT_FALSE(checked_derived_ptr != raw_derived_ptr);
  EXPECT_FALSE(raw_derived_ptr != checked_derived_ptr);
  EXPECT_FALSE(checked_derived_ptr != checked_base1_ptr);
  EXPECT_FALSE(checked_derived_ptr != raw_base1_ptr);
  EXPECT_FALSE(raw_derived_ptr != checked_base1_ptr);
  EXPECT_FALSE(checked_base1_ptr != checked_derived_ptr);
  EXPECT_FALSE(checked_base1_ptr != raw_derived_ptr);
  EXPECT_FALSE(raw_base1_ptr != checked_derived_ptr);
  // |base2_ptr| points to the second base class of |derived|, so will be
  // located at an offset. While the stored raw uinptr_t values shouldn't match,
  // ensure that the internal pointer manipulation correctly offsets when
  // casting up and down the class hierarchy.
  EXPECT_NE(reinterpret_cast<uintptr_t>(checked_base2_ptr.get()),
            reinterpret_cast<uintptr_t>(checked_derived_ptr.get()));
  EXPECT_NE(reinterpret_cast<uintptr_t>(raw_base2_ptr),
            reinterpret_cast<uintptr_t>(checked_derived_ptr.get()));
  EXPECT_NE(reinterpret_cast<uintptr_t>(checked_base2_ptr.get()),
            reinterpret_cast<uintptr_t>(raw_derived_ptr));
  EXPECT_FALSE(checked_derived_ptr != checked_base2_ptr);
  EXPECT_FALSE(checked_derived_ptr != raw_base2_ptr);
  EXPECT_FALSE(raw_derived_ptr != checked_base2_ptr);
  EXPECT_FALSE(checked_base2_ptr != checked_derived_ptr);
  EXPECT_FALSE(checked_base2_ptr != raw_derived_ptr);
  EXPECT_FALSE(raw_base2_ptr != checked_derived_ptr);
  // Make sure that all cases are handled by operator== (faster) and none by the
  // cast operator (slower).
  // The 4 extractions come from .get() checks, that compare raw addresses.
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 4,
                  .get_for_comparison_cnt = 20,
              }),
              CountersMatch());
}
730
// End-to-end conversion matrix: raw_ptr must support the same implicit
// upcasts, explicit static_cast downcasts, cv-qualification additions, and
// void round-trips as a plain pointer, with correct base-subobject offsets.
TEST_F(RawPtrTest, Cast) {
  Derived derived_val(42, 84, 1024);
  raw_ptr<Derived> checked_derived_ptr = &derived_val;
  // Implicit upcasts to both bases (Base2 requires an address adjustment).
  Base1* raw_base1_ptr = checked_derived_ptr;
  EXPECT_EQ(raw_base1_ptr->b1, 42);
  Base2* raw_base2_ptr = checked_derived_ptr;
  EXPECT_EQ(raw_base2_ptr->b2, 84);

  // Explicit downcasts from raw base pointers back to Derived.
  Derived* raw_derived_ptr = static_cast<Derived*>(raw_base1_ptr);
  EXPECT_EQ(raw_derived_ptr->b1, 42);
  EXPECT_EQ(raw_derived_ptr->b2, 84);
  EXPECT_EQ(raw_derived_ptr->d, 1024);
  raw_derived_ptr = static_cast<Derived*>(raw_base2_ptr);
  EXPECT_EQ(raw_derived_ptr->b1, 42);
  EXPECT_EQ(raw_derived_ptr->b2, 84);
  EXPECT_EQ(raw_derived_ptr->d, 1024);

  // raw_ptr constructed from a raw pointer, upcast at construction.
  raw_ptr<Base1> checked_base1_ptr = raw_derived_ptr;
  EXPECT_EQ(checked_base1_ptr->b1, 42);
  raw_ptr<Base2> checked_base2_ptr = raw_derived_ptr;
  EXPECT_EQ(checked_base2_ptr->b2, 84);

  // Explicit downcast out of a raw_ptr (goes through operator T*).
  raw_ptr<Derived> checked_derived_ptr2 =
      static_cast<Derived*>(checked_base1_ptr);
  EXPECT_EQ(checked_derived_ptr2->b1, 42);
  EXPECT_EQ(checked_derived_ptr2->b2, 84);
  EXPECT_EQ(checked_derived_ptr2->d, 1024);
  checked_derived_ptr2 = static_cast<Derived*>(checked_base2_ptr);
  EXPECT_EQ(checked_derived_ptr2->b1, 42);
  EXPECT_EQ(checked_derived_ptr2->b2, 84);
  EXPECT_EQ(checked_derived_ptr2->d, 1024);

  // Adding const: raw_ptr -> const T*, const T* -> raw_ptr<const T>, etc.
  const Derived* raw_const_derived_ptr = checked_derived_ptr2;
  EXPECT_EQ(raw_const_derived_ptr->b1, 42);
  EXPECT_EQ(raw_const_derived_ptr->b2, 84);
  EXPECT_EQ(raw_const_derived_ptr->d, 1024);

  raw_ptr<const Derived> checked_const_derived_ptr = raw_const_derived_ptr;
  EXPECT_EQ(checked_const_derived_ptr->b1, 42);
  EXPECT_EQ(checked_const_derived_ptr->b2, 84);
  EXPECT_EQ(checked_const_derived_ptr->d, 1024);

  const Derived* raw_const_derived_ptr2 = checked_const_derived_ptr;
  EXPECT_EQ(raw_const_derived_ptr2->b1, 42);
  EXPECT_EQ(raw_const_derived_ptr2->b2, 84);
  EXPECT_EQ(raw_const_derived_ptr2->d, 1024);

  raw_ptr<const Derived> checked_const_derived_ptr2 = raw_derived_ptr;
  EXPECT_EQ(checked_const_derived_ptr2->b1, 42);
  EXPECT_EQ(checked_const_derived_ptr2->b2, 84);
  EXPECT_EQ(checked_const_derived_ptr2->d, 1024);

  raw_ptr<const Derived> checked_const_derived_ptr3 = checked_derived_ptr2;
  EXPECT_EQ(checked_const_derived_ptr3->b1, 42);
  EXPECT_EQ(checked_const_derived_ptr3->b2, 84);
  EXPECT_EQ(checked_const_derived_ptr3->d, 1024);

  // Adding volatile works the same way.
  volatile Derived* raw_volatile_derived_ptr = checked_derived_ptr2;
  EXPECT_EQ(raw_volatile_derived_ptr->b1, 42);
  EXPECT_EQ(raw_volatile_derived_ptr->b2, 84);
  EXPECT_EQ(raw_volatile_derived_ptr->d, 1024);

  raw_ptr<volatile Derived> checked_volatile_derived_ptr =
      raw_volatile_derived_ptr;
  EXPECT_EQ(checked_volatile_derived_ptr->b1, 42);
  EXPECT_EQ(checked_volatile_derived_ptr->b2, 84);
  EXPECT_EQ(checked_volatile_derived_ptr->d, 1024);

  // Round trip through void* and raw_ptr<void>.
  void* raw_void_ptr = checked_derived_ptr;
  raw_ptr<void> checked_void_ptr = raw_derived_ptr;
  raw_ptr<Derived> checked_derived_ptr3 = static_cast<Derived*>(raw_void_ptr);
  raw_ptr<Derived> checked_derived_ptr4 =
      static_cast<Derived*>(checked_void_ptr);
  EXPECT_EQ(checked_derived_ptr3->b1, 42);
  EXPECT_EQ(checked_derived_ptr3->b2, 84);
  EXPECT_EQ(checked_derived_ptr3->d, 1024);
  EXPECT_EQ(checked_derived_ptr4->b1, 42);
  EXPECT_EQ(checked_derived_ptr4->b2, 84);
  EXPECT_EQ(checked_derived_ptr4->d, 1024);
}
811
// raw_ptr<Derived> must be convertible to raw_ptr<Base> via copy and move
// construction as well as copy and move assignment.
TEST_F(RawPtrTest, UpcastConvertible) {
  {
    // Copy construction / copy assignment to both bases.
    Derived derived_val(42, 84, 1024);
    raw_ptr<Derived> checked_derived_ptr = &derived_val;

    raw_ptr<Base1> checked_base1_ptr(checked_derived_ptr);
    EXPECT_EQ(checked_base1_ptr->b1, 42);
    raw_ptr<Base2> checked_base2_ptr(checked_derived_ptr);
    EXPECT_EQ(checked_base2_ptr->b2, 84);

    checked_base1_ptr = checked_derived_ptr;
    EXPECT_EQ(checked_base1_ptr->b1, 42);
    checked_base2_ptr = checked_derived_ptr;
    EXPECT_EQ(checked_base2_ptr->b2, 84);

    EXPECT_EQ(checked_base1_ptr, checked_derived_ptr);
    EXPECT_EQ(checked_base2_ptr, checked_derived_ptr);
  }

  {
    // Move construction / move assignment; each source raw_ptr is used only
    // once, since it is left in a moved-from state afterwards.
    Derived derived_val(42, 84, 1024);
    raw_ptr<Derived> checked_derived_ptr1 = &derived_val;
    raw_ptr<Derived> checked_derived_ptr2 = &derived_val;
    raw_ptr<Derived> checked_derived_ptr3 = &derived_val;
    raw_ptr<Derived> checked_derived_ptr4 = &derived_val;

    raw_ptr<Base1> checked_base1_ptr(std::move(checked_derived_ptr1));
    EXPECT_EQ(checked_base1_ptr->b1, 42);
    raw_ptr<Base2> checked_base2_ptr(std::move(checked_derived_ptr2));
    EXPECT_EQ(checked_base2_ptr->b2, 84);

    checked_base1_ptr = std::move(checked_derived_ptr3);
    EXPECT_EQ(checked_base1_ptr->b1, 42);
    checked_base2_ptr = std::move(checked_derived_ptr4);
    EXPECT_EQ(checked_base2_ptr->b2, 84);
  }
}
849
// Conversions that must be rejected at compile time: private inheritance,
// unrelated types, void round-trips, and integers of differing widths.
TEST_F(RawPtrTest, UpcastNotConvertible) {
  class Base {};
  class Derived : private Base {};
  class Unrelated {};
  // Private base: not convertible from outside the class.
  EXPECT_FALSE((std::is_convertible_v<raw_ptr<Derived>, raw_ptr<Base>>));
  // No inheritance relationship at all.
  EXPECT_FALSE((std::is_convertible_v<raw_ptr<Unrelated>, raw_ptr<Base>>));
  // No implicit conversions to or from raw_ptr<void>.
  EXPECT_FALSE((std::is_convertible_v<raw_ptr<Unrelated>, raw_ptr<void>>));
  EXPECT_FALSE((std::is_convertible_v<raw_ptr<void>, raw_ptr<Unrelated>>));
  // No conversions between differently-sized integer pointees.
  EXPECT_FALSE((std::is_convertible_v<raw_ptr<int64_t>, raw_ptr<int32_t>>));
  EXPECT_FALSE((std::is_convertible_v<raw_ptr<int16_t>, raw_ptr<int32_t>>));
}
863
// Upcasts (copy and move, construction and assignment) must not go through
// the dereference, extraction, or comparison paths at all — all three
// counters must remain zero.
TEST_F(RawPtrTest, UpcastPerformance) {
  {
    Derived derived_val(42, 84, 1024);
    CountingRawPtr<Derived> checked_derived_ptr = &derived_val;
    CountingRawPtr<Base1> checked_base1_ptr(checked_derived_ptr);
    CountingRawPtr<Base2> checked_base2_ptr(checked_derived_ptr);
    checked_base1_ptr = checked_derived_ptr;
    checked_base2_ptr = checked_derived_ptr;
  }

  {
    // Same as above but via moves. NOTE(review): the same pointer is moved
    // from repeatedly — presumably raw_ptr's move leaves the source in a
    // reusable state; confirm against raw_ptr's move semantics.
    Derived derived_val(42, 84, 1024);
    CountingRawPtr<Derived> checked_derived_ptr = &derived_val;
    CountingRawPtr<Base1> checked_base1_ptr(std::move(checked_derived_ptr));
    CountingRawPtr<Base2> checked_base2_ptr(std::move(checked_derived_ptr));
    checked_base1_ptr = std::move(checked_derived_ptr);
    checked_base2_ptr = std::move(checked_derived_ptr);
  }

  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
890
// ADL swap (via `using std::swap`) must dispatch to raw_ptr's custom swap
// overload, which exchanges the wrapped pointers exactly once.
TEST_F(RawPtrTest, CustomSwap) {
  int foo1, foo2;
  CountingRawPtr<int> ptr1(&foo1);
  CountingRawPtr<int> ptr2(&foo2);
  // Recommended use pattern.
  using std::swap;
  swap(ptr1, ptr2);
  EXPECT_EQ(ptr1.get(), &foo2);
  EXPECT_EQ(ptr2.get(), &foo1);
  // The custom overload was hit exactly once.
  EXPECT_EQ(RawPtrCountingImpl::wrapped_ptr_swap_cnt, 1);
}
902
// Fully-qualified std::swap bypasses the custom overload (it performs a
// generic move-based swap), so the custom-swap counter must stay at zero
// while the pointers are still exchanged correctly.
TEST_F(RawPtrTest, StdSwap) {
  int foo1, foo2;
  CountingRawPtr<int> ptr1(&foo1);
  CountingRawPtr<int> ptr2(&foo2);
  std::swap(ptr1, ptr2);
  EXPECT_EQ(ptr1.get(), &foo2);
  EXPECT_EQ(ptr2.get(), &foo1);
  EXPECT_EQ(RawPtrCountingImpl::wrapped_ptr_swap_cnt, 0);
}
912
// Post-increment traversal of an array: each `*ptr++` must register as a
// dereference, never as an extraction to a raw pointer.
TEST_F(RawPtrTest, PostIncrementOperator) {
  std::vector<int> foo({42, 43, 44, 45});
  CountingRawPtr<int> ptr = &foo[0];
  for (int i = 0; i < 4; ++i) {
    ASSERT_EQ(*ptr++, 42 + i);
  }
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 4,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
926
// Post-decrement traversal; mirror image of PostIncrementOperator.
TEST_F(RawPtrTest, PostDecrementOperator) {
  std::vector<int> foo({42, 43, 44, 45});
  CountingRawPtr<int> ptr = &foo[3];
  // Avoid decrementing out of the slot holding the vector's backing store.
  for (int i = 3; i > 0; --i) {
    ASSERT_EQ(*ptr--, 42 + i);
  }
  ASSERT_EQ(*ptr, 42);
  // 3 loop dereferences + 1 final check = 4.
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 4,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
942
// Pre-increment traversal; each `*ptr` must register as a dereference only.
TEST_F(RawPtrTest, PreIncrementOperator) {
  std::vector<int> foo({42, 43, 44, 45});
  CountingRawPtr<int> ptr = &foo[0];
  for (int i = 0; i < 4; ++i, ++ptr) {
    ASSERT_EQ(*ptr, 42 + i);
  }
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 4,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
956
// Pre-decrement traversal; mirror image of PreIncrementOperator.
TEST_F(RawPtrTest, PreDecrementOperator) {
  std::vector<int> foo({42, 43, 44, 45});
  CountingRawPtr<int> ptr = &foo[3];
  // Avoid decrementing out of the slot holding the vector's backing store.
  for (int i = 3; i > 0; --i, --ptr) {
    ASSERT_EQ(*ptr, 42 + i);
  }
  ASSERT_EQ(*ptr, 42);
  // 3 loop dereferences + 1 final check = 4.
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 4,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
972
// operator+= with a stride of 2: only two loop iterations run, hence only
// two dereferences are expected.
TEST_F(RawPtrTest, PlusEqualOperator) {
  std::vector<int> foo({42, 43, 44, 45});
  CountingRawPtr<int> ptr = &foo[0];
  for (int i = 0; i < 4; i += 2, ptr += 2) {
    ASSERT_EQ(*ptr, 42 + i);
  }
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 2,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
986
// `operator+=`/`operator-=` must accept a variety of integral offset types,
// both signed and unsigned.
TEST_F(RawPtrTest, PlusEqualOperatorTypes) {
  std::vector<int> values = {42, 43, 44, 45};
  CountingRawPtr<int> cursor = &values[0];
  ASSERT_EQ(*cursor, 42);
  cursor += 2;  // int literal.
  ASSERT_EQ(*cursor, 44);
  cursor -= 2;  // int literal, subtracted.
  ASSERT_EQ(*cursor, 42);
  cursor += ptrdiff_t{1};  // Signed ptrdiff_t.
  ASSERT_EQ(*cursor, 43);
  cursor += size_t{2};  // Unsigned size_t.
  ASSERT_EQ(*cursor, 45);
}
1000
// operator-= moves the pointer backwards; both ASSERTs dereference, hence
// a count of exactly 2.
TEST_F(RawPtrTest, MinusEqualOperator) {
  std::vector<int> foo({42, 43, 44, 45});
  CountingRawPtr<int> ptr = &foo[3];
  ASSERT_EQ(*ptr, 45);
  ptr -= 2;
  ASSERT_EQ(*ptr, 43);
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 2,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
1014
// `operator-=` must accept signed and unsigned integral offsets, including
// negative values (which advance the pointer).
TEST_F(RawPtrTest, MinusEqualOperatorTypes) {
  int values[] = {42, 43, 44, 45};
  CountingRawPtr<int> cursor = &values[3];
  ASSERT_EQ(*cursor, 45);
  cursor -= 2;  // Positive int literal.
  ASSERT_EQ(*cursor, 43);
  cursor -= -2;  // Negative int literal — moves forward.
  ASSERT_EQ(*cursor, 45);
  cursor -= ptrdiff_t{2};  // Signed ptrdiff_t.
  ASSERT_EQ(*cursor, 43);
  cursor -= size_t{1};  // Unsigned size_t.
  ASSERT_EQ(*cursor, 42);
}
1028
// Binary operator+ yields a pointer whose dereference is counted; the
// original pointer itself is never extracted.
TEST_F(RawPtrTest, PlusOperator) {
  int foo[] = {42, 43, 44, 45};
  CountingRawPtr<int> ptr = foo;
  for (int i = 0; i < 4; ++i) {
    ASSERT_EQ(*(ptr + i), 42 + i);
  }
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 4,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
1042
// Binary operator- (pointer minus integer), starting from the
// one-past-the-end address &foo[4]; each result is dereferenced once.
TEST_F(RawPtrTest, MinusOperator) {
  int foo[] = {42, 43, 44, 45};
  CountingRawPtr<int> ptr = &foo[4];
  for (int i = 1; i <= 4; ++i) {
    ASSERT_EQ(*(ptr - i), 46 - i);
  }
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 4,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
1056
// Pointer-difference (raw_ptr - raw_ptr, raw_ptr - T*, T* - raw_ptr) must
// compute element distances without touching any counted path.
// (&foo[4] is the one-past-the-end address; it is never dereferenced.)
TEST_F(RawPtrTest, MinusDeltaOperator) {
  int foo[] = {42, 43, 44, 45};
  CountingRawPtr<int> ptrs[] = {&foo[0], &foo[1], &foo[2], &foo[3], &foo[4]};
  for (int i = 0; i <= 4; ++i) {
    for (int j = 0; j <= 4; ++j) {
      ASSERT_EQ(ptrs[i] - ptrs[j], i - j);
      ASSERT_EQ(ptrs[i] - &foo[j], i - j);
      ASSERT_EQ(&foo[i] - ptrs[j], i - j);
    }
  }
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
1074
// Walks a raw_ptr<const char> across a std::string's characters; each read
// counts as a dereference ("Hello" => 5 of them).
TEST_F(RawPtrTest, AdvanceString) {
  const char kChars[] = "Hello";
  std::string str = kChars;
  CountingRawPtr<const char> ptr = str.c_str();
  for (size_t i = 0; i < str.size(); ++i, ++ptr) {
    ASSERT_EQ(*ptr, kChars[i]);
  }
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 5,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
1089
// Assigning nullptr must not wrap a pointer (wrap_raw_ptr_cnt stays 0) nor
// touch any other counted path.
TEST_F(RawPtrTest, AssignmentFromNullptr) {
  CountingRawPtr<int> wrapped_ptr;
  wrapped_ptr = nullptr;
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());
}
1101
// Helper receiving a raw_ptr<int> by value; verifies it designates
// `expected_ptr`. Used by the FunctionParameters_* tests below.
void FunctionWithRawPtrParameter(raw_ptr<int> actual_ptr, int* expected_ptr) {
  EXPECT_EQ(actual_ptr.get(), expected_ptr);
  EXPECT_EQ(*actual_ptr, *expected_ptr);
}
1106
1107 // This test checks that raw_ptr<T> can be passed by value into function
1108 // parameters. This is mostly a smoke test for TRIVIAL_ABI attribute.
TEST_F(RawPtrTest,FunctionParameters_ImplicitlyMovedTemporary)1109 TEST_F(RawPtrTest, FunctionParameters_ImplicitlyMovedTemporary) {
1110 int x = 123;
1111 FunctionWithRawPtrParameter(
1112 raw_ptr<int>(&x), // Temporary that will be moved into the function.
1113 &x);
1114 }
1115
1116 // This test checks that raw_ptr<T> can be passed by value into function
1117 // parameters. This is mostly a smoke test for TRIVIAL_ABI attribute.
TEST_F(RawPtrTest,FunctionParameters_ExplicitlyMovedLValue)1118 TEST_F(RawPtrTest, FunctionParameters_ExplicitlyMovedLValue) {
1119 int x = 123;
1120 raw_ptr<int> ptr(&x);
1121 FunctionWithRawPtrParameter(std::move(ptr), &x);
1122 }
1123
1124 // This test checks that raw_ptr<T> can be passed by value into function
1125 // parameters. This is mostly a smoke test for TRIVIAL_ABI attribute.
TEST_F(RawPtrTest,FunctionParameters_Copy)1126 TEST_F(RawPtrTest, FunctionParameters_Copy) {
1127 int x = 123;
1128 raw_ptr<int> ptr(&x);
1129 FunctionWithRawPtrParameter(ptr, // `ptr` will be copied into the function.
1130 &x);
1131 }
1132
// std::set insertion and lookup must route all ordering checks through
// GetForComparison() and the wrapped-pointer less-than hook — never through
// the dereference or extraction paths.
TEST_F(RawPtrTest, SetLookupUsesGetForComparison) {
  int x = 123;
  CountingRawPtr<int> ptr(&x);
  std::set<CountingRawPtr<int>> set;

  RawPtrCountingImpl::ClearCounters();
  set.emplace(&x);
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .wrap_raw_ptr_cnt = 1,
                  // Nothing to compare to yet.
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 0,
                  .wrapped_ptr_less_cnt = 0,
              }),
              CountersMatch());

  RawPtrCountingImpl::ClearCounters();
  set.emplace(ptr);
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  // 2 items to compare to => 4 calls.
                  .get_for_comparison_cnt = 4,
                  // 1 element to compare to => 2 calls.
                  .wrapped_ptr_less_cnt = 2,
              }),
              CountersMatch());

  RawPtrCountingImpl::ClearCounters();
  set.count(&x);
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  // 2 comparisons => 2 extractions. Less than before, because
                  // this time a raw pointer is one side of the comparison.
                  .get_for_comparison_cnt = 2,
                  // 2 items to compare to => 4 calls.
                  .wrapped_ptr_less_cnt = 2,
              }),
              CountersMatch());

  RawPtrCountingImpl::ClearCounters();
  set.count(ptr);
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  // 2 comparisons => 4 extractions.
                  .get_for_comparison_cnt = 4,
                  // 2 items to compare to => 4 calls.
                  .wrapped_ptr_less_cnt = 2,
              }),
              CountersMatch());
}
1190
// Relational operators — raw_ptr vs raw_ptr, and mixed raw_ptr/raw-pointer
// in both orders — must use GetForComparison() (not extraction) and must
// not go through the std::less hook.
TEST_F(RawPtrTest, ComparisonOperatorUsesGetForComparison) {
  int x = 123;
  CountingRawPtr<int> ptr(&x);

  // raw_ptr on both sides: 2 comparison-gets per operator => 8.
  RawPtrCountingImpl::ClearCounters();
  EXPECT_FALSE(ptr < ptr);
  EXPECT_FALSE(ptr > ptr);
  EXPECT_TRUE(ptr <= ptr);
  EXPECT_TRUE(ptr >= ptr);
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 8,
                  // < is used directly, not std::less().
                  .wrapped_ptr_less_cnt = 0,
              }),
              CountersMatch());

  // raw_ptr on the left only: 1 comparison-get per operator => 4.
  RawPtrCountingImpl::ClearCounters();
  EXPECT_FALSE(ptr < &x);
  EXPECT_FALSE(ptr > &x);
  EXPECT_TRUE(ptr <= &x);
  EXPECT_TRUE(ptr >= &x);
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 4,
                  .wrapped_ptr_less_cnt = 0,
              }),
              CountersMatch());

  // raw_ptr on the right only: 1 comparison-get per operator => 4.
  RawPtrCountingImpl::ClearCounters();
  EXPECT_FALSE(&x < ptr);
  EXPECT_FALSE(&x > ptr);
  EXPECT_TRUE(&x <= ptr);
  EXPECT_TRUE(&x >= ptr);
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .wrap_raw_ptr_cnt = 0,
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 4,
                  .wrapped_ptr_less_cnt = 0,
              }),
              CountersMatch());
}
1238
// Two `raw_ptr`s with different Traits should still hit `GetForComparison()`
// (as opposed to `GetForExtraction()`) in their comparison operators. We use
// `CountingRawPtr` and `CountingRawPtrMayDangle` to contrast two different
// Traits.
TEST_F(RawPtrTest, OperatorsUseGetForComparison) {
  int x = 123;
  CountingRawPtr<int> ptr1 = &x;
  CountingRawPtrMayDangle<int> ptr2 = &x;

  RawPtrCountingImpl::ClearCounters();
  RawPtrCountingMayDangleImpl::ClearCounters();

  // Equality operators: one comparison-get per side per operator => 2 each.
  EXPECT_TRUE(ptr1 == ptr2);
  EXPECT_FALSE(ptr1 != ptr2);
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 2,
              }),
              CountersMatch());
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingMayDangleImpl>{
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 2,
              }),
              CountersMatch());

  // Relational operators: 4 more per side, cumulative => 6 each.
  EXPECT_FALSE(ptr1 < ptr2);
  EXPECT_FALSE(ptr1 > ptr2);
  EXPECT_TRUE(ptr1 <= ptr2);
  EXPECT_TRUE(ptr1 >= ptr2);
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 6,
              }),
              CountersMatch());
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingMayDangleImpl>{
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = 6,
              }),
              CountersMatch());
}
1279
1280 // This test checks how the std library handles collections like
1281 // std::vector<raw_ptr<T>>.
1282 //
1283 // When this test is written, reallocating std::vector's storage (e.g.
1284 // when growing the vector) requires calling raw_ptr's destructor on the
1285 // old storage (after std::move-ing the data to the new storage). In
// the future we hope that TRIVIAL_ABI (or [[trivially_relocatable]]
1287 // proposed by P1144 [1]) will allow memcpy-ing the elements into the
1288 // new storage (without invoking destructors and move constructors
1289 // and/or move assignment operators). At that point, the assert in the
1290 // test should be modified to capture the new, better behavior.
1291 //
1292 // In the meantime, this test serves as a basic correctness test that
1293 // ensures that raw_ptr<T> stored in a std::vector passes basic smoke
1294 // tests.
1295 //
1296 // [1]
1297 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2020/p1144r5.html#wording-attribute
TEST_F(RawPtrTest,TrivialRelocability)1298 TEST_F(RawPtrTest, TrivialRelocability) {
1299 std::vector<CountingRawPtr<int>> vector;
1300 int x = 123;
1301
1302 // See how many times raw_ptr's destructor is called when std::vector
1303 // needs to increase its capacity and reallocate the internal vector
1304 // storage (moving the raw_ptr elements).
1305 RawPtrCountingImpl::ClearCounters();
1306 size_t number_of_capacity_changes = 0;
1307 do {
1308 size_t previous_capacity = vector.capacity();
1309 while (vector.capacity() == previous_capacity) {
1310 vector.emplace_back(&x);
1311 }
1312 number_of_capacity_changes++;
1313 } while (number_of_capacity_changes < 10);
1314 #if BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) || \
1315 BUILDFLAG(USE_ASAN_UNOWNED_PTR) || BUILDFLAG(USE_HOOKABLE_RAW_PTR)
1316 // TODO(lukasza): In the future (once C++ language and std library
1317 // support custom trivially relocatable objects) this #if branch can
1318 // be removed (keeping only the right long-term expectation from the
1319 // #else branch).
1320 EXPECT_NE(0, RawPtrCountingImpl::release_wrapped_ptr_cnt);
1321 #else
1322 // This is the right long-term expectation.
1323 //
1324 // (This EXPECT_EQ assertion is slightly misleading in
1325 // !USE_BACKUP_REF_PTR mode, because RawPtrNoOpImpl has a default
1326 // destructor that doesn't go through
1327 // RawPtrCountingImpl::ReleaseWrappedPtr. Nevertheless, the spirit of
1328 // the EXPECT_EQ is correct + the assertion should be true in the
1329 // long-term.)
1330 EXPECT_EQ(0, RawPtrCountingImpl::release_wrapped_ptr_cnt);
1331 #endif // BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) ||
1332 // BUILDFLAG(USE_ASAN_UNOWNED_PTR)
1333
1334 // Basic smoke test that raw_ptr elements in a vector work okay.
1335 for (const auto& elem : vector) {
1336 EXPECT_EQ(elem.get(), &x);
1337 EXPECT_EQ(*elem, x);
1338 }
1339
1340 // Verification that release_wrapped_ptr_cnt does capture how many times the
1341 // destructors are called (e.g. that it is not always zero).
1342 RawPtrCountingImpl::ClearCounters();
1343 size_t number_of_cleared_elements = vector.size();
1344 vector.clear();
1345 #if BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) || \
1346 BUILDFLAG(USE_ASAN_UNOWNED_PTR) || BUILDFLAG(USE_HOOKABLE_RAW_PTR)
1347
1348 EXPECT_EQ((int)number_of_cleared_elements,
1349 RawPtrCountingImpl::release_wrapped_ptr_cnt);
1350 #else
1351 // TODO(lukasza): !USE_BACKUP_REF_PTR / RawPtrNoOpImpl has a default
1352 // destructor that doesn't go through
1353 // RawPtrCountingImpl::ReleaseWrappedPtr. So we can't really depend
1354 // on `g_release_wrapped_ptr_cnt`. This #else branch should be
1355 // deleted once USE_BACKUP_REF_PTR is removed (e.g. once BackupRefPtr
1356 // ships to the Stable channel).
1357 EXPECT_EQ(0, RawPtrCountingImpl::release_wrapped_ptr_cnt);
1358 std::ignore = number_of_cleared_elements;
1359 #endif // BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) ||
1360 // BUILDFLAG(USE_ASAN_UNOWNED_PTR)
1361 }
1362
// Polymorphic base for the derived-struct comparison tests below; the
// virtual destructor is what makes the hierarchy polymorphic.
struct BaseStruct {
  explicit BaseStruct(int a_in) : a(a_in) {}
  virtual ~BaseStruct() = default;

  int a;
};
1369
1370 struct DerivedType1 : public BaseStruct {
DerivedType1__anon811eace80211::DerivedType11371 explicit DerivedType1(int a, int b) : BaseStruct(a), b(b) {}
1372 int b;
1373 };
1374
1375 struct DerivedType2 : public BaseStruct {
DerivedType2__anon811eace80211::DerivedType21376 explicit DerivedType2(int a, int c) : BaseStruct(a), c(c) {}
1377 int c;
1378 };
1379
// Comparing pointers to two different derived types through their common
// base must not trip Control Flow Integrity checks.
TEST_F(RawPtrTest, DerivedStructsComparison) {
  DerivedType1 first(42, 84);
  raw_ptr<DerivedType1> first_ptr = &first;
  DerivedType2 second(21, 10);
  raw_ptr<DerivedType2> second_ptr = &second;

  // Compare a |DerivedType2*| against a |DerivedType1*| viewed as its
  // |BaseStruct*| — and vice versa — without causing CFI errors.
  EXPECT_NE(first_ptr, static_cast<BaseStruct*>(second_ptr.get()));
  EXPECT_NE(static_cast<BaseStruct*>(first_ptr.get()), second_ptr);
}
1393
// Helper for pointer-to-member-function (PMF) tests; presumably used by
// tests outside this chunk. The return value 11 is a sentinel that
// distinguishes this overload from the derived one.
class PmfTestBase {
 public:
  int MemFunc(char, double) const { return 11; }
};
1398
// Derived PMF helper: re-exposes the base overload via a using-declaration
// and adds a non-const overload returning the sentinel 22.
class PmfTestDerived : public PmfTestBase {
 public:
  using PmfTestBase::MemFunc;
  int MemFunc(float, double) { return 22; }
};
1404
// raw_ptr<T> must be storable in absl::optional and assignable there both
// from nullptr and from a plain pointer.
TEST_F(RawPtrTest, WorksWithOptional) {
  int value = 0;
  absl::optional<raw_ptr<int>> opt;
  EXPECT_FALSE(opt.has_value());

  // Engaged with a null raw_ptr — has a value, and that value is null.
  opt = nullptr;
  ASSERT_TRUE(opt.has_value());
  EXPECT_EQ(nullptr, opt.value());

  // Engaged with a real address.
  opt = &value;
  ASSERT_TRUE(opt.has_value());
  EXPECT_EQ(&value, opt.value());
}
1418
// raw_ptr<T> must behave as a well-formed alternative inside absl::variant:
// assignments select the expected alternative and preserve the value.
TEST_F(RawPtrTest, WorksWithVariant) {
  int value = 100;
  absl::variant<int, raw_ptr<int>> var;
  ASSERT_EQ(0u, var.index());
  EXPECT_EQ(0, absl::get<int>(var));

  var = value;
  ASSERT_EQ(0u, var.index());
  EXPECT_EQ(100, absl::get<int>(var));

  // nullptr selects the raw_ptr alternative (index 1).
  var = nullptr;
  ASSERT_EQ(1u, var.index());
  EXPECT_EQ(nullptr, absl::get<raw_ptr<int>>(var));

  var = &value;
  ASSERT_EQ(1u, var.index());
  EXPECT_EQ(&value, absl::get<raw_ptr<int>>(var));
}
1437
// Converting a raw_ptr to one with different Traits (here: MayDangle) must
// go through the duplication path — GetForDuplication() on the source and
// the for-duplication wrap on the destination — for both copy and move.
TEST_F(RawPtrTest, CrossKindConversion) {
  int x = 123;
  CountingRawPtr<int> ptr1 = &x;

  RawPtrCountingImpl::ClearCounters();
  RawPtrCountingMayDangleImpl::ClearCounters();

  CountingRawPtrMayDangle<int> ptr2(ptr1);
  CountingRawPtrMayDangle<int> ptr3(std::move(ptr1));  // Falls back to copy.

  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_duplication_cnt = 2}),
              CountersMatch());
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingMayDangleImpl>{
                  .wrap_raw_ptr_cnt = 0, .wrap_raw_ptr_for_dup_cnt = 2}),
              CountersMatch());
}
1457
// Same as CrossKindConversion, but via assignment instead of construction:
// cross-Traits copy- and move-assignment both take the duplication path.
TEST_F(RawPtrTest, CrossKindAssignment) {
  int x = 123;
  CountingRawPtr<int> ptr1 = &x;

  RawPtrCountingImpl::ClearCounters();
  RawPtrCountingMayDangleImpl::ClearCounters();

  CountingRawPtrMayDangle<int> ptr2;
  CountingRawPtrMayDangle<int> ptr3;
  ptr2 = ptr1;
  ptr3 = std::move(ptr1);  // Falls back to copy.

  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_duplication_cnt = 2}),
              CountersMatch());
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingMayDangleImpl>{
                  .wrap_raw_ptr_cnt = 0, .wrap_raw_ptr_for_dup_cnt = 2}),
              CountersMatch());
}
1479
// Without the explicitly customized `raw_ptr::to_address()`,
// `base::to_address()` will use the dereference operator. This is not
// what we want; this test enforces extraction semantics for
// `to_address()`.
TEST_F(RawPtrTest, ToAddressDoesNotDereference) {
  CountingRawPtr<int> ptr = nullptr;
  int* raw = base::to_address(ptr);
  std::ignore = raw;
  // Exactly one extraction; no dereference (which would crash on null here).
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 1,
                  .get_for_comparison_cnt = 0,
                  .get_for_duplication_cnt = 0}),
              CountersMatch());
}
1495
// base::to_address() on a raw_ptr must yield the same address as on the
// underlying plain pointer.
TEST_F(RawPtrTest, ToAddressGivesBackRawAddress) {
  int* plain = nullptr;
  raw_ptr<int> wrapped = plain;
  EXPECT_EQ(base::to_address(plain), base::to_address(wrapped));
}
1501
// Verifies that `raw_ptr_experimental` is aliased appropriately.
//
// The `DisableDanglingPtrDetection` trait is arbitrarily chosen and is
// just there to ensure that `raw_ptr_experimental` knows how to field
// the traits template argument.
#if BUILDFLAG(ENABLE_RAW_PTR_EXPERIMENTAL)
// Experiment enabled: the alias must be exactly raw_ptr — for mutable and
// const pointees, and for const-qualified alias instances too.
static_assert(
    std::is_same_v<raw_ptr_experimental<int, DisableDanglingPtrDetection>,
                   raw_ptr<int, DisableDanglingPtrDetection>>);
static_assert(
    std::is_same_v<raw_ptr_experimental<const int, DisableDanglingPtrDetection>,
                   raw_ptr<const int, DisableDanglingPtrDetection>>);
static_assert(
    std::is_same_v<
        const raw_ptr_experimental<const int, DisableDanglingPtrDetection>,
        const raw_ptr<const int, DisableDanglingPtrDetection>>);
#else   // BUILDFLAG(ENABLE_RAW_PTR_EXPERIMENTAL)
// Experiment disabled: the alias must decay to a plain raw pointer, with
// const-ness preserved in the same positions.
// `DisableDanglingPtrDetection` means nothing here and is silently
// ignored.
static_assert(
    std::is_same_v<raw_ptr_experimental<int, DisableDanglingPtrDetection>,
                   int*>);
static_assert(
    std::is_same_v<raw_ptr_experimental<const int, DisableDanglingPtrDetection>,
                   const int*>);
static_assert(
    std::is_same_v<
        const raw_ptr_experimental<const int, DisableDanglingPtrDetection>,
        const int* const>);
#endif  // BUILDFLAG(ENABLE_RAW_PTR_EXPERIMENTAL)
1532
1533 } // namespace
1534
1535 namespace base {
1536 namespace internal {
1537
1538 #if BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) && \
1539 !defined(MEMORY_TOOL_REPLACES_ALLOCATOR)
1540
// Out-of-memory handler installed via PartitionAllocGlobalInit; any OOM in
// these tests is treated as fatal.
void HandleOOM(size_t unused_size) {
  LOG(FATAL) << "Out of memory";
}
1544
// Partition options for BackupRefPtrTest: BackupRefPtr (with zapping)
// enabled, and features that could interfere with the BRP checks
// (aligned alloc, thread cache, quarantine, configurable pool) disabled.
static constexpr partition_alloc::PartitionOptions kOpts = {
    partition_alloc::PartitionOptions::AlignedAlloc::kDisallowed,
    partition_alloc::PartitionOptions::ThreadCache::kDisabled,
    partition_alloc::PartitionOptions::Quarantine::kDisallowed,
    partition_alloc::PartitionOptions::Cookie::kAllowed,
    partition_alloc::PartitionOptions::BackupRefPtr::kEnabled,
    partition_alloc::PartitionOptions::BackupRefPtrZapping::kEnabled,
    partition_alloc::PartitionOptions::UseConfigurablePool::kNo,
};
1554
// Fixture that owns a dedicated PartitionAlloc partition configured with
// BackupRefPtr enabled (kOpts above), so tests can exercise real BRP
// ref-counting, quarantine, and poisoning behavior.
class BackupRefPtrTest : public testing::Test {
 protected:
  void SetUp() override {
    // TODO(bartekn): Avoid using PartitionAlloc API directly. Switch to
    // new/delete once PartitionAlloc Everywhere is fully enabled.
    partition_alloc::PartitionAllocGlobalInit(HandleOOM);
    allocator_.init(kOpts);
  }

  // The partition under test; allocations in each test come from here.
  partition_alloc::PartitionAllocator allocator_;
};
1566
// End-to-end BackupRefPtr behavior: freeing a slot that still has a live
// raw_ptr must poison/quarantine it (use-after-free is caught or at least
// yields a changed value), and the slot must not be reused until the last
// raw_ptr is released.
TEST_F(BackupRefPtrTest, Basic) {
  base::CPU cpu;

  int* raw_ptr1 =
      reinterpret_cast<int*>(allocator_.root()->Alloc(sizeof(int), ""));
  // Use the actual raw_ptr implementation, not a test substitute, to
  // exercise real PartitionAlloc paths.
  raw_ptr<int> wrapped_ptr1 = raw_ptr1;

  *raw_ptr1 = 42;
  EXPECT_EQ(*raw_ptr1, *wrapped_ptr1);

  allocator_.root()->Free(raw_ptr1);
#if DCHECK_IS_ON() || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
  // In debug builds, the use-after-free should be caught immediately.
  // (g_volatile_int_to_ignore is a sink defined elsewhere in this file that
  // keeps the dereference from being optimized out.)
  EXPECT_DEATH_IF_SUPPORTED(g_volatile_int_to_ignore = *wrapped_ptr1, "");
#else   // DCHECK_IS_ON() || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
  if (cpu.has_mte()) {
    // If the hardware supports MTE, the use-after-free should also be caught.
    EXPECT_DEATH_IF_SUPPORTED(g_volatile_int_to_ignore = *wrapped_ptr1, "");
  } else {
    // The allocation should be poisoned since there's a raw_ptr alive.
    EXPECT_NE(*wrapped_ptr1, 42);
  }

  // The allocator should not be able to reuse the slot at this point.
  void* raw_ptr2 = allocator_.root()->Alloc(sizeof(int), "");
  EXPECT_NE(partition_alloc::UntagPtr(raw_ptr1),
            partition_alloc::UntagPtr(raw_ptr2));
  allocator_.root()->Free(raw_ptr2);

  // When the last reference is released, the slot should become reusable.
  wrapped_ptr1 = nullptr;
  void* raw_ptr3 = allocator_.root()->Alloc(sizeof(int), "");
  EXPECT_EQ(partition_alloc::UntagPtr(raw_ptr1),
            partition_alloc::UntagPtr(raw_ptr3));
  allocator_.root()->Free(raw_ptr3);
#endif  // DCHECK_IS_ON() || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
}
1606
// Constructing raw_ptr instances from zero-sized allocations must not
// crash, even across an entire slot span's worth of such allocations.
TEST_F(BackupRefPtrTest, ZeroSized) {
  constexpr int kAllocationCount = 128 * 1024;
  std::vector<raw_ptr<void>> ptrs;
  for (int i = 0; i < kAllocationCount; ++i) {
    ptrs.emplace_back(allocator_.root()->Alloc(0, ""));
  }
}
1616
// Creating a raw_ptr to the one-past-the-end address of an allocation (by
// construction or by operator+=) must neither crash nor corrupt the
// partition's free list.
TEST_F(BackupRefPtrTest, EndPointer) {
  // This test requires a fresh partition with an empty free list.
  // Check multiple size buckets and levels of slot filling.
  for (int size = 0; size < 1024; size += sizeof(void*)) {
    // Creating a raw_ptr from an address right past the end of an allocation
    // should not result in a crash or corrupt the free list.
    char* raw_ptr1 =
        reinterpret_cast<char*>(allocator_.root()->Alloc(size, ""));
    raw_ptr<char, AllowPtrArithmetic> wrapped_ptr = raw_ptr1 + size;
    wrapped_ptr = nullptr;
    // We need to make two more allocations to turn the possible free list
    // corruption into an observable crash.
    char* raw_ptr2 =
        reinterpret_cast<char*>(allocator_.root()->Alloc(size, ""));
    char* raw_ptr3 =
        reinterpret_cast<char*>(allocator_.root()->Alloc(size, ""));

    // Similarly for operator+=.
    char* raw_ptr4 =
        reinterpret_cast<char*>(allocator_.root()->Alloc(size, ""));
    wrapped_ptr = raw_ptr4;
    wrapped_ptr += size;
    wrapped_ptr = nullptr;
    char* raw_ptr5 =
        reinterpret_cast<char*>(allocator_.root()->Alloc(size, ""));
    char* raw_ptr6 =
        reinterpret_cast<char*>(allocator_.root()->Alloc(size, ""));

    allocator_.root()->Free(raw_ptr1);
    allocator_.root()->Free(raw_ptr2);
    allocator_.root()->Free(raw_ptr3);
    allocator_.root()->Free(raw_ptr4);
    allocator_.root()->Free(raw_ptr5);
    allocator_.root()->Free(raw_ptr6);
  }
}
1653
// Quarantine accounting: freeing a slot that still has an outstanding
// raw_ptr must move its bytes/slot into the BRP quarantine counters, and
// releasing the last raw_ptr must drain them back to zero.
TEST_F(BackupRefPtrTest, QuarantinedBytes) {
  uint64_t* raw_ptr1 = reinterpret_cast<uint64_t*>(
      allocator_.root()->Alloc(sizeof(uint64_t), ""));
  raw_ptr<uint64_t> wrapped_ptr1 = raw_ptr1;
  EXPECT_EQ(allocator_.root()->total_size_of_brp_quarantined_bytes.load(
                std::memory_order_relaxed),
            0U);
  EXPECT_EQ(allocator_.root()->total_count_of_brp_quarantined_slots.load(
                std::memory_order_relaxed),
            0U);

  // Memory should get quarantined.
  allocator_.root()->Free(raw_ptr1);
  EXPECT_GT(allocator_.root()->total_size_of_brp_quarantined_bytes.load(
                std::memory_order_relaxed),
            0U);
  EXPECT_EQ(allocator_.root()->total_count_of_brp_quarantined_slots.load(
                std::memory_order_relaxed),
            1U);

  // A free with no raw_ptr outstanding should not affect
  // total_size_of_brp_quarantined_bytes.
  void* raw_ptr2 = allocator_.root()->Alloc(sizeof(uint64_t), "");
  allocator_.root()->Free(raw_ptr2);

  // Freeing quarantined memory should bring the size back down to zero.
  wrapped_ptr1 = nullptr;
  EXPECT_EQ(allocator_.root()->total_size_of_brp_quarantined_bytes.load(
                std::memory_order_relaxed),
            0U);
  EXPECT_EQ(allocator_.root()->total_count_of_brp_quarantined_slots.load(
                std::memory_order_relaxed),
            0U);
}
1687
// Exercises raw_ptr arithmetic (+=, -=, binary +/-, ++, --) on an allocation
// of |requested_size| bytes from |allocator|: moves within bounds must
// succeed, the one-past-the-end position must be reachable, and stepping
// outside the allocation in either direction must crash.
void RunBackupRefPtrImplAdvanceTest(
    partition_alloc::PartitionAllocator& allocator,
    size_t requested_size) {
  char* ptr = static_cast<char*>(allocator.root()->Alloc(requested_size, ""));
  raw_ptr<char, AllowPtrArithmetic> protected_ptr = ptr;
  // In-bounds arithmetic, in both compound and binary forms.
  protected_ptr += 123;
  protected_ptr -= 123;
  protected_ptr = protected_ptr + 123;
  protected_ptr = protected_ptr - 123;
  protected_ptr += requested_size / 2;
  // end-of-allocation address should not cause an error immediately, but it may
  // result in the pointer being poisoned.
  protected_ptr = protected_ptr + requested_size / 2;
#if BUILDFLAG(BACKUP_REF_PTR_POISON_OOB_PTR)
  // Dereferencing the poisoned (one-past-the-end) pointer must die.
  EXPECT_DEATH_IF_SUPPORTED(*protected_ptr = ' ', "");
  protected_ptr -= 1;  // This brings the pointer back within
                       // bounds, which causes the poison to be removed.
  *protected_ptr = ' ';
  protected_ptr += 1;  // Reposition pointer back past end of allocation.
#endif
  // Any step past the one-past-the-end position must die.
  EXPECT_CHECK_DEATH(protected_ptr = protected_ptr + 1);
  EXPECT_CHECK_DEATH(protected_ptr += 1);
  EXPECT_CHECK_DEATH(++protected_ptr);

  // Even though |protected_ptr| is already pointing to the end of the
  // allocation, assign it explicitly to make sure the underlying implementation
  // doesn't "switch" to the next slot.
  protected_ptr = ptr + requested_size;
  protected_ptr -= requested_size / 2;
  protected_ptr = protected_ptr - requested_size / 2;
  // Now back at the start of the allocation; any step before it must die.
  EXPECT_CHECK_DEATH(protected_ptr = protected_ptr - 1);
  EXPECT_CHECK_DEATH(protected_ptr -= 1);
  EXPECT_CHECK_DEATH(--protected_ptr);

#if BUILDFLAG(BACKUP_REF_PTR_POISON_OOB_PTR)
  // An array type that should be more than a third the size of the available
  // memory for the allocation such that incrementing a pointer to this type
  // twice causes it to point to a memory location that is too small to fit a
  // complete element of this type.
  typedef int OverThirdArray[200 / sizeof(int)];
  raw_ptr<OverThirdArray> protected_arr_ptr =
      reinterpret_cast<OverThirdArray*>(ptr);

  // First increment still leaves room for a full element...
  protected_arr_ptr++;
  **protected_arr_ptr = 4;
  // ...the second does not, so the dereference must die.
  protected_arr_ptr++;
  EXPECT_DEATH_IF_SUPPORTED(** protected_arr_ptr = 4, "");
#endif  // BUILDFLAG(BACKUP_REF_PTR_POISON_OOB_PTR)

  allocator.root()->Free(ptr);
}
1739
// Runs the advance test across the three PartitionAlloc allocation regimes:
// regular slot spans, single-slot spans, and direct-mapped allocations.
TEST_F(BackupRefPtrTest, Advance) {
  // This requires some internal PartitionAlloc knowledge, but for the test to
  // work well the allocation + extras have to fill out the entire slot. That's
  // because PartitionAlloc doesn't know exact allocation size and bases the
  // guards on the slot size.
  //
  // A power of two is a safe choice for a slot size, then adjust it for extras.
  size_t slot_size = 512;
  size_t requested_size =
      allocator_.root()->AdjustSizeForExtrasSubtract(slot_size);
  // Verify that we're indeed filling up the slot.
  ASSERT_EQ(
      requested_size,
      allocator_.root()->AllocationCapacityFromRequestedSize(requested_size));
  RunBackupRefPtrImplAdvanceTest(allocator_, requested_size);

  // We don't have the same worry for single-slot spans, as PartitionAlloc knows
  // exactly where the allocation ends.
  size_t raw_size = 300003;
  ASSERT_GT(raw_size, partition_alloc::internal::MaxRegularSlotSpanSize());
  ASSERT_LE(raw_size, partition_alloc::internal::kMaxBucketed);
  // Use |raw_size| here (not |slot_size|); otherwise this would silently
  // re-test the regular-slot-span case instead of a single-slot span.
  requested_size = allocator_.root()->AdjustSizeForExtrasSubtract(raw_size);
  RunBackupRefPtrImplAdvanceTest(allocator_, requested_size);

  // Same for direct map.
  raw_size = 1001001;
  ASSERT_GT(raw_size, partition_alloc::internal::kMaxBucketed);
  requested_size = allocator_.root()->AdjustSizeForExtrasSubtract(raw_size);
  RunBackupRefPtrImplAdvanceTest(allocator_, requested_size);
}
1770
// Verifies that raw_ptr arithmetic may not move a pointer across the BRP pool
// boundary in either direction.
TEST_F(BackupRefPtrTest, AdvanceAcrossPools) {
  char array1[1000];
  char array2[1000];

  char* in_pool_ptr = static_cast<char*>(allocator_.root()->Alloc(123, ""));

  raw_ptr<char, AllowPtrArithmetic> protected_ptr = array1;
  // Nothing bad happens. Both pointers are outside of the BRP pool, so no
  // checks are triggered.
  protected_ptr += (array2 - array1);
  // A pointer is shifted from outside of the BRP pool into the BRP pool. This
  // should trigger death to avoid the pointer ending up BRP-protected without
  // the corresponding ref-count ever having been acquired.
  EXPECT_CHECK_DEATH(protected_ptr += (in_pool_ptr - array2));

  protected_ptr = in_pool_ptr;
  // Same when a pointer is shifted from inside the BRP pool out of it.
  EXPECT_CHECK_DEATH(protected_ptr += (array1 - in_pool_ptr));

  allocator_.root()->Free(in_pool_ptr);
}
1791
// Verifies operator- between raw_ptrs: within one allocation it yields the
// element delta (including to/from the one-past-the-end position); when the
// subtraction check is enabled, subtracting pointers that belong to different
// allocations crashes.
TEST_F(BackupRefPtrTest, GetDeltaElems) {
  size_t requested_size = allocator_.root()->AdjustSizeForExtrasSubtract(512);
  char* ptr1 = static_cast<char*>(allocator_.root()->Alloc(requested_size, ""));
  char* ptr2 = static_cast<char*>(allocator_.root()->Alloc(requested_size, ""));
  ASSERT_LT(ptr1, ptr2);  // There should be a ref-count between slots.
  raw_ptr<char> protected_ptr1 = ptr1;
  raw_ptr<char> protected_ptr1_2 = ptr1 + 1;
  raw_ptr<char> protected_ptr1_3 = ptr1 + requested_size - 1;
  raw_ptr<char> protected_ptr1_4 = ptr1 + requested_size;
  raw_ptr<char> protected_ptr2 = ptr2;
  raw_ptr<char> protected_ptr2_2 = ptr2 + 1;

  // Same-allocation differences, in both directions.
  EXPECT_EQ(protected_ptr1_2 - protected_ptr1, 1);
  EXPECT_EQ(protected_ptr1 - protected_ptr1_2, -1);
  EXPECT_EQ(protected_ptr1_3 - protected_ptr1,
            checked_cast<ptrdiff_t>(requested_size) - 1);
  EXPECT_EQ(protected_ptr1 - protected_ptr1_3,
            -checked_cast<ptrdiff_t>(requested_size) + 1);
  EXPECT_EQ(protected_ptr1_4 - protected_ptr1,
            checked_cast<ptrdiff_t>(requested_size));
  EXPECT_EQ(protected_ptr1 - protected_ptr1_4,
            -checked_cast<ptrdiff_t>(requested_size));
#if BUILDFLAG(ENABLE_POINTER_SUBTRACTION_CHECK)
  // Cross-allocation subtraction must die, regardless of direction or whether
  // either operand is the one-past-the-end pointer.
  EXPECT_CHECK_DEATH(protected_ptr2 - protected_ptr1);
  EXPECT_CHECK_DEATH(protected_ptr1 - protected_ptr2);
  EXPECT_CHECK_DEATH(protected_ptr2 - protected_ptr1_4);
  EXPECT_CHECK_DEATH(protected_ptr1_4 - protected_ptr2);
  EXPECT_CHECK_DEATH(protected_ptr2_2 - protected_ptr1);
  EXPECT_CHECK_DEATH(protected_ptr1 - protected_ptr2_2);
  EXPECT_CHECK_DEATH(protected_ptr2_2 - protected_ptr1_4);
  EXPECT_CHECK_DEATH(protected_ptr1_4 - protected_ptr2_2);
#endif  // BUILDFLAG(ENABLE_POINTER_SUBTRACTION_CHECK)
  EXPECT_EQ(protected_ptr2_2 - protected_ptr2, 1);
  EXPECT_EQ(protected_ptr2 - protected_ptr2_2, -1);

  allocator_.root()->Free(ptr1);
  allocator_.root()->Free(ptr2);
}
1830
IsQuarantineEmpty(partition_alloc::PartitionAllocator & allocator)1831 bool IsQuarantineEmpty(partition_alloc::PartitionAllocator& allocator) {
1832 return allocator.root()->total_size_of_brp_quarantined_bytes.load(
1833 std::memory_order_relaxed) == 0;
1834 }
1835
// Helper object that deletes itself from inside a bound callback and checks,
// while still running, that its own slot has not been quarantined yet — i.e.
// that the callback's bound raw_ptr argument keeps the object BRP-protected
// for the whole invocation.
struct BoundRawPtrTestHelper {
  // Allocates and constructs an instance inside |allocator|'s partition
  // (placement new, since the partition is not the default one).
  static BoundRawPtrTestHelper* Create(
      partition_alloc::PartitionAllocator& allocator) {
    return new (allocator.root()->Alloc(sizeof(BoundRawPtrTestHelper), ""))
        BoundRawPtrTestHelper(allocator);
  }

  explicit BoundRawPtrTestHelper(partition_alloc::PartitionAllocator& allocator)
      : owning_allocator(allocator),
        once_callback(
            BindOnce(&BoundRawPtrTestHelper::DeleteItselfAndCheckIfInQuarantine,
                     Unretained(this))),
        repeating_callback(BindRepeating(
            &BoundRawPtrTestHelper::DeleteItselfAndCheckIfInQuarantine,
            Unretained(this))) {}

  // Destroys and frees |this|, then verifies the slot went into quarantine
  // (proving something — the callback's bound argument — still references it).
  void DeleteItselfAndCheckIfInQuarantine() {
    auto& allocator = *owning_allocator;
    EXPECT_TRUE(IsQuarantineEmpty(allocator));

    // Since we use a non-default partition, `delete` has to be simulated.
    this->~BoundRawPtrTestHelper();
    allocator.root()->Free(this);

    EXPECT_FALSE(IsQuarantineEmpty(allocator));
  }

  const raw_ref<partition_alloc::PartitionAllocator> owning_allocator;
  OnceClosure once_callback;
  RepeatingClosure repeating_callback;
};
1867
1868 // Check that bound callback arguments remain protected by BRP for the
1869 // entire duration of a callback invocation.
TEST_F(BackupRefPtrTest, Bind) {
  // This test requires a separate partition; otherwise, unrelated allocations
  // might interfere with `IsQuarantineEmpty`.

  // OnceCallback: the bound raw_ptr must protect the object until Run ends.
  auto* object_for_once_callback1 = BoundRawPtrTestHelper::Create(allocator_);
  std::move(object_for_once_callback1->once_callback).Run();
  EXPECT_TRUE(IsQuarantineEmpty(allocator_));

  // RepeatingCallback, rvalue Run().
  auto* object_for_repeating_callback1 =
      BoundRawPtrTestHelper::Create(allocator_);
  std::move(object_for_repeating_callback1->repeating_callback).Run();
  EXPECT_TRUE(IsQuarantineEmpty(allocator_));

  // `RepeatingCallback` has both lvalue and rvalue versions of `Run`.
  auto* object_for_repeating_callback2 =
      BoundRawPtrTestHelper::Create(allocator_);
  object_for_repeating_callback2->repeating_callback.Run();
  EXPECT_TRUE(IsQuarantineEmpty(allocator_));
}
1888
1889 #if PA_CONFIG(REF_COUNT_CHECK_COOKIE)
// Forges a raw_ptr on top of a plain pointer (bypassing the normal wrap path,
// which would have taken a ref-count reference) and checks that the ref-count
// cookie check catches the resulting use of freed memory.
TEST_F(BackupRefPtrTest, ReinterpretCast) {
  void* ptr = allocator_.root()->Alloc(16, "");
  allocator_.root()->Free(ptr);

  raw_ptr<void>* wrapped_ptr = reinterpret_cast<raw_ptr<void>*>(&ptr);
  // The reference count cookie check should detect that the allocation has
  // been already freed.
  BASE_EXPECT_DEATH(*wrapped_ptr = nullptr, "");
}
1899 #endif
1900
1901 namespace {
1902
1903 // Install dangling raw_ptr handlers and restore them when going out of scope.
class ScopedInstallDanglingRawPtrChecks {
 public:
  ScopedInstallDanglingRawPtrChecks() {
    // Run the dangling-pointer detector in "crash" mode so tests can observe
    // detections with death assertions.
    enabled_feature_list_.InitWithFeaturesAndParameters(
        {{features::kPartitionAllocDanglingPtr, {{"mode", "crash"}}}},
        {/* disabled_features */});
    // Remember the currently-installed handlers so they can be restored.
    old_detected_fn_ = partition_alloc::GetDanglingRawPtrDetectedFn();
    old_dereferenced_fn_ = partition_alloc::GetDanglingRawPtrReleasedFn();
    allocator::InstallDanglingRawPtrChecks();
  }
  ~ScopedInstallDanglingRawPtrChecks() {
    // Restore the handlers captured in the constructor.
    partition_alloc::SetDanglingRawPtrDetectedFn(old_detected_fn_);
    partition_alloc::SetDanglingRawPtrReleasedFn(old_dereferenced_fn_);
  }

 private:
  test::ScopedFeatureList enabled_feature_list_;
  // Handlers that were installed before this object took over.
  partition_alloc::DanglingRawPtrDetectedFn* old_detected_fn_;
  partition_alloc::DanglingRawPtrReleasedFn* old_dereferenced_fn_;
};
1924
1925 } // namespace
1926
TEST_F(BackupRefPtrTest, RawPtrMayDangle) {
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  void* allocation = allocator_.root()->Alloc(16, "");
  raw_ptr<void, DisableDanglingPtrDetection> may_dangle_ptr = allocation;
  // Because the pointer explicitly opted out of detection, neither the free
  // nor the later release reports a dangling raw_ptr.
  allocator_.root()->Free(allocation);
  may_dangle_ptr = nullptr;
}
1935
// A default-policy raw_ptr left set across a Free() must be reported as
// dangling (when the dangling-pointer checks are compiled in and the perf
// experiment is off); otherwise the same sequence runs without dying.
TEST_F(BackupRefPtrTest, RawPtrNotDangling) {
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  void* ptr = allocator_.root()->Alloc(16, "");
  raw_ptr<void> dangling_ptr = ptr;
#if BUILDFLAG(ENABLE_DANGLING_RAW_PTR_CHECKS) && \
    !BUILDFLAG(ENABLE_DANGLING_RAW_PTR_PERF_EXPERIMENT)
  BASE_EXPECT_DEATH(
      {
        allocator_.root()->Free(ptr);  // Dangling raw_ptr detected.
        dangling_ptr = nullptr;        // Dangling raw_ptr released.
      },
      AllOf(HasSubstr("Detected dangling raw_ptr"),
            HasSubstr("The memory was freed at:"),
            HasSubstr("The dangling raw_ptr was released at:")));
#else
  allocator_.root()->Free(ptr);
  dangling_ptr = nullptr;
#endif
}
1956
1957 // Check the comparator operators work, even across raw_ptr with different
1958 // dangling policies.
TEST_F(BackupRefPtrTest, DanglingPtrComparison) {
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  void* alloc_low = allocator_.root()->Alloc(16, "");
  void* alloc_high = allocator_.root()->Alloc(16, "");

  // Normalize so that |alloc_low| < |alloc_high|.
  if (alloc_high < alloc_low) {
    std::swap(alloc_low, alloc_high);
  }

  raw_ptr<void, DisableDanglingPtrDetection> may_dangle_low = alloc_low;
  raw_ptr<void, DisableDanglingPtrDetection> may_dangle_high = alloc_high;
  raw_ptr<void> checked_low = alloc_low;
  raw_ptr<void> checked_high = alloc_high;

  // Equality/inequality and ordering must agree across dangling policies.
  EXPECT_EQ(may_dangle_low, checked_low);
  EXPECT_EQ(may_dangle_high, checked_high);
  EXPECT_NE(may_dangle_low, checked_high);
  EXPECT_NE(may_dangle_high, checked_low);
  EXPECT_LT(may_dangle_low, checked_high);
  EXPECT_GT(may_dangle_high, checked_low);
  EXPECT_LT(checked_low, may_dangle_high);
  EXPECT_GT(checked_high, may_dangle_low);

  // Clear the checked pointers before freeing, so no dangling raw_ptr is
  // reported.
  checked_low = nullptr;
  checked_high = nullptr;

  allocator_.root()->Free(alloc_low);
  allocator_.root()->Free(alloc_high);
}
1989
1990 // Check the assignment operator works, even across raw_ptr with different
1991 // dangling policies (only `not dangling` -> `dangling` direction is supported).
TEST_F(BackupRefPtrTest, DanglingPtrAssignment) {
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  void* allocation = allocator_.root()->Alloc(16, "");

  raw_ptr<void, DisableDanglingPtrDetection> may_dangle_ptr;
  raw_ptr<void> checked_ptr;

  // Hand the value from the checked policy over to the may-dangle policy,
  // then drop the checked reference before the allocation is freed.
  checked_ptr = allocation;
  may_dangle_ptr = checked_ptr;
  checked_ptr = nullptr;

  allocator_.root()->Free(allocation);

  // Releasing the may-dangle pointer after the free must not be reported.
  may_dangle_ptr = nullptr;
}
2008
2009 // Check the copy constructor works, even across raw_ptr with different dangling
2010 // policies (only `not dangling` -> `dangling` direction is supported).
TEST_F(BackupRefPtrTest, DanglingPtrCopyContructor) {
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  void* allocation = allocator_.root()->Alloc(16, "");

  // Copy-construct a may-dangle pointer from a checked one; only the
  // `not dangling` -> `dangling` direction is supported.
  raw_ptr<void> checked_ptr(allocation);
  raw_ptr<void, DisableDanglingPtrDetection> may_dangle_ptr(checked_ptr);

  // Release both references before the free, so nothing is reported.
  checked_ptr = nullptr;
  may_dangle_ptr = nullptr;

  allocator_.root()->Free(allocation);
}
2024
TEST_F(BackupRefPtrTest, RawPtrExtractAsDangling) {
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  raw_ptr<int> wrapped_ptr =
      static_cast<int*>(allocator_.root()->Alloc(sizeof(int), ""));
  // ExtractAsDangling() hands the value out as a may-dangle pointer and
  // clears the original, so this Free() must not report a dangling raw_ptr.
  allocator_.root()->Free(wrapped_ptr.ExtractAsDangling());
  EXPECT_EQ(wrapped_ptr, nullptr);
}
2034
// Counterpart of RawPtrExtractAsDangling: freeing via get() (instead of
// ExtractAsDangling()) leaves |ptr| set, which the dangling-pointer detector
// must report when its checks are compiled in.
TEST_F(BackupRefPtrTest, RawPtrDeleteWithoutExtractAsDangling) {
  ScopedInstallDanglingRawPtrChecks enable_dangling_raw_ptr_checks;

  raw_ptr<int> ptr =
      static_cast<int*>(allocator_.root()->Alloc(sizeof(int), ""));
#if BUILDFLAG(ENABLE_DANGLING_RAW_PTR_CHECKS) && \
    !BUILDFLAG(ENABLE_DANGLING_RAW_PTR_PERF_EXPERIMENT)
  BASE_EXPECT_DEATH(
      {
        allocator_.root()->Free(ptr.get());  // Dangling raw_ptr detected.
        ptr = nullptr;                       // Dangling raw_ptr released.
      },
      AllOf(HasSubstr("Detected dangling raw_ptr"),
            HasSubstr("The memory was freed at:"),
            HasSubstr("The dangling raw_ptr was released at:")));
#else
  allocator_.root()->Free(ptr.get());
  ptr = nullptr;
#endif  // BUILDFLAG(ENABLE_DANGLING_RAW_PTR_CHECKS) && \
        // !BUILDFLAG(ENABLE_DANGLING_RAW_PTR_PERF_EXPERIMENT)
}
2056
// Verifies that CountingRawPtr can be mixed with plain pointers in
// iterator-style ("spatial") algorithms and loops, and that each unwrap is
// attributed to the right bucket (dereference / extraction / comparison).
TEST_F(BackupRefPtrTest, SpatialAlgoCompat) {
  size_t slot_size = 512;
  size_t requested_size =
      allocator_.root()->AdjustSizeForExtrasSubtract(slot_size);
  // Verify that we're indeed filling up the slot.
  ASSERT_EQ(
      requested_size,
      allocator_.root()->AllocationCapacityFromRequestedSize(requested_size));
  size_t requested_elements = requested_size / sizeof(int);

  int* ptr =
      reinterpret_cast<int*>(allocator_.root()->Alloc(requested_size, ""));
  int* ptr_end = ptr + requested_elements;

  CountingRawPtr<int> protected_ptr = ptr;
  CountingRawPtr<int> protected_ptr_end = protected_ptr + requested_elements;

#if BUILDFLAG(BACKUP_REF_PTR_POISON_OOB_PTR)
  // |protected_ptr_end| is one-past-the-end, so dereferencing it must die.
  EXPECT_DEATH_IF_SUPPORTED(*protected_ptr_end = 1, "");
#endif

  RawPtrCountingImpl::ClearCounters();

  // std::generate with CountingRawPtr on both ends of the range.
  int gen_val = 1;
  std::generate(protected_ptr, protected_ptr_end, [&gen_val]() {
    gen_val ^= gen_val + 1;
    return gen_val;
  });

  // One dereference per element; every loop-termination check unwraps both
  // counting pointers for comparison (including the final check).
  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = requested_elements,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = (requested_elements + 1) * 2,
              }),
              CountersMatch());

  RawPtrCountingImpl::ClearCounters();

  // Counting pointer on both sides of the loop condition; read-modify-write
  // costs two dereference unwraps per element.
  for (CountingRawPtr<int> protected_ptr_i = protected_ptr;
       protected_ptr_i < protected_ptr_end; protected_ptr_i++) {
    *protected_ptr_i ^= *protected_ptr_i + 1;
  }

  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = requested_elements * 2,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = (requested_elements + 1) * 2,
              }),
              CountersMatch());

  RawPtrCountingImpl::ClearCounters();

  // Counting iterator compared against a plain-pointer bound: only one
  // comparison unwrap per termination check.
  for (CountingRawPtr<int> protected_ptr_i = protected_ptr;
       protected_ptr_i < ptr_end; protected_ptr_i++) {
    *protected_ptr_i ^= *protected_ptr_i + 1;
  }

  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = requested_elements * 2,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = requested_elements + 1,
              }),
              CountersMatch());

  RawPtrCountingImpl::ClearCounters();

  // Plain iteration pointer with a counting pointer only as the bound:
  // comparison unwraps only, no dereference unwraps.
  for (int* ptr_i = ptr; ptr_i < protected_ptr_end; ptr_i++) {
    *ptr_i ^= *ptr_i + 1;
  }

  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 0,
                  .get_for_comparison_cnt = requested_elements + 1,
              }),
              CountersMatch());

  RawPtrCountingImpl::ClearCounters();

  // Extracting both counting pointers into plain pointers up front: exactly
  // two extraction unwraps total, nothing per iteration.
  size_t iter_cnt = 0;
  for (int *ptr_i = protected_ptr, *ptr_i_end = protected_ptr_end;
       ptr_i < ptr_i_end; ptr_i++) {
    *ptr_i ^= *ptr_i + 1;
    iter_cnt++;
  }
  EXPECT_EQ(iter_cnt, requested_elements);

  EXPECT_THAT((CountingRawPtrExpectations<RawPtrCountingImpl>{
                  .get_for_dereference_cnt = 0,
                  .get_for_extraction_cnt = 2,
                  .get_for_comparison_cnt = 0,
              }),
              CountersMatch());

  allocator_.root()->Free(ptr);
}
2153
2154 #if BUILDFLAG(BACKUP_REF_PTR_POISON_OOB_PTR)
TEST_F(BackupRefPtrTest, Duplicate) {
  size_t requested_size = allocator_.root()->AdjustSizeForExtrasSubtract(512);
  char* ptr = static_cast<char*>(allocator_.root()->Alloc(requested_size, ""));
  raw_ptr<char> poisoned_ptr = ptr;
  poisoned_ptr += requested_size;  // Now one-past-the-end => poisoned.

  // Copy-constructing from a poisoned pointer must be allowed...
  raw_ptr<char> copy_constructed = poisoned_ptr;

  // ...but the poison bit must travel with the copy, so an OOB write through
  // the duplicate still dies:
  EXPECT_DEATH_IF_SUPPORTED(*copy_constructed = ' ', "");

  // The same contract holds for copy-assignment.
  raw_ptr<char> copy_assigned;
  copy_assigned = poisoned_ptr;

  EXPECT_DEATH_IF_SUPPORTED(*copy_assigned = ' ', "");

  allocator_.root()->Free(ptr);
}
2178 #endif // BUILDFLAG(BACKUP_REF_PTR_POISON_OOB_PTR)
2179
2180 #if BUILDFLAG(PA_EXPENSIVE_DCHECKS_ARE_ON)
// Verifies that a write into quarantined memory is caught: the quarantine
// fill pattern is validated when the slot is finally released.
TEST_F(BackupRefPtrTest, WriteAfterFree) {
  constexpr uint64_t kPayload = 0x1234567890ABCDEF;

  raw_ptr<uint64_t> ptr =
      static_cast<uint64_t*>(allocator_.root()->Alloc(sizeof(uint64_t), ""));

  // Now |ptr| should be quarantined.
  allocator_.root()->Free(ptr);

  EXPECT_DEATH_IF_SUPPORTED(
      {
        // Write something different from |kQuarantinedByte|.
        *ptr = kPayload;
        // Write-after-Free should lead to crash
        // on |PartitionAllocFreeForRefCounting|.
        ptr = nullptr;
      },
      "");
}
2200 #endif // BUILDFLAG(PA_EXPENSIVE_DCHECKS_ARE_ON)
2201
2202 namespace {
// A quarantine filler byte distinct from the default |kQuarantinedByte|, so
// the test can tell that the override hook (and not the default fill) ran.
constexpr uint8_t kCustomQuarantineByte = 0xff;
static_assert(kCustomQuarantineByte !=
              partition_alloc::internal::kQuarantinedByte);

// Quarantine override hook: fills the quarantined slot with
// |kCustomQuarantineByte| instead of the default pattern.
void CustomQuarantineHook(void* address, size_t size) {
  partition_alloc::internal::SecureMemset(address, kCustomQuarantineByte, size);
}
2210 } // namespace
2211
// Verifies that an installed quarantine override hook is invoked on free and
// determines the fill pattern of the quarantined slot.
TEST_F(BackupRefPtrTest, QuarantineHook) {
  partition_alloc::PartitionAllocHooks::SetQuarantineOverrideHook(
      CustomQuarantineHook);
  uint8_t* native_ptr =
      static_cast<uint8_t*>(allocator_.root()->Alloc(sizeof(uint8_t), ""));
  *native_ptr = 0;
  {
    raw_ptr<uint8_t> smart_ptr = native_ptr;

    allocator_.root()->Free(smart_ptr);
    // Access the allocation through the native pointer to avoid triggering
    // dereference checks in debug builds.
    EXPECT_EQ(*partition_alloc::internal::TagPtr(native_ptr),
              kCustomQuarantineByte);

    // Leaving |smart_ptr| filled with |kCustomQuarantineByte| can
    // cause a crash because we have a DCHECK that expects it to be filled with
    // |kQuarantinedByte|. We need to ensure it is unquarantined before
    // unregistering the hook.
  }  // <- unquarantined here

  partition_alloc::PartitionAllocHooks::SetQuarantineOverrideHook(nullptr);
}
2235
2236 #endif // BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT) &&
2237 // !defined(MEMORY_TOOL_REPLACES_ALLOCATOR)
2238
2239 #if BUILDFLAG(USE_HOOKABLE_RAW_PTR)
2240
2241 namespace {
// Applies F to the name of every hookable raw_ptr operation.
#define FOR_EACH_RAW_PTR_OPERATION(F) \
  F(wrap_ptr)                         \
  F(release_wrapped_ptr)              \
  F(safely_unwrap_for_dereference)    \
  F(safely_unwrap_for_extraction)     \
  F(unsafely_unwrap_for_comparison)   \
  F(advance)                          \
  F(duplicate)

// Can't use gMock to count the number of invocations because
// gMock itself triggers raw_ptr<T> operations.
struct CountingHooks {
  // Zeroes every per-operation counter.
  void ResetCounts() {
#define F(name) name##_count = 0;
    FOR_EACH_RAW_PTR_OPERATION(F)
#undef F
  }

  // Returns the thread-local instance the hook adapters record into.
  static CountingHooks* Get() {
    static thread_local CountingHooks instance;
    return &instance;
  }

  // The adapter method is templated to accept any number of arguments.
#define F(name)                      \
  template <typename... T>           \
  static void name##_adapter(T...) { \
    Get()->name##_count++;           \
  }                                  \
  size_t name##_count = 0;
  FOR_EACH_RAW_PTR_OPERATION(F)
#undef F
};

// Hook table that routes every raw_ptr operation to the counting adapters.
constexpr RawPtrHooks raw_ptr_hooks{
#define F(name) .name = CountingHooks::name##_adapter,
    FOR_EACH_RAW_PTR_OPERATION(F)
#undef F
};
2281 } // namespace
2282
// Fixture that installs the counting raw_ptr hooks for the duration of each
// test and removes them afterwards.
class HookableRawPtrImplTest : public testing::Test {
 protected:
  void SetUp() override { InstallRawPtrHooks(&raw_ptr_hooks); }
  void TearDown() override { ResetRawPtrHooks(); }
};
2288
TEST_F(HookableRawPtrImplTest, WrapPtr) {
  // `ResetCounts` can't live in `SetUp`: gTest itself performs raw_ptr<T>
  // operations between `SetUp` and the test body, which would skew counters.
  CountingHooks::Get()->ResetCounts();
  {
    int* raw = new int;
    // Initializing the raw_ptr from a plain pointer is one wrap.
    [[maybe_unused]] raw_ptr<int> hooked_ptr = raw;
    delete raw;
  }
  EXPECT_EQ(CountingHooks::Get()->wrap_ptr_count, 1u);
}
2300
TEST_F(HookableRawPtrImplTest, ReleaseWrappedPtr) {
  CountingHooks::Get()->ResetCounts();
  {
    int* raw = new int;
    [[maybe_unused]] raw_ptr<int> hooked_ptr = raw;
    delete raw;
  }
  // The raw_ptr going out of scope triggers exactly one release.
  EXPECT_EQ(CountingHooks::Get()->release_wrapped_ptr_count, 1u);
}
2310
TEST_F(HookableRawPtrImplTest, SafelyUnwrapForDereference) {
  CountingHooks::Get()->ResetCounts();
  {
    int* raw = new int;
    raw_ptr<int> hooked_ptr = raw;
    // One store through the raw_ptr is one dereference unwrap.
    *hooked_ptr = 1;
    delete raw;
  }
  EXPECT_EQ(CountingHooks::Get()->safely_unwrap_for_dereference_count, 1u);
}
2321
TEST_F(HookableRawPtrImplTest, SafelyUnwrapForExtraction) {
  CountingHooks::Get()->ResetCounts();
  {
    int* raw = new int;
    raw_ptr<int> hooked_ptr = raw;
    // Converting back to a plain pointer is one extraction unwrap.
    raw = hooked_ptr;
    delete raw;
  }
  EXPECT_EQ(CountingHooks::Get()->safely_unwrap_for_extraction_count, 1u);
}
2332
TEST_F(HookableRawPtrImplTest, UnsafelyUnwrapForComparison) {
  CountingHooks::Get()->ResetCounts();
  {
    int* raw = new int;
    raw_ptr<int> hooked_ptr = raw;
    // Comparing against a plain pointer is one comparison unwrap.
    EXPECT_EQ(hooked_ptr, raw);
    delete raw;
  }
  EXPECT_EQ(CountingHooks::Get()->unsafely_unwrap_for_comparison_count, 1u);
}
2343
TEST_F(HookableRawPtrImplTest, Advance) {
  CountingHooks::Get()->ResetCounts();
  {
    int* raw = new int[10];
    raw_ptr<int, AllowPtrArithmetic> hooked_ptr = raw;
    // A single += is one advance.
    hooked_ptr += 1;
    delete[] raw;
  }
  EXPECT_EQ(CountingHooks::Get()->advance_count, 1u);
}
2354
TEST_F(HookableRawPtrImplTest, Duplicate) {
  CountingHooks::Get()->ResetCounts();
  {
    int* raw = new int;
    raw_ptr<int> hooked_ptr = raw;
    // Copy-constructing one raw_ptr from another is one duplication.
    raw_ptr<int> hooked_copy = hooked_ptr;
    delete raw;
  }
  EXPECT_EQ(CountingHooks::Get()->duplicate_count, 1u);
}
2365
2366 #endif // BUILDFLAG(USE_HOOKABLE_RAW_PTR)
2367
TEST(DanglingPtrTest, DetectAndReset) {
  auto instrumentation = test::DanglingPtrInstrumentation::Create();
  if (!instrumentation.has_value()) {
    GTEST_SKIP() << instrumentation.error();
  }

  auto owner = std::make_unique<int>(42);
  raw_ptr<int> stale_ptr = owner.get();
  EXPECT_EQ(instrumentation->dangling_ptr_detected(), 0u);
  EXPECT_EQ(instrumentation->dangling_ptr_released(), 0u);
  // Destroying the pointee while |stale_ptr| is still set fires a detection.
  owner.reset();
  EXPECT_EQ(instrumentation->dangling_ptr_detected(), 1u);
  EXPECT_EQ(instrumentation->dangling_ptr_released(), 0u);
  // Clearing the raw_ptr releases the dangling reference.
  stale_ptr = nullptr;
  EXPECT_EQ(instrumentation->dangling_ptr_detected(), 1u);
  EXPECT_EQ(instrumentation->dangling_ptr_released(), 1u);
}
2385
TEST(DanglingPtrTest, DetectAndDestructor) {
  auto instrumentation = test::DanglingPtrInstrumentation::Create();
  if (!instrumentation.has_value()) {
    GTEST_SKIP() << instrumentation.error();
  }

  auto owner = std::make_unique<int>(42);
  {
    [[maybe_unused]] raw_ptr<int> stale_ptr = owner.get();
    EXPECT_EQ(instrumentation->dangling_ptr_detected(), 0u);
    EXPECT_EQ(instrumentation->dangling_ptr_released(), 0u);
    // Destroying the pointee while |stale_ptr| is still set fires a detection.
    owner.reset();
    EXPECT_EQ(instrumentation->dangling_ptr_detected(), 1u);
    EXPECT_EQ(instrumentation->dangling_ptr_released(), 0u);
  }
  // |stale_ptr|'s destructor releases the dangling reference.
  EXPECT_EQ(instrumentation->dangling_ptr_detected(), 1u);
  EXPECT_EQ(instrumentation->dangling_ptr_released(), 1u);
}
2404
TEST(DanglingPtrTest, DetectResetAndDestructor) {
  auto instrumentation = test::DanglingPtrInstrumentation::Create();
  if (!instrumentation.has_value()) {
    GTEST_SKIP() << instrumentation.error();
  }

  auto owner = std::make_unique<int>(42);
  {
    raw_ptr<int> stale_ptr = owner.get();
    EXPECT_EQ(instrumentation->dangling_ptr_detected(), 0u);
    EXPECT_EQ(instrumentation->dangling_ptr_released(), 0u);
    // Destroying the pointee while |stale_ptr| is still set fires a detection.
    owner.reset();
    EXPECT_EQ(instrumentation->dangling_ptr_detected(), 1u);
    EXPECT_EQ(instrumentation->dangling_ptr_released(), 0u);
    // Explicit reset releases the dangling reference...
    stale_ptr = nullptr;
    EXPECT_EQ(instrumentation->dangling_ptr_detected(), 1u);
    EXPECT_EQ(instrumentation->dangling_ptr_released(), 1u);
  }
  // ...and the destructor of the already-null raw_ptr must not release again.
  EXPECT_EQ(instrumentation->dangling_ptr_detected(), 1u);
  EXPECT_EQ(instrumentation->dangling_ptr_released(), 1u);
}
2426
2427 } // namespace internal
2428 } // namespace base
2429