// Copyright 2020 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <cstdint>

#if !defined(MEMORY_TOOL_REPLACES_ALLOCATOR)

#include "partition_alloc/starscan/pcscan.h"

#include "build/build_config.h"
#include "partition_alloc/partition_alloc-inl.h"
#include "partition_alloc/partition_alloc_base/bits.h"
#include "partition_alloc/partition_alloc_base/compiler_specific.h"
#include "partition_alloc/partition_alloc_base/cpu.h"
#include "partition_alloc/partition_alloc_base/logging.h"
#include "partition_alloc/partition_alloc_buildflags.h"
#include "partition_alloc/partition_alloc_config.h"
#include "partition_alloc/partition_alloc_constants.h"
#include "partition_alloc/partition_alloc_for_testing.h"
#include "partition_alloc/partition_root.h"
#include "partition_alloc/starscan/stack/stack.h"
#include "partition_alloc/tagging.h"
#include "testing/gtest/include/gtest/gtest.h"

#if BUILDFLAG(USE_STARSCAN)

namespace partition_alloc::internal {

namespace {

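// RAII helper that disables PCScan stack scanning for the duration of a test
// and re-enables it on destruction if it was originally enabled.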
struct DisableStackScanningScope final {
  DisableStackScanningScope() {
    if (PCScan::IsStackScanningEnabled()) {
      PCScan::DisableStackScanning();
      changed_ = true;
    }
  }
  ~DisableStackScanningScope() {
    if (changed_) {
      PCScan::EnableStackScanning();
    }
  }

 private:
  bool changed_ = false;
};

}  // namespace

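// Base fixture: creates a partition root with *Scan quarantine allowed,
// registers it as scannable with PCScan, and provides helpers to run scans
// either synchronously or through the mutator/scanner safepoint protocol.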
class PartitionAllocPCScanTestBase : public testing::Test {
 public:
  PartitionAllocPCScanTestBase()
      : allocator_([]() {
          PartitionOptions opts;
          opts.aligned_alloc = PartitionOptions::kAllowed;
          opts.star_scan_quarantine = PartitionOptions::kAllowed;
          opts.memory_tagging = {
              .enabled = base::CPU::GetInstanceNoAllocation().has_mte()
                             ? partition_alloc::PartitionOptions::kEnabled
                             : partition_alloc::PartitionOptions::kDisabled};
          return opts;
        }()) {
    PartitionAllocGlobalInit([](size_t) { PA_LOG(FATAL) << "Out of memory"; });
    // Previous test runs within the same process decommit pools, therefore
    // we need to make sure that the card table is recommitted for each run.
    PCScan::ReinitForTesting(
        {PCScan::InitConfig::WantedWriteProtectionMode::kDisabled,
         PCScan::InitConfig::SafepointMode::kEnabled});
    allocator_.root()->UncapEmptySlotSpanMemoryForTesting();
    allocator_.root()->SwitchToDenserBucketDistribution();

    PCScan::RegisterScannableRoot(allocator_.root());
  }

  ~PartitionAllocPCScanTestBase() override {
    allocator_.root()->PurgeMemory(PurgeFlags::kDecommitEmptySlotSpans |
                                   PurgeFlags::kDiscardUnusedSystemPages);
    PartitionAllocGlobalUninitForTesting();
  }

  void RunPCScan() {
    PCScan::Instance().PerformScan(PCScan::InvocationMode::kBlocking);
  }

  void SchedulePCScan() {
    PCScan::Instance().PerformScan(
        PCScan::InvocationMode::kScheduleOnlyForTesting);
  }

  void JoinPCScanAsMutator() {
    auto& instance = PCScan::Instance();
    PA_CHECK(instance.IsJoinable());
    instance.JoinScan();
  }

  void FinishPCScanAsScanner() { PCScan::FinishScanForTesting(); }

  bool IsInQuarantine(void* object) const {
    uintptr_t slot_start = root().ObjectToSlotStart(object);
    return StateBitmapFromAddr(slot_start)->IsQuarantined(slot_start);
  }

  PartitionRoot& root() { return *allocator_.root(); }
  const PartitionRoot& root() const { return *allocator_.root(); }

 private:
  // Leverage the already-templated version outside `internal::`.
  partition_alloc::PartitionAllocatorAllowLeaksForTesting allocator_;
};

namespace {

// The test that expects a freed object to be quarantined only when tag
// overflow occurs.
using PartitionAllocPCScanWithMTETest = PartitionAllocPCScanTestBase;

// The test that expects every freed object to be quarantined.
class PartitionAllocPCScanTest : public PartitionAllocPCScanTestBase {
 public:
  PartitionAllocPCScanTest() { root().SetQuarantineAlwaysForTesting(true); }
  ~PartitionAllocPCScanTest() override {
    root().SetQuarantineAlwaysForTesting(false);
  }
};

using SlotSpan = PartitionRoot::SlotSpan;

struct FullSlotSpanAllocation {
  SlotSpan* slot_span;
  void* first;
  void* last;
};

// Assumes heap is purged.
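// Allocates objects until the bucket's active slot span is completely full and
// returns that slot span together with its first and last objects.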
FullSlotSpanAllocation GetFullSlotSpan(PartitionRoot& root,
                                       size_t object_size) {
  PA_CHECK(0u == root.get_total_size_of_committed_pages());

  const size_t raw_size = root.AdjustSizeForExtrasAdd(object_size);
  const size_t bucket_index =
      root.SizeToBucketIndex(raw_size, root.GetBucketDistribution());
  PartitionRoot::Bucket& bucket = root.buckets[bucket_index];
  const size_t num_slots = (bucket.get_bytes_per_span()) / bucket.slot_size;

  uintptr_t first = 0;
  uintptr_t last = 0;
  for (size_t i = 0; i < num_slots; ++i) {
    void* ptr = root.Alloc<partition_alloc::AllocFlags::kNoHooks>(object_size);
    EXPECT_TRUE(ptr);
    if (i == 0) {
      first = root.ObjectToSlotStart(ptr);
    } else if (i == num_slots - 1) {
      last = root.ObjectToSlotStart(ptr);
    }
  }

  EXPECT_EQ(SlotSpan::FromSlotStart(first), SlotSpan::FromSlotStart(last));
  if (bucket.num_system_pages_per_slot_span ==
      NumSystemPagesPerPartitionPage()) {
    // Pointers are expected to be in the same partition page, but have a
    // different MTE-tag.
    EXPECT_EQ(UntagAddr(first & PartitionPageBaseMask()),
              UntagAddr(last & PartitionPageBaseMask()));
  }
  EXPECT_EQ(num_slots, bucket.active_slot_spans_head->num_allocated_slots);
  EXPECT_EQ(nullptr, bucket.active_slot_spans_head->get_freelist_head());
  EXPECT_TRUE(bucket.is_valid());
  EXPECT_TRUE(bucket.active_slot_spans_head !=
              SlotSpan::get_sentinel_slot_span());

  return {bucket.active_slot_spans_head, root.SlotStartToObject(first),
          root.SlotStartToObject(last)};
}

bool IsInFreeList(uintptr_t slot_start) {
  // slot_start isn't MTE-tagged, whereas pointers in the freelist are.
  void* slot_start_tagged = SlotStartAddr2Ptr(slot_start);
  auto* slot_span = SlotSpan::FromSlotStart(slot_start);
  for (auto* entry = slot_span->get_freelist_head(); entry;
       entry = entry->GetNext(slot_span->bucket->slot_size)) {
    if (entry == slot_start_tagged) {
      return true;
    }
  }
  return false;
}

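// Minimal heap-allocated list nodes used by the tests to create references
// between objects on the partition heap.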
struct ListBase {
  // Volatile to prevent the compiler from doing dead store elimination.
  ListBase* volatile next = nullptr;
};

template <size_t Size, size_t Alignment = 0>
struct List final : ListBase {
  char buffer[Size];

  static List* Create(PartitionRoot& root, ListBase* next = nullptr) {
    List* list;
    if (Alignment) {
      list = static_cast<List*>(root.AlignedAlloc(Alignment, sizeof(List)));
    } else {
      list = static_cast<List*>(root.Alloc(sizeof(List), nullptr));
    }
    list->next = next;
    return list;
  }

  static void Destroy(PartitionRoot& root, List* list) { root.Free(list); }
};

constexpr auto kPartitionOptionWithStarScan = []() {
  PartitionOptions opts;
  opts.star_scan_quarantine = PartitionOptions::kAllowed;
  return opts;
}();

TEST_F(PartitionAllocPCScanTest, ArbitraryObjectInQuarantine) {
  using ListType = List<8>;

  auto* obj1 = ListType::Create(root());
  auto* obj2 = ListType::Create(root());
  EXPECT_FALSE(IsInQuarantine(obj1));
  EXPECT_FALSE(IsInQuarantine(obj2));

  ListType::Destroy(root(), obj2);
  EXPECT_FALSE(IsInQuarantine(obj1));
  EXPECT_TRUE(IsInQuarantine(obj2));
}

TEST_F(PartitionAllocPCScanTest, FirstObjectInQuarantine) {
  static constexpr size_t kAllocationSize = 16;

  FullSlotSpanAllocation full_slot_span =
      GetFullSlotSpan(root(), kAllocationSize);
  EXPECT_FALSE(IsInQuarantine(full_slot_span.first));

  root().Free<FreeFlags::kNoHooks>(full_slot_span.first);
  EXPECT_TRUE(IsInQuarantine(full_slot_span.first));
}

TEST_F(PartitionAllocPCScanTest, LastObjectInQuarantine) {
  static constexpr size_t kAllocationSize = 16;

  FullSlotSpanAllocation full_slot_span =
      GetFullSlotSpan(root(), kAllocationSize);
  EXPECT_FALSE(IsInQuarantine(full_slot_span.last));

  root().Free<FreeFlags::kNoHooks>(full_slot_span.last);
  EXPECT_TRUE(IsInQuarantine(full_slot_span.last));
}

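// Frees |value| while |source| still points to it, verifies that a scan keeps
// the quarantined object alive, then clears the reference and verifies that
// the next scan releases the object back to the freelist.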
template <typename SourceList, typename ValueList>
void TestDanglingReference(PartitionAllocPCScanTest& test,
                           SourceList* source,
                           ValueList* value,
                           PartitionRoot& value_root) {
  {
    // Free |value| and leave the dangling reference in |source|.
    ValueList::Destroy(value_root, value);
    // Check that |value| is in the quarantine now.
    EXPECT_TRUE(test.IsInQuarantine(value));
    // Run PCScan.
    test.RunPCScan();
    // Check that the object is still quarantined since it's referenced by
    // |source|.
    EXPECT_TRUE(test.IsInQuarantine(value));
  }
  {
    // Get rid of the dangling reference.
    source->next = nullptr;
    // Run PCScan again.
    test.RunPCScan();
    // Check that the object is no longer in the quarantine.
    EXPECT_FALSE(test.IsInQuarantine(value));
    // Check that the object is in the freelist now.
    EXPECT_TRUE(IsInFreeList(value_root.ObjectToSlotStart(value)));
  }
}

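// Counterpart for references PCScan is not expected to see: after a single
// scan the freed object must already be released back to the freelist.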
void TestDanglingReferenceNotVisited(PartitionAllocPCScanTest& test,
                                     void* value,
                                     PartitionRoot& value_root) {
  value_root.Free(value);
  // Check that |value| is in the quarantine now.
  EXPECT_TRUE(test.IsInQuarantine(value));
  // Run PCScan.
  test.RunPCScan();
  // Check that the object is no longer in the quarantine since the pointer to
  // it was not scanned from the non-scannable partition.
  EXPECT_FALSE(test.IsInQuarantine(value));
  // Check that the object is in the freelist now.
  EXPECT_TRUE(IsInFreeList(value_root.ObjectToSlotStart(value)));
}

TEST_F(PartitionAllocPCScanTest, DanglingReferenceSameBucket) {
  using SourceList = List<8>;
  using ValueList = SourceList;

  // Create two objects, where |source| references |value|.
  auto* value = ValueList::Create(root(), nullptr);
  auto* source = SourceList::Create(root(), value);

  TestDanglingReference(*this, source, value, root());
}

TEST_F(PartitionAllocPCScanTest, DanglingReferenceDifferentBuckets) {
  using SourceList = List<8>;
  using ValueList = List<128>;

  // Create two objects, where |source| references |value|.
  auto* value = ValueList::Create(root(), nullptr);
  auto* source = SourceList::Create(root(), value);

  TestDanglingReference(*this, source, value, root());
}

TEST_F(PartitionAllocPCScanTest, DanglingReferenceDifferentBucketsAligned) {
  // Choose a high alignment that almost certainly will cause a gap between slot
  // spans. But make it less than kMaxSupportedAlignment, or else two
  // allocations will end up on different super pages.
  constexpr size_t alignment = kMaxSupportedAlignment / 2;
  using SourceList = List<8, alignment>;
  using ValueList = List<128, alignment>;

  // Create two objects, where |source| references |value|.
  auto* value = ValueList::Create(root(), nullptr);
  auto* source = SourceList::Create(root(), value);

  // Double check the setup -- make sure that exactly two slot spans were
  // allocated, within the same super page, with a gap in between.
  {
    ::partition_alloc::internal::ScopedGuard guard{root().lock_};

    uintptr_t value_slot_start = root().ObjectToSlotStart(value);
    uintptr_t source_slot_start = root().ObjectToSlotStart(source);
    auto super_page = value_slot_start & kSuperPageBaseMask;
    ASSERT_EQ(super_page, source_slot_start & kSuperPageBaseMask);
    size_t i = 0;
    uintptr_t first_slot_span_end = 0;
    uintptr_t second_slot_span_start = 0;
    IterateSlotSpans(super_page, true, [&](SlotSpan* slot_span) -> bool {
      if (i == 0) {
        first_slot_span_end =
            SlotSpan::ToSlotSpanStart(slot_span) +
            slot_span->bucket->get_pages_per_slot_span() * PartitionPageSize();
      } else {
        second_slot_span_start = SlotSpan::ToSlotSpanStart(slot_span);
      }
      ++i;
      return false;
    });
    ASSERT_EQ(i, 2u);
    ASSERT_GT(second_slot_span_start, first_slot_span_end);
  }

  TestDanglingReference(*this, source, value, root());
}

TEST_F(PartitionAllocPCScanTest,
       DanglingReferenceSameSlotSpanButDifferentPages) {
  using SourceList = List<8>;
  using ValueList = SourceList;

  static const size_t kObjectSizeForSlotSpanConsistingOfMultiplePartitionPages =
      static_cast<size_t>(PartitionPageSize() * 0.75);

  FullSlotSpanAllocation full_slot_span = GetFullSlotSpan(
      root(), root().AdjustSizeForExtrasSubtract(
                  kObjectSizeForSlotSpanConsistingOfMultiplePartitionPages));

  // Assert that the first and the last objects are in the same slot span but on
  // different partition pages.
  // Converting to slot start also takes care of the MTE-tag difference.
  ASSERT_EQ(SlotSpan::FromObject(full_slot_span.first),
            SlotSpan::FromObject(full_slot_span.last));
  uintptr_t first_slot_start = root().ObjectToSlotStart(full_slot_span.first);
  uintptr_t last_slot_start = root().ObjectToSlotStart(full_slot_span.last);
  ASSERT_NE(first_slot_start & PartitionPageBaseMask(),
            last_slot_start & PartitionPageBaseMask());

  // Create two objects, on different partition pages.
  auto* value = new (full_slot_span.first) ValueList;
  auto* source = new (full_slot_span.last) SourceList;
  source->next = value;

  TestDanglingReference(*this, source, value, root());
}

TEST_F(PartitionAllocPCScanTest, DanglingReferenceFromFullPage) {
  using SourceList = List<64>;
  using ValueList = SourceList;

  FullSlotSpanAllocation full_slot_span =
      GetFullSlotSpan(root(), sizeof(SourceList));
  void* source_buffer = full_slot_span.first;
  // This allocation must go through the slow path and call SetNewActivePage(),
  // which will flush the full page from the active page list.
  void* value_buffer =
      root().Alloc<partition_alloc::AllocFlags::kNoHooks>(sizeof(ValueList));

  // Assert that the first and the last objects are in different slot spans but
  // in the same bucket.
  SlotSpan* source_slot_span =
      PartitionRoot::SlotSpan::FromObject(source_buffer);
  SlotSpan* value_slot_span = PartitionRoot::SlotSpan::FromObject(value_buffer);
  ASSERT_NE(source_slot_span, value_slot_span);
  ASSERT_EQ(source_slot_span->bucket, value_slot_span->bucket);

  // Create two objects, where |source| is in a full detached page.
  auto* value = new (value_buffer) ValueList;
  auto* source = new (source_buffer) SourceList;
  source->next = value;

  TestDanglingReference(*this, source, value, root());
}

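// List node whose reference field sits between two payload buffers, so that
// tests can store a pointer into the interior of another object's slot.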
template <size_t Size>
struct ListWithInnerReference {
  char buffer1[Size];
  // Volatile to prevent the compiler from doing dead store elimination.
  char* volatile next = nullptr;
  char buffer2[Size];

  static ListWithInnerReference* Create(PartitionRoot& root) {
    auto* list = static_cast<ListWithInnerReference*>(
        root.Alloc(sizeof(ListWithInnerReference), nullptr));
    return list;
  }

  static void Destroy(PartitionRoot& root, ListWithInnerReference* list) {
    root.Free(list);
  }
};

// Disabled due to consistent failure http://crbug.com/1242407
#if BUILDFLAG(IS_ANDROID)
#define MAYBE_DanglingInnerReference DISABLED_DanglingInnerReference
#else
#define MAYBE_DanglingInnerReference DanglingInnerReference
#endif
TEST_F(PartitionAllocPCScanTest, MAYBE_DanglingInnerReference) {
  using SourceList = ListWithInnerReference<64>;
  using ValueList = SourceList;

  auto* source = SourceList::Create(root());
  auto* value = ValueList::Create(root());
  source->next = value->buffer2;

  TestDanglingReference(*this, source, value, root());
}

TEST_F(PartitionAllocPCScanTest, DanglingReferenceFromSingleSlotSlotSpan) {
  using SourceList = List<kMaxBucketed - 4096>;
  using ValueList = SourceList;

  auto* source = SourceList::Create(root());
  auto* slot_span = SlotSpanMetadata::FromObject(source);
  ASSERT_TRUE(slot_span->CanStoreRawSize());

  auto* value = ValueList::Create(root());
  source->next = value;

  TestDanglingReference(*this, source, value, root());
}

TEST_F(PartitionAllocPCScanTest, DanglingInterPartitionReference) {
  using SourceList = List<64>;
  using ValueList = SourceList;

  PartitionRoot source_root(kPartitionOptionWithStarScan);
  source_root.UncapEmptySlotSpanMemoryForTesting();
  PartitionRoot value_root(kPartitionOptionWithStarScan);
  value_root.UncapEmptySlotSpanMemoryForTesting();

  PCScan::RegisterScannableRoot(&source_root);
  source_root.SetQuarantineAlwaysForTesting(true);
  PCScan::RegisterScannableRoot(&value_root);
  value_root.SetQuarantineAlwaysForTesting(true);

  auto* source = SourceList::Create(source_root);
  auto* value = ValueList::Create(value_root);
  source->next = value;

  TestDanglingReference(*this, source, value, value_root);
}

TEST_F(PartitionAllocPCScanTest, DanglingReferenceToNonScannablePartition) {
  using SourceList = List<64>;
  using ValueList = SourceList;

  PartitionRoot source_root(kPartitionOptionWithStarScan);
  source_root.UncapEmptySlotSpanMemoryForTesting();
  PartitionRoot value_root(kPartitionOptionWithStarScan);
  value_root.UncapEmptySlotSpanMemoryForTesting();

  PCScan::RegisterScannableRoot(&source_root);
  source_root.SetQuarantineAlwaysForTesting(true);
  PCScan::RegisterNonScannableRoot(&value_root);
  value_root.SetQuarantineAlwaysForTesting(true);

  auto* source = SourceList::Create(source_root);
  auto* value = ValueList::Create(value_root);
  source->next = value;

  TestDanglingReference(*this, source, value, value_root);
}

TEST_F(PartitionAllocPCScanTest, DanglingReferenceFromNonScannablePartition) {
  using SourceList = List<64>;
  using ValueList = SourceList;

  PartitionRoot source_root(kPartitionOptionWithStarScan);
  source_root.UncapEmptySlotSpanMemoryForTesting();
  PartitionRoot value_root(kPartitionOptionWithStarScan);
  value_root.UncapEmptySlotSpanMemoryForTesting();

  PCScan::RegisterNonScannableRoot(&source_root);
  value_root.SetQuarantineAlwaysForTesting(true);
  PCScan::RegisterScannableRoot(&value_root);
  source_root.SetQuarantineAlwaysForTesting(true);

  auto* source = SourceList::Create(source_root);
  auto* value = ValueList::Create(value_root);
  source->next = value;

  TestDanglingReferenceNotVisited(*this, value, value_root);
}

// Death tests misbehave on Android, http://crbug.com/643760.
#if defined(GTEST_HAS_DEATH_TEST) && !BUILDFLAG(IS_ANDROID)
#if PA_CONFIG(STARSCAN_EAGER_DOUBLE_FREE_DETECTION_ENABLED)
TEST_F(PartitionAllocPCScanTest, DoubleFree) {
  auto* list = List<1>::Create(root());
  List<1>::Destroy(root(), list);
  EXPECT_DEATH(List<1>::Destroy(root(), list), "");
}
#endif  // PA_CONFIG(STARSCAN_EAGER_DOUBLE_FREE_DETECTION_ENABLED)
#endif  // defined(GTEST_HAS_DEATH_TEST) && !BUILDFLAG(IS_ANDROID)

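// Same as TestDanglingReference(), but drives the scan through a safepoint:
// the scan is only scheduled, the mutator joins it, and the scanner finishes
// sweeping separately.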
template <typename SourceList, typename ValueList>
void TestDanglingReferenceWithSafepoint(PartitionAllocPCScanTest& test,
                                        SourceList* source,
                                        ValueList* value,
                                        PartitionRoot& value_root) {
  {
    // Free |value| and leave the dangling reference in |source|.
    ValueList::Destroy(value_root, value);
    // Check that |value| is in the quarantine now.
    EXPECT_TRUE(test.IsInQuarantine(value));
    // Schedule PCScan but don't scan.
    test.SchedulePCScan();
    // Enter safepoint and scan from mutator.
    test.JoinPCScanAsMutator();
    // Check that the object is still quarantined since it's referenced by
    // |source|.
    EXPECT_TRUE(test.IsInQuarantine(value));
    // Check that |value| is not in the freelist.
    EXPECT_FALSE(IsInFreeList(test.root().ObjectToSlotStart(value)));
    // Run sweeper.
    test.FinishPCScanAsScanner();
    // Check that |value| still exists.
    EXPECT_FALSE(IsInFreeList(test.root().ObjectToSlotStart(value)));
  }
  {
    // Get rid of the dangling reference.
    source->next = nullptr;
    // Schedule PCScan but don't scan.
    test.SchedulePCScan();
    // Enter safepoint and scan from mutator.
    test.JoinPCScanAsMutator();
    // Check that |value| is not in the freelist yet, since sweeper didn't run.
    EXPECT_FALSE(IsInFreeList(test.root().ObjectToSlotStart(value)));
    test.FinishPCScanAsScanner();
    // Check that the object is no longer in the quarantine.
    EXPECT_FALSE(test.IsInQuarantine(value));
    // Check that |value| is in the freelist now.
    EXPECT_TRUE(IsInFreeList(test.root().ObjectToSlotStart(value)));
  }
}

TEST_F(PartitionAllocPCScanTest, Safepoint) {
  using SourceList = List<64>;
  using ValueList = SourceList;

  DisableStackScanningScope no_stack_scanning;

  auto* source = SourceList::Create(root());
  auto* value = ValueList::Create(root());
  source->next = value;

  TestDanglingReferenceWithSafepoint(*this, source, value, root());
}

class PartitionAllocPCScanStackScanningTest : public PartitionAllocPCScanTest {
 protected:
  // Creates and sets a dangling reference in `dangling_reference_`.
  PA_NOINLINE void CreateDanglingReference() {
    using ValueList = List<8>;
    auto* value = ValueList::Create(root(), nullptr);
    ValueList::Destroy(root(), value);
    dangling_reference_ = value;
  }

  PA_NOINLINE void SetupAndRunTest() {
    // Register the top of the stack to be the current pointer.
    PCScan::NotifyThreadCreated(GetStackPointer());
    RunTest();
  }

  PA_NOINLINE void RunTest() {
    // This writes the pointer to the stack.
    [[maybe_unused]] auto* volatile stack_ref = dangling_reference_;
    // Call the non-inline function that would scan the stack. Don't execute
    // the rest of the actions inside the function, since otherwise it would
    // be tail-call optimized and the parent frame's stack with the dangling
    // pointer would be missed.
    ScanStack();
    // Check that the object is still quarantined since it's referenced by
    // |dangling_reference_|.
    EXPECT_TRUE(IsInQuarantine(dangling_reference_));
    // Check that value is not in the freelist.
    EXPECT_FALSE(IsInFreeList(root().ObjectToSlotStart(dangling_reference_)));
    // Run sweeper.
    FinishPCScanAsScanner();
    // Check that |dangling_reference_| still exists.
    EXPECT_FALSE(IsInFreeList(root().ObjectToSlotStart(dangling_reference_)));
  }

  PA_NOINLINE void ScanStack() {
    // Schedule PCScan but don't scan.
    SchedulePCScan();
    // Enter safepoint and scan from mutator. This will scan the stack.
    JoinPCScanAsMutator();
  }

  static void* dangling_reference_;
};

// static
void* PartitionAllocPCScanStackScanningTest::dangling_reference_ = nullptr;

// The test currently fails on some platforms because the dangling reference on
// the stack is not found.
TEST_F(PartitionAllocPCScanStackScanningTest, DISABLED_StackScanning) {
  PCScan::EnableStackScanning();

  // Set to nullptr if the test is retried.
  dangling_reference_ = nullptr;

  CreateDanglingReference();

  SetupAndRunTest();
}

TEST_F(PartitionAllocPCScanTest, DontScanUnusedRawSize) {
  using ValueList = List<8>;

  // Make sure to commit more memory than requested to have slack for storing
  // dangling reference outside of the raw size.
  const size_t big_size = kMaxBucketed - SystemPageSize() + 1;
  void* ptr = root().Alloc(big_size);

  uintptr_t slot_start = root().ObjectToSlotStart(ptr);
  auto* slot_span = SlotSpanMetadata::FromSlotStart(slot_start);
  ASSERT_TRUE(big_size + sizeof(void*) <=
              root().AllocationCapacityFromSlotStart(slot_start));
  ASSERT_TRUE(slot_span->CanStoreRawSize());

  auto* value = ValueList::Create(root());

  // This not only points past the object, but past all extras around it.
  // However, there should be enough space between this and the end of slot, to
  // store some data.
  uintptr_t source_end =
      slot_start +
      base::bits::AlignUp(slot_span->GetRawSize(), alignof(ValueList*));
  // Write the pointer.
  // Since we stripped the MTE-tag to get |slot_start|, we need to retag it.
  *static_cast<ValueList**>(TagAddr(source_end)) = value;

  TestDanglingReferenceNotVisited(*this, value, root());
}

TEST_F(PartitionAllocPCScanTest, PointersToGuardPages) {
  struct Pointers {
    void* super_page;
    void* metadata_page;
    void* guard_page1;
    void* scan_bitmap;
    void* guard_page2;
  };
  auto* const pointers = static_cast<Pointers*>(
      root().Alloc<partition_alloc::AllocFlags::kNoHooks>(sizeof(Pointers)));

  // Converting to slot start strips MTE tag.
  const uintptr_t super_page =
      root().ObjectToSlotStart(pointers) & kSuperPageBaseMask;

  // Initialize scannable pointers with addresses of guard pages and metadata.
  // None of these point to an MTE-tagged area, so no need for retagging.
  pointers->super_page = reinterpret_cast<void*>(super_page);
  pointers->metadata_page = PartitionSuperPageToMetadataArea(super_page);
  pointers->guard_page1 =
      static_cast<char*>(pointers->metadata_page) + SystemPageSize();
  pointers->scan_bitmap = SuperPageStateBitmap(super_page);
  pointers->guard_page2 = reinterpret_cast<void*>(super_page + kSuperPageSize -
                                                  PartitionPageSize());

  // Simply run PCScan and expect no crashes.
  RunPCScan();
}

TEST_F(PartitionAllocPCScanTest, TwoDanglingPointersToSameObject) {
  using SourceList = List<8>;
  using ValueList = List<128>;

  auto* value = ValueList::Create(root(), nullptr);
  // Create two source objects referring to |value|.
  SourceList::Create(root(), value);
  SourceList::Create(root(), value);

  // Destroy |value| and run PCScan.
  ValueList::Destroy(root(), value);
  RunPCScan();
  EXPECT_TRUE(IsInQuarantine(value));

  // Check that accounted size after the cycle is only sizeof ValueList.
  auto* slot_span_metadata = SlotSpan::FromObject(value);
  const auto& quarantine =
      PCScan::scheduler().scheduling_backend().GetQuarantineData();
  EXPECT_EQ(slot_span_metadata->bucket->slot_size, quarantine.current_size);
}

TEST_F(PartitionAllocPCScanTest, DanglingPointerToInaccessibleArea) {
  static const size_t kObjectSizeForSlotSpanConsistingOfMultiplePartitionPages =
      static_cast<size_t>(PartitionPageSize() * 1.25);

  FullSlotSpanAllocation full_slot_span = GetFullSlotSpan(
      root(), root().AdjustSizeForExtrasSubtract(
                  kObjectSizeForSlotSpanConsistingOfMultiplePartitionPages));

  // Assert that number of allocatable bytes for this bucket is smaller or equal
  // to all allocated partition pages.
  auto* bucket = full_slot_span.slot_span->bucket;
  ASSERT_LE(bucket->get_bytes_per_span(),
            bucket->get_pages_per_slot_span() * PartitionPageSize());

  // Let the first object point past the end of the last one + some random
  // offset.
  // It should fall within the same slot, so no need for MTE-retagging.
  static constexpr size_t kOffsetPastEnd = 7;
  *reinterpret_cast<uint8_t**>(full_slot_span.first) =
      reinterpret_cast<uint8_t*>(full_slot_span.last) +
      kObjectSizeForSlotSpanConsistingOfMultiplePartitionPages + kOffsetPastEnd;

  // Destroy the last object and put it in quarantine.
  root().Free(full_slot_span.last);
  EXPECT_TRUE(IsInQuarantine(full_slot_span.last));

  // Run PCScan. After it, the quarantined object should not be promoted.
  RunPCScan();
  EXPECT_FALSE(IsInQuarantine(full_slot_span.last));
}

TEST_F(PartitionAllocPCScanTest, DanglingPointerOutsideUsablePart) {
  using ValueList = List<kMaxBucketed - 4096>;
  using SourceList = List<64>;

  auto* value = ValueList::Create(root());
  auto* slot_span = SlotSpanMetadata::FromObject(value);
  ASSERT_TRUE(slot_span->CanStoreRawSize());

  auto* source = SourceList::Create(root());

  // Let the |source| object point to the unused area of |value| and expect
  // |value| to be nevertheless marked during scanning.
  // It should fall within the same slot, so no need for MTE-retagging.
  static constexpr size_t kOffsetPastEnd = 7;
  source->next = reinterpret_cast<ListBase*>(
      reinterpret_cast<uint8_t*>(value + 1) + kOffsetPastEnd);

  TestDanglingReference(*this, source, value, root());
}

#if PA_CONFIG(HAS_MEMORY_TAGGING)
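// With MTE, a freed slot is normally retagged and handed straight back to the
// freelist; it is routed to the quarantine only once its tag overflows.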
TEST_F(PartitionAllocPCScanWithMTETest, QuarantineOnlyOnTagOverflow) {
  using ListType = List<64>;

  if (!base::CPU::GetInstanceNoAllocation().has_mte()) {
    return;
  }

  {
    auto* obj1 = ListType::Create(root());
    ListType::Destroy(root(), obj1);
    auto* obj2 = ListType::Create(root());
    // The test relies on unrandomized freelist! If the slot was not moved to
    // quarantine, assert that the obj2 is the same as obj1 and the tags are
    // different.
    // MTE-retag |obj1|, as the tag changed when freeing it.
    if (!HasOverflowTag(TagPtr(obj1))) {
      // Assert that the pointer is the same.
      ASSERT_EQ(UntagPtr(obj1), UntagPtr(obj2));
      // Assert that the tag is different.
      ASSERT_NE(obj1, obj2);
    }
  }

  for (size_t i = 0; i < 16; ++i) {
    auto* obj = ListType::Create(root());
    ListType::Destroy(root(), obj);
    // MTE-retag |obj|, as the tag changed when freeing it.
    obj = TagPtr(obj);
    // Check if the tag overflows. If so, the object must be in quarantine.
    if (HasOverflowTag(obj)) {
      EXPECT_TRUE(IsInQuarantine(obj));
      EXPECT_FALSE(IsInFreeList(root().ObjectToSlotStart(obj)));
      return;
    } else {
      EXPECT_FALSE(IsInQuarantine(obj));
      EXPECT_TRUE(IsInFreeList(root().ObjectToSlotStart(obj)));
    }
  }

  EXPECT_FALSE(true && "Should never be reached");
}
#endif  // PA_CONFIG(HAS_MEMORY_TAGGING)

}  // namespace

}  // namespace partition_alloc::internal

#endif  // BUILDFLAG(USE_STARSCAN)
#endif  // defined(MEMORY_TOOL_REPLACES_ALLOCATOR)