// Copyright 2023 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/debug/allocation_trace.h"

#include <algorithm>
#include <array>
#include <cstddef>
#include <cstdint>
#include <iterator>
#include <memory>
#include <new>
#include <sstream>
#include <string>
#include <type_traits>
#include <vector>

#include "base/allocator/dispatcher/dispatcher.h"
#include "base/allocator/dispatcher/testing/tools.h"
#include "base/debug/stack_trace.h"
#include "partition_alloc/partition_alloc_allocation_data.h"
#include "partition_alloc/partition_alloc_config.h"
#include "testing/gmock/include/gmock/gmock-matchers.h"
#include "testing/gtest/include/gtest/gtest.h"

using base::allocator::dispatcher::AllocationNotificationData;
using base::allocator::dispatcher::AllocationSubsystem;
using base::allocator::dispatcher::FreeNotificationData;
using base::allocator::dispatcher::MTEMode;
using testing::Combine;
using testing::ContainerEq;
using testing::Message;
using testing::Test;
using testing::Values;

namespace base::debug::tracer {
namespace {

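// Renders the elements of a range or container as a bracketed,
// comma-separated list (e.g. "[1,2,3]") for use in test failure messages.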
template <typename Iterator>
std::string MakeString(Iterator begin, Iterator end) {
  using value_type = decltype(*begin);
  std::ostringstream oss;
  oss << '[';
  if (begin != end) {
    auto last_element = end - 1;
    std::copy(begin, last_element, std::ostream_iterator<value_type>(oss, ","));
    oss << *last_element;
  }
  oss << ']';

  return oss.str();
}

template <typename C>
std::string MakeString(const C& data) {
  return MakeString(std::begin(data), std::end(data));
}

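// Compares two OperationRecords field by field: operation type, address,
// size and recorded stack trace.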
void AreEqual(const base::debug::tracer::OperationRecord& expected,
              const base::debug::tracer::OperationRecord& is) {
  EXPECT_EQ(is.GetOperationType(), expected.GetOperationType());
  EXPECT_EQ(is.GetAddress(), expected.GetAddress());
  EXPECT_EQ(is.GetSize(), expected.GetSize());
  EXPECT_THAT(is.GetStackTrace(), ContainerEq(expected.GetStackTrace()));
}

}  // namespace

class AllocationTraceRecorderTest : public Test {
 protected:
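  // Returns the recorder instance that the test cases operate on.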
  AllocationTraceRecorder& GetSubjectUnderTest() const {
    return *subject_under_test_;
  }

  // During test, Buffer will hold a binary copy of the AllocationTraceRecorder
  // under test.
  struct Buffer {
    alignas(AllocationTraceRecorder)
        std::array<uint8_t, sizeof(AllocationTraceRecorder)> data;
  };

 protected:
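  // Builds an AllocationNotificationData for the partition allocator
  // subsystem. The MTE reporting mode is only forwarded when memory tagging
  // support is compiled in.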
  AllocationNotificationData CreateAllocationData(
      void* address,
      size_t size,
      MTEMode mte_mode = MTEMode::kUndefined) {
    return AllocationNotificationData(address, size, nullptr,
                                      AllocationSubsystem::kPartitionAllocator)
#if PA_BUILDFLAG(HAS_MEMORY_TAGGING)
        .SetMteReportingMode(mte_mode)
#endif
        ;
  }

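  // Builds the corresponding FreeNotificationData; as above, the MTE
  // reporting mode is only forwarded when memory tagging support is compiled
  // in.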
  FreeNotificationData CreateFreeData(void* address,
                                      MTEMode mte_mode = MTEMode::kUndefined) {
    return FreeNotificationData(address,
                                AllocationSubsystem::kPartitionAllocator)
#if PA_BUILDFLAG(HAS_MEMORY_TAGGING)
        .SetMteReportingMode(mte_mode)
#endif
        ;
  }

 private:
  // The recorder under test. Depending on the number and size of traces, it
  // requires quite a lot of space. Therefore, we create it on the heap to
  // avoid running out of stack space.
  std::unique_ptr<AllocationTraceRecorder> const subject_under_test_ =
      std::make_unique<AllocationTraceRecorder>();
};

TEST_F(AllocationTraceRecorderTest, VerifyBinaryCopy) {
  AllocationTraceRecorder& subject_under_test = GetSubjectUnderTest();

  // Fill the recorder with some fake allocations and frees.
  constexpr size_t number_of_records = 100;

  for (size_t index = 0; index < number_of_records; ++index) {
    if (index & 0x1) {
      subject_under_test.OnAllocation(
          CreateAllocationData(this, sizeof(*this)));
    } else {
      subject_under_test.OnFree(CreateFreeData(this));
    }
  }

  ASSERT_EQ(number_of_records, subject_under_test.size());

  // Create a copy of the recorder using buffer as storage for the copy.
  auto const buffer = std::make_unique<Buffer>();

  ASSERT_TRUE(buffer);

  AllocationTraceRecorder* const buffered_recorder =
      new (buffer->data.data()) AllocationTraceRecorder();

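  // AllocationTraceRecorder is trivially copyable, so a byte-wise copy into
  // the separately allocated buffer must yield an equivalent recorder.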
  static_assert(std::is_trivially_copyable_v<AllocationTraceRecorder>);
  base::byte_span_from_ref(*buffered_recorder)
      .copy_from(base::byte_span_from_ref(subject_under_test));

  // Verify that the original recorder and the buffered recorder are equal.
  ASSERT_EQ(subject_under_test.size(), buffered_recorder->size());

  for (size_t index = 0; index < subject_under_test.size(); ++index) {
    SCOPED_TRACE(Message("difference detected at index ") << index);
    AreEqual(subject_under_test[index], (*buffered_recorder)[index]);
  }

  buffered_recorder->~AllocationTraceRecorder();
}

TEST_F(AllocationTraceRecorderTest, VerifySingleAllocation) {
  AllocationTraceRecorder& subject_under_test = GetSubjectUnderTest();

  subject_under_test.OnAllocation(
      CreateAllocationData(&subject_under_test, sizeof(subject_under_test)));

  EXPECT_EQ(1ul, subject_under_test.size());

  const auto& record_data = subject_under_test[0];
  const auto& stack_trace = record_data.GetStackTrace();

  EXPECT_EQ(OperationType::kAllocation, record_data.GetOperationType());
  EXPECT_EQ(&subject_under_test, record_data.GetAddress());
  EXPECT_EQ(sizeof(subject_under_test), record_data.GetSize());
  EXPECT_NE(nullptr, stack_trace.at(0));
}

TEST_F(AllocationTraceRecorderTest, VerifySingleFree) {
  AllocationTraceRecorder& subject_under_test = GetSubjectUnderTest();

  subject_under_test.OnFree(CreateFreeData(&subject_under_test));

  EXPECT_EQ(1ul, subject_under_test.size());

  const auto& record_data = subject_under_test[0];
  const auto& stack_trace = record_data.GetStackTrace();

  EXPECT_EQ(OperationType::kFree, record_data.GetOperationType());
  EXPECT_EQ(&subject_under_test, record_data.GetAddress());
  EXPECT_EQ(0ul, record_data.GetSize());
  EXPECT_NE(nullptr, stack_trace.at(0));
}

TEST_F(AllocationTraceRecorderTest, VerifyMultipleOperations) {
  AllocationTraceRecorder& subject_under_test = GetSubjectUnderTest();

  // Some (valid) pointers to use in the allocation operations.
  std::vector<uint8_t> addrs_buf(sizeof(*this) * 7u);
  uint8_t* addr0 = &addrs_buf[0u * sizeof(*this)];
  // uint8_t* addr1 = &addrs_buf[1u * sizeof(*this)];
  uint8_t* addr2 = &addrs_buf[2u * sizeof(*this)];
  uint8_t* addr3 = &addrs_buf[3u * sizeof(*this)];
  uint8_t* addr4 = &addrs_buf[4u * sizeof(*this)];
  uint8_t* addr5 = &addrs_buf[5u * sizeof(*this)];
  uint8_t* addr6 = &addrs_buf[6u * sizeof(*this)];

  // We perform a number of operations.
  subject_under_test.OnAllocation(
      CreateAllocationData(addr0, 1 * sizeof(*this)));
  subject_under_test.OnFree(CreateFreeData(addr2));
  subject_under_test.OnAllocation(
      CreateAllocationData(addr3, 3 * sizeof(*this)));
  subject_under_test.OnAllocation(
      CreateAllocationData(addr4, 4 * sizeof(*this)));
  subject_under_test.OnFree(CreateFreeData(addr5));
  subject_under_test.OnFree(CreateFreeData(addr6));

  ASSERT_EQ(subject_under_test.size(), 6ul);

  // Verify that the stored operations match the expected ones.
  {
    const auto& entry = subject_under_test[0];
    ASSERT_EQ(entry.GetOperationType(), OperationType::kAllocation);
    ASSERT_EQ(entry.GetAddress(), addr0);
    ASSERT_EQ(entry.GetSize(), 1 * sizeof(*this));
    ASSERT_NE(entry.GetStackTrace()[0], nullptr);
  }
  {
    const auto& entry = subject_under_test[1];
    ASSERT_EQ(entry.GetOperationType(), OperationType::kFree);
    ASSERT_EQ(entry.GetAddress(), addr2);
    ASSERT_EQ(entry.GetSize(), 0ul);
    ASSERT_NE(entry.GetStackTrace()[0], nullptr);
  }
  {
    const auto& entry = subject_under_test[2];
    ASSERT_EQ(entry.GetOperationType(), OperationType::kAllocation);
    ASSERT_EQ(entry.GetAddress(), addr3);
    ASSERT_EQ(entry.GetSize(), 3 * sizeof(*this));
    ASSERT_NE(entry.GetStackTrace()[0], nullptr);
  }
  {
    const auto& entry = subject_under_test[3];
    ASSERT_EQ(entry.GetOperationType(), OperationType::kAllocation);
    ASSERT_EQ(entry.GetAddress(), addr4);
    ASSERT_EQ(entry.GetSize(), 4 * sizeof(*this));
    ASSERT_NE(entry.GetStackTrace()[0], nullptr);
  }
  {
    const auto& entry = subject_under_test[4];
    ASSERT_EQ(entry.GetOperationType(), OperationType::kFree);
    ASSERT_EQ(entry.GetAddress(), addr5);
    ASSERT_EQ(entry.GetSize(), 0ul);
    ASSERT_NE(entry.GetStackTrace()[0], nullptr);
  }
  {
    const auto& entry = subject_under_test[5];
    ASSERT_EQ(entry.GetOperationType(), OperationType::kFree);
    ASSERT_EQ(entry.GetAddress(), addr6);
    ASSERT_EQ(entry.GetSize(), 0ul);
    ASSERT_NE(entry.GetStackTrace()[0], nullptr);
  }
}

TEST_F(AllocationTraceRecorderTest, VerifyOverflowOfOperations) {
  AllocationTraceRecorder& subject_under_test = GetSubjectUnderTest();

  auto num_traces = subject_under_test.GetMaximumNumberOfTraces();

  // Some (valid) pointers to use in the allocation operations.
  std::vector<uint8_t> addrs_buf(sizeof(*this) * (num_traces + 1));
  auto addr = [&](auto idx) { return &addrs_buf[idx * sizeof(*this)]; };

  decltype(num_traces) idx;
  for (idx = 0; idx < subject_under_test.GetMaximumNumberOfTraces(); ++idx) {
    ASSERT_EQ(subject_under_test.size(), idx);
    const bool is_allocation = !(idx & 0x1);

    // Record an allocation or free.
    if (is_allocation) {
      subject_under_test.OnAllocation(CreateAllocationData(addr(idx), idx));
    } else {
      subject_under_test.OnFree(CreateFreeData(addr(idx)));
    }

    // Some verifications.
    {
      ASSERT_EQ(subject_under_test.size(), (idx + 1));

      // Some verification on the added entry.
      {
        const auto& last_entry = subject_under_test[idx];
        ASSERT_EQ(last_entry.GetAddress(), addr(idx));
        // No full verification intended, just a check that something has been
        // written.
        ASSERT_NE(last_entry.GetStackTrace()[0], nullptr);
        if (is_allocation) {
          ASSERT_EQ(last_entry.GetOperationType(), OperationType::kAllocation);
          ASSERT_EQ(last_entry.GetSize(), idx);
        } else {
          ASSERT_EQ(last_entry.GetOperationType(), OperationType::kFree);
          ASSERT_EQ(last_entry.GetSize(), 0ul);
        }
      }

      // The first entry must remain unchanged.
      {
        const auto& first_entry = subject_under_test[0];
        ASSERT_EQ(first_entry.GetOperationType(), OperationType::kAllocation);
        ASSERT_EQ(first_entry.GetAddress(), addr(0));
        ASSERT_EQ(first_entry.GetSize(), 0ul);
      }
    }
  }

  // By now we have written all available records including the last one.
  // So the following allocation should overwrite the first record.
  {
    const auto& old_second_entry = subject_under_test[1];

    subject_under_test.OnAllocation(CreateAllocationData(addr(idx), idx));
    ASSERT_EQ(subject_under_test.size(),
              subject_under_test.GetMaximumNumberOfTraces());
    const auto& last_entry =
        subject_under_test[subject_under_test.GetMaximumNumberOfTraces() - 1];
    ASSERT_EQ(last_entry.GetOperationType(), OperationType::kAllocation);
    ASSERT_EQ(last_entry.GetAddress(), addr(idx));

    // Check that the previous first entry (an allocation) is gone. Accessing
    // the first record now yields what was previously the second record (a
    // free operation).
    const auto& first_entry = subject_under_test[0];

    ASSERT_EQ(&old_second_entry, &first_entry);
    ASSERT_EQ(first_entry.GetOperationType(), OperationType::kFree);
    ASSERT_EQ(first_entry.GetAddress(), addr(1));
  }
}

TEST_F(AllocationTraceRecorderTest, VerifyWithHooks) {
  auto& dispatcher = base::allocator::dispatcher::Dispatcher::GetInstance();
  AllocationTraceRecorder& subject_under_test = GetSubjectUnderTest();

  dispatcher.InitializeForTesting(&subject_under_test);

  // Perform an allocation and free.
  std::make_unique<std::string>(
      "Just enforce an allocation and free to trigger notification of the "
      "subject under test. Hopefully this string is long enough to bypass any "
      "small string optimizations that the STL implementation might use.");

  dispatcher.ResetForTesting();

  // We only test for greater-equal since allocations from other parts of
  // GTest might have interfered.
  EXPECT_GE(subject_under_test.size(), 2ul);
}

class OperationRecordTest : public Test {
 protected:
  using ReferenceStackTrace = std::vector<const void*>;

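  // Captures the current call stack via TraceStackFramePointers and trims
  // the buffer to the number of frames actually recorded.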
  ReferenceStackTrace GetReferenceTrace() {
    ReferenceStackTrace frame_pointers(128);
    const auto num_frames =
        base::debug::TraceStackFramePointers(frame_pointers, 0);
    frame_pointers.resize(num_frames);
    frame_pointers.shrink_to_fit();
    return frame_pointers;
  }

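  // Compares a recorded stack trace against the reference trace. Since
  // inlining may differ between the two capture sites, the comparison starts
  // at the first frame common to both traces.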
  void VerifyStackTrace(
      const ReferenceStackTrace& reference_stack_trace,
      const base::debug::tracer::StackTraceContainer& stack_trace) {
    // Verify we have at least one entry in the stack.
    ASSERT_NE(nullptr, stack_trace.at(0));
    ASSERT_GT(stack_trace.size(), 0ul);

    // Although functions are marked ALWAYS_INLINE, the compiler may choose not
    // to inline, depending e.g. on the optimization level. Therefore, we
    // search for the first common frame in both stack traces. From there on,
    // both must be equal for the remaining number of frames.
    auto const it_stack_trace_begin = std::begin(stack_trace);
    auto const it_stack_trace_end =
        std::find(it_stack_trace_begin, std::end(stack_trace), nullptr);
    auto const it_reference_stack_trace_end = std::end(reference_stack_trace);

    auto const it_stack_trace = std::find_first_of(
        it_stack_trace_begin, it_stack_trace_end,
        std::begin(reference_stack_trace), it_reference_stack_trace_end);

    ASSERT_NE(it_stack_trace, it_stack_trace_end)
        << "stack-trace and reference-stack-trace share no common frame!\n"
        << "stack trace = " << MakeString(stack_trace) << '\n'
        << "reference stack trace = " << MakeString(reference_stack_trace);

    // Find the common frame in the reference-stack-trace.
    const auto it_reference_stack_trace =
        std::find(std::begin(reference_stack_trace),
                  it_reference_stack_trace_end, *it_stack_trace);

    const auto number_of_expected_common_frames = std::min(
        std::distance(it_stack_trace, it_stack_trace_end),
        std::distance(it_reference_stack_trace, it_reference_stack_trace_end));

    // Check if we have any difference within the section of frames that we
    // expect to be equal.
    const auto mismatch = std::mismatch(
        it_reference_stack_trace,
        it_reference_stack_trace + number_of_expected_common_frames,
        it_stack_trace);

    ASSERT_EQ(mismatch.first,
              (it_reference_stack_trace + number_of_expected_common_frames))
        << "found difference in the range of frames expected to be equal!\n"
        << "position = "
        << std::distance(it_reference_stack_trace, mismatch.first) << '\n'
        << "stack trace = "
        << MakeString(it_stack_trace,
                      it_stack_trace + number_of_expected_common_frames)
        << '\n'
        << "reference stack trace = "
        << MakeString(
               it_reference_stack_trace,
               it_reference_stack_trace + number_of_expected_common_frames);
  }
};

TEST_F(OperationRecordTest, VerifyConstructor) {
  OperationRecord subject_under_test;

  EXPECT_EQ(subject_under_test.GetOperationType(), OperationType::kNone);
  EXPECT_EQ(subject_under_test.GetAddress(), nullptr);
  EXPECT_EQ(subject_under_test.GetSize(), 0ul);
  EXPECT_FALSE(subject_under_test.IsRecording());

  // The stack trace is not initialized by the constructor. Therefore, we do
  // not check it here.
}

TEST_F(OperationRecordTest, VerifyRecordAllocation) {
  const ReferenceStackTrace reference_trace = GetReferenceTrace();

  void* const address = this;
  size_t const size = sizeof(*this);

  OperationRecord subject_under_test;

  ASSERT_TRUE(subject_under_test.InitializeAllocation(address, size));

  EXPECT_EQ(OperationType::kAllocation, subject_under_test.GetOperationType());
  EXPECT_EQ(address, subject_under_test.GetAddress());
  EXPECT_EQ(size, subject_under_test.GetSize());
  EXPECT_FALSE(subject_under_test.IsRecording());

  VerifyStackTrace(reference_trace, subject_under_test.GetStackTrace());
}

TEST_F(OperationRecordTest, VerifyRecordFree) {
  const ReferenceStackTrace reference_trace = GetReferenceTrace();

  void* const address = this;
  size_t const size = 0;

  OperationRecord subject_under_test;

  ASSERT_TRUE(subject_under_test.InitializeFree(address));

  EXPECT_EQ(OperationType::kFree, subject_under_test.GetOperationType());
  EXPECT_EQ(address, subject_under_test.GetAddress());
  EXPECT_EQ(size, subject_under_test.GetSize());
  EXPECT_FALSE(subject_under_test.IsRecording());

  VerifyStackTrace(reference_trace, subject_under_test.GetStackTrace());
}

}  // namespace base::debug::tracer