1 // Copyright 2023 The Chromium Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "base/debug/allocation_trace.h"
6
7 #include <algorithm>
8 #include <cstddef>
9 #include <iterator>
10 #include <memory>
11 #include <sstream>
12 #include <string>
13
14 #include "base/allocator/dispatcher/dispatcher.h"
15 #include "base/debug/stack_trace.h"
16 #include "testing/gmock/include/gmock/gmock-matchers.h"
17 #include "testing/gtest/include/gtest/gtest.h"
18
19 using base::allocator::dispatcher::AllocationSubsystem;
20 using testing::ContainerEq;
21 using testing::Message;
22 using testing::Test;
23
24 namespace base::debug::tracer {
25 namespace {
26
// Formats the elements of [begin, end) as a comma separated list enclosed in
// square brackets, e.g. "[1,2,3]". Used to embed stack traces in assertion
// failure messages. Requires a random-access (or at least bidirectional)
// iterator, since it computes (end - 1) to avoid a trailing separator.
template <typename Iterator>
std::string MakeString(Iterator begin, Iterator end) {
  // Use the iterator's value type rather than decltype(*begin): dereferencing
  // yields a reference type, which is not a valid template argument for
  // std::ostream_iterator.
  using value_type = typename std::iterator_traits<Iterator>::value_type;
  std::ostringstream oss;
  oss << '[';
  if (begin != end) {
    auto last_element = end - 1;
    std::copy(begin, last_element, std::ostream_iterator<value_type>(oss, ","));
    oss << *last_element;
  }
  oss << ']';

  return oss.str();
}
41
// Convenience overload: formats every element of a container by forwarding to
// the iterator based MakeString above.
template <typename Container>
std::string MakeString(const Container& container) {
  return MakeString(std::begin(container), std::end(container));
}
46
// Verifies that two operation records carry identical data: operation type,
// address, size and the full stack trace. Uses non-fatal EXPECTs so that all
// mismatching fields are reported in a single pass.
void AreEqual(const base::debug::tracer::OperationRecord& expected,
              const base::debug::tracer::OperationRecord& is) {
  EXPECT_EQ(is.GetOperationType(), expected.GetOperationType());
  EXPECT_EQ(is.GetAddress(), expected.GetAddress());
  EXPECT_EQ(is.GetSize(), expected.GetSize());
  EXPECT_THAT(is.GetStackTrace(), ContainerEq(expected.GetStackTrace()));
}
54
55 } // namespace
56
// Fixture owning the AllocationTraceRecorder instance exercised by the tests
// below.
struct AllocationTraceRecorderTest : public Test {
  // Returns the recorder that a test should operate on.
  AllocationTraceRecorder& GetSubjectUnderTest() const {
    return *subject_under_test_;
  }

 protected:
  // During test, Buffer will hold a binary copy of the AllocationTraceRecorder
  // under test. alignas ensures the copy is as strictly aligned as a real
  // recorder instance.
  struct Buffer {
    alignas(
        AllocationTraceRecorder) uint8_t data[sizeof(AllocationTraceRecorder)];
  };

 private:
  // The recorder under test. Depending on number and size of traces, it
  // requires quite a lot of space. Therefore, we create it on heap to avoid any
  // out-of-stack scenarios.
  std::unique_ptr<AllocationTraceRecorder> const subject_under_test_ =
      std::make_unique<AllocationTraceRecorder>();
};
77
// Verifies that a recorder stays fully readable after being copied byte by
// byte into a separate, suitably aligned buffer — i.e. that it holds no
// pointers into itself that a raw memcpy would invalidate.
TEST_F(AllocationTraceRecorderTest, VerifyBinaryCopy) {
  AllocationTraceRecorder& subject_under_test = GetSubjectUnderTest();

  // Fill the recorder with some fake allocations and frees.
  constexpr size_t number_of_records = 100;

  for (size_t index = 0; index < number_of_records; ++index) {
    if (index & 0x1) {
      subject_under_test.OnAllocation(this, sizeof(*this),
                                      AllocationSubsystem::kPartitionAllocator,
                                      nullptr);
    } else {
      subject_under_test.OnFree(this);
    }
  }

  ASSERT_EQ(number_of_records, subject_under_test.size());

  // Create a copy of the recorder using buffer as storage for the copy. The
  // buffer lives on the heap since the recorder is too large for the stack.
  auto const buffer = std::make_unique<Buffer>();

  ASSERT_TRUE(buffer);

  auto* const buffered_recorder =
      reinterpret_cast<AllocationTraceRecorder*>(&(buffer->data[0]));

  memcpy(buffered_recorder, &subject_under_test,
         sizeof(AllocationTraceRecorder));

  // Verify that the original recorder and the buffered recorder are equal.
  ASSERT_EQ(subject_under_test.size(), buffered_recorder->size());

  for (size_t index = 0; index < subject_under_test.size(); ++index) {
    SCOPED_TRACE(Message("difference detected at index ") << index);
    AreEqual(subject_under_test[index], (*buffered_recorder)[index]);
  }
}
115
// A single recorded allocation must be retrievable with all of its data:
// operation type, address, size and a captured stack trace.
TEST_F(AllocationTraceRecorderTest, VerifySingleAllocation) {
  AllocationTraceRecorder& recorder = GetSubjectUnderTest();

  recorder.OnAllocation(&recorder, sizeof(recorder),
                        AllocationSubsystem::kPartitionAllocator, nullptr);

  EXPECT_EQ(1ul, recorder.size());

  const auto& record = recorder[0];

  EXPECT_EQ(OperationType::kAllocation, record.GetOperationType());
  EXPECT_EQ(&recorder, record.GetAddress());
  EXPECT_EQ(sizeof(recorder), record.GetSize());
  // At least the topmost frame of the call stack must have been captured.
  EXPECT_NE(nullptr, record.GetStackTrace().at(0));
}
133
// A single recorded free must be retrievable with all of its data. A free
// operation carries no size, so GetSize() is expected to report 0.
TEST_F(AllocationTraceRecorderTest, VerifySingleFree) {
  AllocationTraceRecorder& recorder = GetSubjectUnderTest();

  recorder.OnFree(&recorder);

  EXPECT_EQ(1ul, recorder.size());

  const auto& record = recorder[0];

  EXPECT_EQ(OperationType::kFree, record.GetOperationType());
  EXPECT_EQ(&recorder, record.GetAddress());
  EXPECT_EQ(0ul, record.GetSize());
  // At least the topmost frame of the call stack must have been captured.
  EXPECT_NE(nullptr, record.GetStackTrace().at(0));
}
149
// Verifies that a sequence of mixed allocation and free operations is stored
// in order, each record carrying the correct operation data.
TEST_F(AllocationTraceRecorderTest, VerifyMultipleOperations) {
  AllocationTraceRecorder& subject_under_test = GetSubjectUnderTest();

  // Checks one stored record against the expected operation data. For a free
  // operation the expected size is 0. Invoke via ASSERT_NO_FATAL_FAILURE so a
  // fatal failure inside the lambda aborts the whole test, matching the
  // semantics of the previous inline ASSERTs.
  const auto verify_record = [&](size_t index, OperationType operation_type,
                                 const void* address, size_t size) {
    SCOPED_TRACE(Message("checking record at index ") << index);
    const auto& entry = subject_under_test[index];
    ASSERT_EQ(entry.GetOperationType(), operation_type);
    ASSERT_EQ(entry.GetAddress(), address);
    ASSERT_EQ(entry.GetSize(), size);
    // No full stack-trace verification intended, just a check that something
    // has been written.
    ASSERT_NE(entry.GetStackTrace()[0], nullptr);
  };

  // We perform a number of operations.
  subject_under_test.OnAllocation(this, 1 * sizeof(*this),
                                  AllocationSubsystem::kPartitionAllocator,
                                  nullptr);
  subject_under_test.OnFree(this + 2);
  subject_under_test.OnAllocation(this + 3, 3 * sizeof(*this),
                                  AllocationSubsystem::kPartitionAllocator,
                                  nullptr);
  subject_under_test.OnAllocation(this + 4, 4 * sizeof(*this),
                                  AllocationSubsystem::kPartitionAllocator,
                                  nullptr);
  subject_under_test.OnFree(this + 5);
  subject_under_test.OnFree(this + 6);

  ASSERT_EQ(subject_under_test.size(), 6ul);

  // Verify that the stored operations match the expected.
  ASSERT_NO_FATAL_FAILURE(
      verify_record(0, OperationType::kAllocation, this, 1 * sizeof(*this)));
  ASSERT_NO_FATAL_FAILURE(
      verify_record(1, OperationType::kFree, this + 2, 0ul));
  ASSERT_NO_FATAL_FAILURE(verify_record(2, OperationType::kAllocation, this + 3,
                                        3 * sizeof(*this)));
  ASSERT_NO_FATAL_FAILURE(verify_record(3, OperationType::kAllocation, this + 4,
                                        4 * sizeof(*this)));
  ASSERT_NO_FATAL_FAILURE(
      verify_record(4, OperationType::kFree, this + 5, 0ul));
  ASSERT_NO_FATAL_FAILURE(
      verify_record(5, OperationType::kFree, this + 6, 0ul));
}
214
// Verifies the recorder's ring-buffer behaviour: once the maximum number of
// traces is reached, the next operation evicts the oldest record while size()
// stays capped at the maximum.
TEST_F(AllocationTraceRecorderTest, VerifyOverflowOfOperations) {
  AllocationTraceRecorder& subject_under_test = GetSubjectUnderTest();

  // idx is declared outside the loop because the overflow step below reuses
  // its final value (== GetMaximumNumberOfTraces()).
  decltype(subject_under_test.GetMaximumNumberOfTraces()) idx;
  for (idx = 0; idx < subject_under_test.GetMaximumNumberOfTraces(); ++idx) {
    ASSERT_EQ(subject_under_test.size(), idx);
    // Alternate: even indices record an allocation, odd indices a free.
    const bool is_allocation = !(idx & 0x1);

    // Record an allocation or free. The allocation's size is idx, so the
    // very first record (idx == 0) is an allocation of size 0.
    if (is_allocation) {
      subject_under_test.OnAllocation(
          this + idx, idx, AllocationSubsystem::kPartitionAllocator, nullptr);
    } else {
      subject_under_test.OnFree(this + idx);
    }

    // Some verifications.
    {
      ASSERT_EQ(subject_under_test.size(), (idx + 1));

      // Some verification on the added entry.
      {
        const auto& last_entry = subject_under_test[idx];
        ASSERT_EQ(last_entry.GetAddress(), (this + idx));
        // No full verification intended, just a check that something has been
        // written.
        ASSERT_NE(last_entry.GetStackTrace()[0], nullptr);
        if (is_allocation) {
          ASSERT_EQ(last_entry.GetOperationType(), OperationType::kAllocation);
          ASSERT_EQ(last_entry.GetSize(), idx);
        } else {
          ASSERT_EQ(last_entry.GetOperationType(), OperationType::kFree);
          ASSERT_EQ(last_entry.GetSize(), 0ul);
        }
      }

      // No changes on the first entry must be done. It is the idx == 0
      // allocation of size 0 recorded above.
      {
        const auto& first_entry = subject_under_test[0];
        ASSERT_EQ(first_entry.GetOperationType(), OperationType::kAllocation);
        ASSERT_EQ(first_entry.GetAddress(), this);
        ASSERT_EQ(first_entry.GetSize(), 0ul);
      }
    }
  }

  // By now we have written all available records including the last one.
  // So the following allocation should overwrite the first record.
  {
    const auto& old_second_entry = subject_under_test[1];

    subject_under_test.OnAllocation(
        this + idx, idx, AllocationSubsystem::kPartitionAllocator, nullptr);
    ASSERT_EQ(subject_under_test.size(),
              subject_under_test.GetMaximumNumberOfTraces());
    const auto& last_entry =
        subject_under_test[subject_under_test.GetMaximumNumberOfTraces() - 1];
    ASSERT_EQ(last_entry.GetOperationType(), OperationType::kAllocation);
    ASSERT_EQ(last_entry.GetAddress(), (this + idx));

    // Check that the previous first entry (an allocation) is gone. Accessing
    // the first record now yields what was previously the second record (a free
    // operation).
    const auto& first_entry = subject_under_test[0];

    ASSERT_EQ(&old_second_entry, &first_entry);
    ASSERT_EQ(first_entry.GetOperationType(), OperationType::kFree);
    ASSERT_EQ(first_entry.GetAddress(), (this + 1));
  }
}
285
// End-to-end check: with the recorder installed as observer in the allocator
// dispatcher, real heap allocations and frees are reported to it.
TEST_F(AllocationTraceRecorderTest, VerifyWithHooks) {
  auto& dispatcher = base::allocator::dispatcher::Dispatcher::GetInstance();
  AllocationTraceRecorder& subject_under_test = GetSubjectUnderTest();

  dispatcher.InitializeForTesting(&subject_under_test);

  // Perform an allocation and free. The result is deliberately discarded —
  // the temporary's creation and immediate destruction are what trigger the
  // dispatcher notifications.
  std::make_unique<std::string>(
      "Just enforce an allocation and free to trigger notification of the "
      "subject under test. Hopefully this string is long enough to bypass any "
      "small string optimizations that the STL implementation might use.");

  dispatcher.ResetForTesting();

  // We only test for greater-equal since allocation from other parts of GTest
  // might have interfered.
  EXPECT_GE(subject_under_test.size(), 2ul);
}
304
// Fixture for tests of a single OperationRecord, providing helpers to capture
// a reference stack trace and to compare it against a recorded one.
class OperationRecordTest : public Test {
 protected:
  using ReferenceStackTrace = std::vector<const void*>;

  // Captures the current call stack via frame pointers and returns it as a
  // vector holding exactly the number of frames reported.
  ReferenceStackTrace GetReferenceTrace() {
    constexpr size_t max_trace_size = 128;
    const void* frame_pointers[max_trace_size]{nullptr};
    const auto num_frames = base::debug::TraceStackFramePointers(
        &frame_pointers[0], max_trace_size, 0);
    ReferenceStackTrace trace;
    std::copy_n(std::begin(frame_pointers), num_frames,
                std::back_inserter(trace));
    return trace;
  }

  // Asserts that |stack_trace| and |reference_stack_trace| agree from their
  // first common frame onwards. The traces are not expected to be identical
  // at the top, see the inlining note below.
  void VerifyStackTrace(
      const ReferenceStackTrace& reference_stack_trace,
      const base::debug::tracer::StackTraceContainer& stack_trace) {
    // Verify we have at least one entry in the stack.
    ASSERT_NE(nullptr, stack_trace.at(0));
    ASSERT_GT(stack_trace.size(), 0ul);

    // Although functions are marked ALWAYS_INLINE, the compiler may choose not
    // to inline, depending i.e. on the optimization level. Therefore, we search
    // for the first common frame in both stack-traces. From there on, both must
    // be equal for the remaining number of frames.

    // The recorded trace is nullptr-terminated; treat the first nullptr as its
    // logical end.
    auto* const* const it_stack_trace_begin = std::begin(stack_trace);
    auto* const* const it_stack_trace_end =
        std::find(it_stack_trace_begin, std::end(stack_trace), nullptr);
    auto const it_reference_stack_trace_end = std::end(reference_stack_trace);

    // First frame of the recorded trace that also occurs in the reference.
    auto* const* it_stack_trace = std::find_first_of(
        it_stack_trace_begin, it_stack_trace_end,
        std::begin(reference_stack_trace), it_reference_stack_trace_end);

    ASSERT_NE(it_stack_trace, it_stack_trace_end)
        << "stack-trace and reference-stack-trace share no common frame!\n"
        << "stack trace = " << MakeString(stack_trace) << '\n'
        << "reference stack trace = " << MakeString(reference_stack_trace);

    // Find the common frame in the reference-stack-trace.
    const auto it_reference_stack_trace =
        std::find(std::begin(reference_stack_trace),
                  it_reference_stack_trace_end, *it_stack_trace);

    // Only the overlapping tail of both traces can be compared; beyond the
    // shorter one no statement can be made.
    const auto number_of_expected_common_frames = std::min(
        std::distance(it_stack_trace, it_stack_trace_end),
        std::distance(it_reference_stack_trace, it_reference_stack_trace_end));

    // Check if we have any difference within the section of frames that we
    // expect to be equal.
    const auto mismatch = std::mismatch(
        it_reference_stack_trace,
        it_reference_stack_trace + number_of_expected_common_frames,
        it_stack_trace);

    ASSERT_EQ(mismatch.first,
              (it_reference_stack_trace + number_of_expected_common_frames))
        << "found difference in the range of frames expected to be equal!\n"
        << "position = "
        << std::distance(it_reference_stack_trace, mismatch.first) << '\n'
        << "stack trace = "
        << MakeString(it_stack_trace,
                      it_stack_trace + number_of_expected_common_frames)
        << '\n'
        << "reference stack trace = "
        << MakeString(
               it_reference_stack_trace,
               it_reference_stack_trace + number_of_expected_common_frames);
  }
};
376
// A default constructed record must be empty: no operation type, no address,
// no size, and not currently recording.
TEST_F(OperationRecordTest, VerifyConstructor) {
  OperationRecord record;

  EXPECT_EQ(record.GetOperationType(), OperationType::kNone);
  EXPECT_EQ(record.GetAddress(), nullptr);
  EXPECT_EQ(record.GetSize(), 0ul);
  EXPECT_FALSE(record.IsRecording());

  // The stack trace is not initialized by the constructor. Therefore, we do
  // not check it here.
}
388
// InitializeAllocation must store operation type, address and size, capture a
// stack trace matching the caller's, and leave the record non-recording.
TEST_F(OperationRecordTest, VerifyRecordAllocation) {
  const ReferenceStackTrace expected_trace = GetReferenceTrace();

  void* const expected_address = this;
  size_t const expected_size = sizeof(*this);

  OperationRecord record;

  ASSERT_TRUE(record.InitializeAllocation(expected_address, expected_size));

  EXPECT_EQ(OperationType::kAllocation, record.GetOperationType());
  EXPECT_EQ(expected_address, record.GetAddress());
  EXPECT_EQ(expected_size, record.GetSize());
  EXPECT_FALSE(record.IsRecording());

  VerifyStackTrace(expected_trace, record.GetStackTrace());
}
406
// InitializeFree must store operation type and address, report a size of 0,
// capture a stack trace matching the caller's, and leave the record
// non-recording.
TEST_F(OperationRecordTest, VerifyRecordFree) {
  const ReferenceStackTrace expected_trace = GetReferenceTrace();

  void* const expected_address = this;
  size_t const expected_size = 0;

  OperationRecord record;

  ASSERT_TRUE(record.InitializeFree(expected_address));

  EXPECT_EQ(OperationType::kFree, record.GetOperationType());
  EXPECT_EQ(expected_address, record.GetAddress());
  EXPECT_EQ(expected_size, record.GetSize());
  EXPECT_FALSE(record.IsRecording());

  VerifyStackTrace(expected_trace, record.GetStackTrace());
}
424
425 } // namespace base::debug::tracer
426