// Copyright 2023 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/debug/allocation_trace.h"
#include "base/allocator/dispatcher/dispatcher.h"
#include "base/debug/stack_trace.h"

#include "testing/gtest/include/gtest/gtest.h"

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <iterator>
#include <memory>
#include <sstream>
#include <string>
#include <type_traits>
#include <vector>

using testing::AssertionResult;
using testing::Test;

namespace {

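// Formats the elements of a range as a bracketed, comma-separated list, e.g.
// "[0x1,0x2,0x3]", for use in test-failure messages.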
template <typename Iterator>
std::string MakeString(Iterator begin, Iterator end) {
  using value_type = std::decay_t<decltype(*begin)>;
  std::ostringstream oss;
  oss << '[';
  if (begin != end) {
    auto last_element = end - 1;
    std::copy(begin, last_element, std::ostream_iterator<value_type>(oss, ","));
    oss << *last_element;
  }
  oss << ']';

  return oss.str();
}

template <typename C>
std::string MakeString(const C& data) {
  return MakeString(std::begin(data), std::end(data));
}

}  // namespace

namespace base::debug::tracer {

using base::allocator::dispatcher::AllocationSubsystem;

struct AllocationTraceRecorderTest : public Test {
  AllocationTraceRecorder& GetSubjectUnderTest() const {
    return *subject_under_test_;
  }

 protected:
  // During the test, Buffer holds a binary copy of the AllocationTraceRecorder
  // under test.
  struct Buffer {
    alignas(
        AllocationTraceRecorder) uint8_t data[sizeof(AllocationTraceRecorder)];
  };

 private:
  // The recorder under test. Depending on the number and size of traces, it
  // requires quite a lot of space. Therefore, we create it on the heap to
  // avoid running out of stack space.
  std::unique_ptr<AllocationTraceRecorder> const subject_under_test_ =
      std::make_unique<AllocationTraceRecorder>();
};

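// IsValid is expected to check integrity markers at the very beginning
// (prologue) and end (epilogue) of the recorder's memory image. Copy the
// recorder byte for byte and verify that corrupting either region is detected.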
TEST_F(AllocationTraceRecorderTest, VerifyIsValid) {
  AllocationTraceRecorder& subject_under_test = GetSubjectUnderTest();

  auto const buffer = std::make_unique<Buffer>();

  ASSERT_TRUE(buffer);

  auto* const buffered_recorder =
      reinterpret_cast<AllocationTraceRecorder*>(&(buffer->data[0]));

  // Verify IsValid returns true on the copied image.
  {
    memcpy(buffered_recorder, &subject_under_test,
           sizeof(AllocationTraceRecorder));
    EXPECT_TRUE(buffered_recorder->IsValid());
  }

  // Verify IsValid returns false when the prologue has been altered on the
  // copied image.
  {
    memcpy(buffered_recorder, &subject_under_test,
           sizeof(AllocationTraceRecorder));
    buffer->data[2] ^= 0xff;
    EXPECT_FALSE(buffered_recorder->IsValid());
  }

  // Verify IsValid returns false when the epilogue has been altered on the
  // copied image.
  {
    memcpy(buffered_recorder, &subject_under_test,
           sizeof(AllocationTraceRecorder));
    buffer->data[sizeof(AllocationTraceRecorder) - 2] ^= 0xff;
    EXPECT_FALSE(buffered_recorder->IsValid());
  }
}

TEST_F(AllocationTraceRecorderTest, VerifySingleAllocation) {
  AllocationTraceRecorder& subject_under_test = GetSubjectUnderTest();

  subject_under_test.OnAllocation(
      &subject_under_test, sizeof(subject_under_test),
      AllocationSubsystem::kPartitionAllocator, nullptr);

  EXPECT_EQ(1ul, subject_under_test.size());

  const auto& record_data = subject_under_test[0];
  const auto& stack_trace = record_data.GetStackTrace();

  EXPECT_EQ(OperationType::kAllocation, record_data.GetOperationType());
  EXPECT_EQ(&subject_under_test, record_data.GetAddress());
  EXPECT_EQ(sizeof(subject_under_test), record_data.GetSize());
  EXPECT_NE(nullptr, stack_trace.at(0));
}

TEST_F(AllocationTraceRecorderTest, VerifySingleFree) {
  AllocationTraceRecorder& subject_under_test = GetSubjectUnderTest();

  subject_under_test.OnFree(&subject_under_test);

  EXPECT_EQ(1ul, subject_under_test.size());

  const auto& record_data = subject_under_test[0];
  const auto& stack_trace = record_data.GetStackTrace();

  EXPECT_EQ(OperationType::kFree, record_data.GetOperationType());
  EXPECT_EQ(&subject_under_test, record_data.GetAddress());
  EXPECT_EQ(0ul, record_data.GetSize());
  EXPECT_NE(nullptr, stack_trace.at(0));
}

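// Record an interleaved sequence of allocations and frees and verify that
// every stored entry reports the expected operation type, address, and size.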
TEST_F(AllocationTraceRecorderTest, VerifyMultipleOperations) {
  AllocationTraceRecorder& subject_under_test = GetSubjectUnderTest();

  // We perform a number of operations.
  subject_under_test.OnAllocation(this, 1 * sizeof(*this),
                                  AllocationSubsystem::kPartitionAllocator,
                                  nullptr);

  subject_under_test.OnFree(this + 2);
  subject_under_test.OnAllocation(this + 3, 3 * sizeof(*this),
                                  AllocationSubsystem::kPartitionAllocator,
                                  nullptr);
  subject_under_test.OnAllocation(this + 4, 4 * sizeof(*this),
                                  AllocationSubsystem::kPartitionAllocator,
                                  nullptr);
  subject_under_test.OnFree(this + 5);
  subject_under_test.OnFree(this + 6);

  ASSERT_EQ(subject_under_test.size(), 6ul);

  // Verify that the stored operations match the expected ones.
  {
    const auto& entry = subject_under_test[0];
    ASSERT_EQ(entry.GetOperationType(), OperationType::kAllocation);
    ASSERT_EQ(entry.GetAddress(), this);
    ASSERT_EQ(entry.GetSize(), 1 * sizeof(*this));
    ASSERT_NE(entry.GetStackTrace()[0], nullptr);
  }
  {
    const auto& entry = subject_under_test[1];
    ASSERT_EQ(entry.GetOperationType(), OperationType::kFree);
    ASSERT_EQ(entry.GetAddress(), (this + 2));
    ASSERT_EQ(entry.GetSize(), 0ul);
    ASSERT_NE(entry.GetStackTrace()[0], nullptr);
  }
  {
    const auto& entry = subject_under_test[2];
    ASSERT_EQ(entry.GetOperationType(), OperationType::kAllocation);
    ASSERT_EQ(entry.GetAddress(), (this + 3));
    ASSERT_EQ(entry.GetSize(), 3 * sizeof(*this));
    ASSERT_NE(entry.GetStackTrace()[0], nullptr);
  }
  {
    const auto& entry = subject_under_test[3];
    ASSERT_EQ(entry.GetOperationType(), OperationType::kAllocation);
    ASSERT_EQ(entry.GetAddress(), (this + 4));
    ASSERT_EQ(entry.GetSize(), 4 * sizeof(*this));
    ASSERT_NE(entry.GetStackTrace()[0], nullptr);
  }
  {
    const auto& entry = subject_under_test[4];
    ASSERT_EQ(entry.GetOperationType(), OperationType::kFree);
    ASSERT_EQ(entry.GetAddress(), (this + 5));
    ASSERT_EQ(entry.GetSize(), 0ul);
    ASSERT_NE(entry.GetStackTrace()[0], nullptr);
  }
  {
    const auto& entry = subject_under_test[5];
    ASSERT_EQ(entry.GetOperationType(), OperationType::kFree);
    ASSERT_EQ(entry.GetAddress(), (this + 6));
    ASSERT_EQ(entry.GetSize(), 0ul);
    ASSERT_NE(entry.GetStackTrace()[0], nullptr);
  }
}

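// Fill the recorder to its full capacity, alternating between allocations and
// frees, then trigger one more operation and verify that it wraps around and
// overwrites the oldest record.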
TEST_F(AllocationTraceRecorderTest, VerifyOverflowOfOperations) {
  AllocationTraceRecorder& subject_under_test = GetSubjectUnderTest();

  decltype(subject_under_test.GetMaximumNumberOfTraces()) idx;
  for (idx = 0; idx < subject_under_test.GetMaximumNumberOfTraces(); ++idx) {
    ASSERT_EQ(subject_under_test.size(), idx);
    const bool is_allocation = !(idx & 0x1);

    // Record an allocation or free.
    if (is_allocation) {
      subject_under_test.OnAllocation(
          this + idx, idx, AllocationSubsystem::kPartitionAllocator, nullptr);
    } else {
      subject_under_test.OnFree(this + idx);
    }

    // Verify the recorder's state after each operation.
    {
      ASSERT_EQ(subject_under_test.size(), (idx + 1));

      // Verify the entry that was just added.
      {
        const auto& last_entry = subject_under_test[idx];
        ASSERT_EQ(last_entry.GetAddress(), (this + idx));
        // No full verification intended, just a check that something has been
        // written.
        ASSERT_NE(last_entry.GetStackTrace()[0], nullptr);
        if (is_allocation) {
          ASSERT_EQ(last_entry.GetOperationType(), OperationType::kAllocation);
          ASSERT_EQ(last_entry.GetSize(), idx);
        } else {
          ASSERT_EQ(last_entry.GetOperationType(), OperationType::kFree);
          ASSERT_EQ(last_entry.GetSize(), 0ul);
        }
      }

      // The first entry must remain unchanged. Note that it was recorded as
      // an allocation of size idx == 0, which is why a size of 0 is expected.
      {
        const auto& first_entry = subject_under_test[0];
        ASSERT_EQ(first_entry.GetOperationType(), OperationType::kAllocation);
        ASSERT_EQ(first_entry.GetAddress(), this);
        ASSERT_EQ(first_entry.GetSize(), 0ul);
      }
    }
  }

  // By now we have written all available records, including the last one.
  // So the following allocation should overwrite the first record.
  {
    const auto& old_second_entry = subject_under_test[1];

    subject_under_test.OnAllocation(
        this + idx, idx, AllocationSubsystem::kPartitionAllocator, nullptr);
    ASSERT_EQ(subject_under_test.size(),
              subject_under_test.GetMaximumNumberOfTraces());
    const auto& last_entry =
        subject_under_test[subject_under_test.GetMaximumNumberOfTraces() - 1];
    ASSERT_EQ(last_entry.GetOperationType(), OperationType::kAllocation);
    ASSERT_EQ(last_entry.GetAddress(), (this + idx));

    // Check that the previous first entry (an allocation) is gone. Accessing
    // the first record now yields what was previously the second record (a
    // free operation).
    const auto& first_entry = subject_under_test[0];

    ASSERT_EQ(&old_second_entry, &first_entry);
    ASSERT_EQ(first_entry.GetOperationType(), OperationType::kFree);
    ASSERT_EQ(first_entry.GetAddress(), (this + 1));
  }
}

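// End-to-end check: register the recorder with the allocation dispatcher so
// that real heap activity is reported to it via the allocation hooks.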
TEST_F(AllocationTraceRecorderTest, VerifyWithHooks) {
  auto& dispatcher = base::allocator::dispatcher::Dispatcher::GetInstance();
  AllocationTraceRecorder& subject_under_test = GetSubjectUnderTest();

  dispatcher.InitializeForTesting(&subject_under_test);

  // Perform an allocation and a free. The temporary unique_ptr is destroyed
  // immediately, so the matching free is recorded as well.
  std::make_unique<std::string>(
      "Just enforce an allocation and free to trigger notification of the "
      "subject under test. Hopefully this string is long enough to bypass any "
      "small string optimizations that the STL implementation might use.");

  dispatcher.ResetForTesting();

  // We only test for greater-equal since allocations from other parts of GTest
  // might have interfered.
  EXPECT_GE(subject_under_test.size(), 2ul);
}

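// Fixture for tests of OperationRecord, the single-entry type stored by
// AllocationTraceRecorder. It provides helpers to capture a reference stack
// trace and to compare it against a recorded one.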
class OperationRecordTest : public Test {
 protected:
  using ReferenceStackTrace = std::vector<const void*>;

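  // Captures the current call stack via frame pointers. The result serves as
  // the reference that a recorded stack trace is compared against.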
  ReferenceStackTrace GetReferenceTrace() {
    constexpr size_t max_trace_size = 128;
    const void* frame_pointers[max_trace_size]{nullptr};
    const auto num_frames = base::debug::TraceStackFramePointers(
        &frame_pointers[0], max_trace_size, 0);
    ReferenceStackTrace trace;
    std::copy_n(std::begin(frame_pointers), num_frames,
                std::back_inserter(trace));
    return trace;
  }

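  // Asserts that |stack_trace| and |reference_stack_trace| agree from their
  // first common frame onwards.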
  void VerifyStackTrace(
      const ReferenceStackTrace& reference_stack_trace,
      const base::debug::tracer::StackTraceContainer& stack_trace) {
    // Verify that we have at least one entry in the stack.
    ASSERT_NE(nullptr, stack_trace.at(0));
    ASSERT_GT(stack_trace.size(), 0ul);

    // Although functions are marked ALWAYS_INLINE, the compiler may choose not
    // to inline, depending e.g. on the optimization level. Therefore, we
    // search for the first common frame in both stack traces. From there on,
    // both must be equal for the remaining number of frames.
    auto* const* const it_stack_trace_begin = std::begin(stack_trace);
    auto* const* const it_stack_trace_end =
        std::find(it_stack_trace_begin, std::end(stack_trace), nullptr);
    auto const it_reference_stack_trace_end = std::end(reference_stack_trace);

    auto* const* it_stack_trace = std::find_first_of(
        it_stack_trace_begin, it_stack_trace_end,
        std::begin(reference_stack_trace), it_reference_stack_trace_end);

    ASSERT_NE(it_stack_trace, it_stack_trace_end)
        << "stack trace and reference stack trace share no common frame!\n"
        << "stack trace = " << MakeString(stack_trace) << '\n'
        << "reference stack trace = " << MakeString(reference_stack_trace);

    // Find the common frame in the reference stack trace.
    const auto it_reference_stack_trace =
        std::find(std::begin(reference_stack_trace),
                  it_reference_stack_trace_end, *it_stack_trace);

    const auto number_of_expected_common_frames = std::min(
        std::distance(it_stack_trace, it_stack_trace_end),
        std::distance(it_reference_stack_trace, it_reference_stack_trace_end));

    // Check if there is any difference within the range of frames that we
    // expect to be equal.
    const auto mismatch = std::mismatch(
        it_reference_stack_trace,
        it_reference_stack_trace + number_of_expected_common_frames,
        it_stack_trace);

    ASSERT_EQ(mismatch.first,
              (it_reference_stack_trace + number_of_expected_common_frames))
        << "found a difference in the range of frames expected to be equal!\n"
        << "position = "
        << std::distance(it_reference_stack_trace, mismatch.first) << '\n'
        << "stack trace = "
        << MakeString(it_stack_trace,
                      it_stack_trace + number_of_expected_common_frames)
        << '\n'
        << "reference stack trace = "
        << MakeString(
               it_reference_stack_trace,
               it_reference_stack_trace + number_of_expected_common_frames);
  }
};

TEST_F(OperationRecordTest, VerifyConstructor) {
  OperationRecord subject_under_test;

  EXPECT_EQ(subject_under_test.GetOperationType(), OperationType::kNone);
  EXPECT_EQ(subject_under_test.GetAddress(), nullptr);
  EXPECT_EQ(subject_under_test.GetSize(), 0ul);
  EXPECT_FALSE(subject_under_test.IsRecording());

  // The stack trace is not initialized by the constructor. Therefore, we do
  // not check it here.
}

TEST_F(OperationRecordTest, VerifyRecordAllocation) {
  const ReferenceStackTrace reference_trace = GetReferenceTrace();

  void* const address = this;
  size_t const size = sizeof(*this);

  OperationRecord subject_under_test;

  ASSERT_TRUE(subject_under_test.InitializeAllocation(address, size));

  EXPECT_EQ(OperationType::kAllocation, subject_under_test.GetOperationType());
  EXPECT_EQ(address, subject_under_test.GetAddress());
  EXPECT_EQ(size, subject_under_test.GetSize());
  EXPECT_FALSE(subject_under_test.IsRecording());

  VerifyStackTrace(reference_trace, subject_under_test.GetStackTrace());
}

TEST_F(OperationRecordTest, VerifyRecordFree) {
  const ReferenceStackTrace reference_trace = GetReferenceTrace();

  void* const address = this;
  size_t const size = 0;

  OperationRecord subject_under_test;

  ASSERT_TRUE(subject_under_test.InitializeFree(address));

  EXPECT_EQ(OperationType::kFree, subject_under_test.GetOperationType());
  EXPECT_EQ(address, subject_under_test.GetAddress());
  EXPECT_EQ(size, subject_under_test.GetSize());
  EXPECT_FALSE(subject_under_test.IsRecording());

  VerifyStackTrace(reference_trace, subject_under_test.GetStackTrace());
}

}  // namespace base::debug::tracer