1 // Copyright 2024 The Pigweed Authors
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License"); you may not
4 // use this file except in compliance with the License. You may obtain a copy of
5 // the License at
6 //
7 // https://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
11 // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
12 // License for the specific language governing permissions and limitations under
13 // the License.
14 #pragma once
15
#include <algorithm>
#include <array>
#include <cstddef>
#include <cstdint>
#include <initializer_list>
#include <limits>

#include "pw_allocator/block.h"
#include "pw_allocator/block_allocator.h"
#include "pw_assert/assert.h"
#include "pw_bytes/alignment.h"
#include "pw_status/status.h"
#include "pw_unit_test/framework.h"
26
27 namespace pw::allocator::test {
28
29 // Forward declaration.
30 struct Preallocation;
31
/// Test fixture responsible for managing a memory region and an allocator that
/// allocates blocks of memory from it.
///
/// This base class contains all the code that does not depend on specific
/// `Block` or `BlockAllocator` types.
class BlockAllocatorTestBase : public ::testing::Test {
 public:
  static constexpr size_t kDefaultBlockOverhead = Block<>::kBlockOverhead;

  // Size of the memory region to use in the tests below.
  // This must be large enough so that BlockType::Init does not fail.
  static constexpr size_t kCapacity = 1024;

  // The number of allocated pointers cached by the test fixture.
  static constexpr size_t kNumPtrs = 16;

  // Represents the sizes of various allocations.
  // "Inner" sizes are usable payload; "outer" sizes include block overhead.
  static constexpr size_t kLargeInnerSize = kCapacity / 8;
  static constexpr size_t kLargeOuterSize =
      kDefaultBlockOverhead + kLargeInnerSize;

  static constexpr size_t kSmallInnerSize = kDefaultBlockOverhead * 2;
  static constexpr size_t kSmallOuterSize =
      kDefaultBlockOverhead + kSmallInnerSize;

  static constexpr size_t kSmallerOuterSize = kSmallInnerSize;
  static constexpr size_t kLargerOuterSize =
      kLargeOuterSize + kSmallerOuterSize;

 protected:
  // Test fixtures.
  void SetUp() override;

  /// Returns the underlying memory region.
  virtual ByteSpan GetBytes() = 0;

  /// Initialize the allocator with a region of memory and return it.
  virtual Allocator& GetAllocator() = 0;

  /// Initialize the allocator with a sequence of preallocated blocks and return
  /// it.
  ///
  /// See also ``Preallocation``.
  virtual Allocator& GetAllocator(
      std::initializer_list<Preallocation> preallocations) = 0;

  /// Gets the next allocation from an allocated pointer.
  virtual void* NextAfter(size_t index) = 0;

  /// Store an allocated pointer in the test's cache of pointers.
  void Store(size_t index, void* ptr);

  /// Retrieve an allocated pointer from the test's cache of pointers.
  void* Fetch(size_t index);

  /// Ensures the memory is usable by writing to it.
  void UseMemory(void* ptr, size_t size);

  // Unit tests.
  // Each method below is the body of a type-parameterized test; concrete test
  // suites instantiate the fixture and invoke these.
  void GetCapacity();
  void AllocateLarge();
  void AllocateSmall();
  void AllocateTooLarge();
  void AllocateLargeAlignment();
  void AllocateAlignmentFailure();
  void DeallocateNull();
  void DeallocateShuffled();
  void ResizeNull();
  void ResizeLargeSame();
  void ResizeLargeSmaller();
  void ResizeLargeLarger();
  void ResizeLargeLargerFailure();
  void ResizeSmallSame();
  void ResizeSmallSmaller();
  void ResizeSmallLarger();
  void ResizeSmallLargerFailure();

 private:
  // Cache of pointers returned by the allocator under test; non-null entries
  // are deallocated by derived fixtures during teardown.
  std::array<void*, kNumPtrs> ptrs_;
};
112
/// Test fixture responsible for managing a memory region and an allocator that
/// allocates blocks of memory from it.
///
/// This derived class contains all the code that depends on specific `Block`
/// or `BlockAllocator` types.
///
/// @tparam BlockAllocatorType The type of the `BlockAllocator` being tested.
template <typename BlockAllocatorType>
class BlockAllocatorTest : public BlockAllocatorTestBase {
 public:
  using BlockType = typename BlockAllocatorType::BlockType;

 protected:
  // Test fixtures.
  /// Wraps an externally-owned allocator; the fixture does not take ownership.
  BlockAllocatorTest(BlockAllocatorType& allocator) : allocator_(allocator) {}

  /// Returns the fixture's backing storage as a byte span.
  ByteSpan GetBytes() override { return ByteSpan(buffer_); }

  Allocator& GetAllocator() override;

  Allocator& GetAllocator(
      std::initializer_list<Preallocation> preallocations) override;

  void* NextAfter(size_t index) override;

  /// Deallocates any cached pointers and resets the allocator.
  void TearDown() override;

  // Unit tests.
  static void CanAutomaticallyInit(BlockAllocatorType& allocator);
  void CanExplicitlyInit(BlockAllocatorType& allocator);
  void IterateOverBlocks();
  void CanMeasureFragmentation();

 private:
  BlockAllocatorType& allocator_;
  // Backing storage carved into blocks; aligned for the block type in use.
  alignas(BlockType::kAlignment) std::array<std::byte, kCapacity> buffer_;
};
150
/// Represents an initial state for a memory block.
///
/// Unit tests can specify an initial block layout by passing a list of these
/// structs to `Preallocate`.
///
/// The outer size of each block must be at least `kBlockOverhead` for the
/// block type in use. The special `kSizeRemaining` may be used for at most
/// one block to give it any space not assigned to other blocks.
///
/// The index must be less than `BlockAllocatorTestBase::kNumPtrs` or
/// one of the special values `kIndexFree` or `kIndexNext`. A regular index will
/// mark the block as "used" and cache the pointer to its usable space in
/// `ptrs[index]`. The special value `kIndexFree` will leave the block as
/// "free". The special value `kIndexNext` will mark the block as "used" and
/// cache its pointer in the next available slot in `test_fixture`. This
/// may be used when the pointer is not needed for the test but should still
/// be automatically freed at the end of the test.
///
/// Example:
/// @code{.cpp}
///   // BlockType = UnpoisonedBlock<uint32_t>, so kBlockOverhead == 8.
///   ASSERT_EQ(Preallocate({
///     {32,              0},           // ptrs[0] == 24 byte region.
///     {24,              kIndexFree},  // Free block of 16 bytes.
///     {48,              2},           // ptrs[2] == 40 byte region.
///     {kSizeRemaining,  kIndexFree},  // Free block of leftover space.
///     {64,              4},           // ptrs[4] == 56 byte region from the
///                                     //   end of the allocator.
///   }), OkStatus());
/// @endcode
struct Preallocation {
  /// The outer size of the block to preallocate.
  size_t outer_size;

  /// Index into the `test_fixture` array where the pointer to the block's
  /// space should be cached.
  size_t index;

  /// Special value indicating the block should comprise all remaining
  /// space not preallocated to any other block. May be used at most once.
  static constexpr size_t kSizeRemaining = std::numeric_limits<size_t>::max();

  /// Special value indicating the block should be treated as unallocated,
  /// i.e. its pointer should not be cached.
  static constexpr size_t kIndexFree = BlockAllocatorTestBase::kNumPtrs + 1;

  /// Special value indicating to use the next available index.
  static constexpr size_t kIndexNext = BlockAllocatorTestBase::kNumPtrs + 2;
};
200
201 // Test fixture template method implementations.
202
203 template <typename BlockAllocatorType>
GetAllocator()204 Allocator& BlockAllocatorTest<BlockAllocatorType>::GetAllocator() {
205 allocator_.Init(GetBytes());
206 return allocator_;
207 }
208
209 template <typename BlockAllocatorType>
GetAllocator(std::initializer_list<Preallocation> preallocations)210 Allocator& BlockAllocatorTest<BlockAllocatorType>::GetAllocator(
211 std::initializer_list<Preallocation> preallocations) {
212 // First, look if any blocks use kSizeRemaining, and calculate how large
213 // that will be.
214 size_t remaining_outer_size = kCapacity;
215 for (auto& preallocation : preallocations) {
216 if (preallocation.outer_size != Preallocation::kSizeRemaining) {
217 size_t outer_size =
218 AlignUp(preallocation.outer_size, BlockType::kAlignment);
219 PW_ASSERT(remaining_outer_size >= outer_size);
220 remaining_outer_size -= outer_size;
221 }
222 }
223
224 Result<BlockType*> result = BlockType::Init(GetBytes());
225 BlockType* block = *result;
226 void* begin = block->UsableSpace();
227
228 // To prevent free blocks being merged back into the block of available
229 // space, treat the available space as being used.
230 block->MarkUsed();
231
232 size_t next_index = 0;
233 for (auto& preallocation : preallocations) {
234 PW_ASSERT(block != nullptr);
235
236 // Perform the allocation.
237 size_t outer_size = preallocation.outer_size;
238 if (outer_size == Preallocation::kSizeRemaining) {
239 outer_size = remaining_outer_size;
240 remaining_outer_size = 0;
241 }
242 size_t inner_size = outer_size - BlockType::kBlockOverhead;
243
244 block->MarkFree();
245 Layout layout(inner_size, 1);
246 PW_ASSERT(BlockType::AllocFirst(block, layout).ok());
247 if (!block->Last()) {
248 block->Next()->MarkUsed();
249 }
250
251 // Free the block or cache the allocated pointer.
252 if (preallocation.index == Preallocation::kIndexFree) {
253 BlockType::Free(block);
254
255 } else if (preallocation.index == Preallocation::kIndexNext) {
256 while (true) {
257 PW_ASSERT(next_index < kNumPtrs);
258 if (Fetch(next_index) == nullptr &&
259 std::all_of(preallocations.begin(),
260 preallocations.end(),
261 [next_index](const Preallocation& other) {
262 return other.index != next_index;
263 })) {
264 break;
265 }
266 ++next_index;
267 }
268 Store(next_index, block->UsableSpace());
269
270 } else {
271 Store(preallocation.index, block->UsableSpace());
272 }
273 block = block->Next();
274 }
275 if (block != nullptr) {
276 block->MarkFree();
277 }
278 block = BlockType::FromUsableSpace(begin);
279 allocator_.Init(block);
280 return allocator_;
281 }
282
283 template <typename BlockAllocatorType>
NextAfter(size_t index)284 void* BlockAllocatorTest<BlockAllocatorType>::NextAfter(size_t index) {
285 void* ptr = Fetch(index);
286 if (ptr == nullptr) {
287 return nullptr;
288 }
289
290 auto* block = BlockType::FromUsableSpace(ptr);
291 while (!block->Last()) {
292 block = block->Next();
293 if (block->Used()) {
294 return block->UsableSpace();
295 }
296 }
297 return nullptr;
298 }
299
300 template <typename BlockAllocatorType>
TearDown()301 void BlockAllocatorTest<BlockAllocatorType>::TearDown() {
302 for (size_t i = 0; i < kNumPtrs; ++i) {
303 if (Fetch(i) != nullptr) {
304 allocator_.Deallocate(Fetch(i));
305 }
306 }
307 allocator_.Reset();
308 }
309
310 // Unit tests template method implementations.
311
312 template <typename BlockAllocatorType>
CanAutomaticallyInit(BlockAllocatorType & allocator)313 void BlockAllocatorTest<BlockAllocatorType>::CanAutomaticallyInit(
314 BlockAllocatorType& allocator) {
315 EXPECT_NE(*(allocator.blocks().begin()), nullptr);
316 }
317
318 template <typename BlockAllocatorType>
CanExplicitlyInit(BlockAllocatorType & allocator)319 void BlockAllocatorTest<BlockAllocatorType>::CanExplicitlyInit(
320 BlockAllocatorType& allocator) {
321 EXPECT_EQ(*(allocator.blocks().begin()), nullptr);
322 allocator.Init(GetBytes());
323 EXPECT_NE(*(allocator.blocks().begin()), nullptr);
324 }
325
326 template <typename BlockAllocatorType>
IterateOverBlocks()327 void BlockAllocatorTest<BlockAllocatorType>::IterateOverBlocks() {
328 Allocator& allocator = GetAllocator({
329 {kSmallOuterSize, Preallocation::kIndexFree},
330 {kLargeOuterSize, Preallocation::kIndexNext},
331 {kSmallOuterSize, Preallocation::kIndexFree},
332 {kLargeOuterSize, Preallocation::kIndexNext},
333 {kSmallOuterSize, Preallocation::kIndexFree},
334 {kLargeOuterSize, Preallocation::kIndexNext},
335 {Preallocation::kSizeRemaining, Preallocation::kIndexFree},
336 });
337
338 // Count the blocks. The unallocated ones vary in size, but the allocated ones
339 // should all be the same.
340 size_t free_count = 0;
341 size_t used_count = 0;
342 auto& block_allocator = static_cast<BlockAllocatorType&>(allocator);
343 for (auto* block : block_allocator.blocks()) {
344 if (block->Used()) {
345 EXPECT_EQ(block->OuterSize(), kLargeOuterSize);
346 ++used_count;
347 } else {
348 ++free_count;
349 }
350 }
351 EXPECT_EQ(used_count, 3U);
352 EXPECT_EQ(free_count, 4U);
353 }
354
355 template <typename BlockAllocatorType>
CanMeasureFragmentation()356 void BlockAllocatorTest<BlockAllocatorType>::CanMeasureFragmentation() {
357 Allocator& allocator = GetAllocator({
358 {0x020, Preallocation::kIndexFree},
359 {0x040, Preallocation::kIndexNext},
360 {0x080, Preallocation::kIndexFree},
361 {0x100, Preallocation::kIndexNext},
362 {0x200, Preallocation::kIndexFree},
363 {Preallocation::kSizeRemaining, Preallocation::kIndexNext},
364 });
365
366 auto& block_allocator = static_cast<BlockAllocatorType&>(allocator);
367 constexpr size_t alignment = BlockAllocatorType::BlockType::kAlignment;
368 size_t sum_of_squares = 0;
369 size_t sum = 0;
370 for (const auto block : block_allocator.blocks()) {
371 if (!block->Used()) {
372 size_t inner_size = block->InnerSize() / alignment;
373 sum_of_squares += inner_size * inner_size;
374 sum += inner_size;
375 }
376 }
377
378 Fragmentation fragmentation = block_allocator.MeasureFragmentation();
379 EXPECT_EQ(fragmentation.sum_of_squares.hi, 0U);
380 EXPECT_EQ(fragmentation.sum_of_squares.lo, sum_of_squares);
381 EXPECT_EQ(fragmentation.sum, sum);
382 }
383
384 } // namespace pw::allocator::test
385