// Copyright 2024 The Pigweed Authors
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy of
// the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
#pragma once

#include <cstddef>
#include <cstdint>

#include "pw_allocator/allocator.h"
#include "pw_allocator/block/basic.h"
#include "pw_allocator/block/iterable.h"
#include "pw_allocator/block/poisonable.h"
#include "pw_allocator/block/result.h"
#include "pw_allocator/block/with_layout.h"
#include "pw_allocator/capability.h"
#include "pw_allocator/config.h"
#include "pw_allocator/fragmentation.h"
#include "pw_allocator/hardening.h"
#include "pw_assert/assert.h"
#include "pw_bytes/span.h"
#include "pw_result/result.h"
#include "pw_status/status.h"

namespace pw::allocator {
namespace internal {

/// Block-independent base class of ``BlockAllocator``.
///
/// This class contains static methods which do not depend on the template
/// parameters of ``BlockAllocator`` that are used to determine the block type.
/// This allows the methods to be defined in a separate source file and use
/// macros that cannot be used in headers, e.g. PW_CHECK.
///
/// This class should not be used directly. Instead, use ``BlockAllocator`` or
/// one of its specializations.
class GenericBlockAllocator : public Allocator {
 public:
  // Not copyable or movable.
  GenericBlockAllocator(const GenericBlockAllocator&) = delete;
  GenericBlockAllocator& operator=(const GenericBlockAllocator&) = delete;
  GenericBlockAllocator(GenericBlockAllocator&&) = delete;
  GenericBlockAllocator& operator=(GenericBlockAllocator&&) = delete;

 protected:
  template <typename BlockType>
  static constexpr Capabilities GetCapabilities() {
    Capabilities common = kImplementsGetUsableLayout |
                          kImplementsGetAllocatedLayout |
                          kImplementsGetCapacity | kImplementsRecognizes;
    if constexpr (has_layout_v<BlockType>) {
      return common | kImplementsGetRequestedLayout;
    } else {
      return common;
    }
  }

  constexpr explicit GenericBlockAllocator(Capabilities capabilities)
      : Allocator(capabilities) {}

  /// Crashes with an informational message that a given block is allocated.
  ///
  /// This method is meant to be called by ``BlockAllocator``'s destructor.
  /// There must not be any outstanding allocations from an allocator when it
  /// is destroyed.
  [[noreturn]] static void CrashOnAllocated(const void* allocated);

  /// Crashes with an informational message that a given pointer does not belong
  /// to this allocator.
  [[noreturn]] static void CrashOnOutOfRange(const void* freed);

  /// Crashes with an informational message that a given block was freed twice.
  [[noreturn]] static void CrashOnDoubleFree(const void* freed);
};

}  // namespace internal

namespace test {

// Forward declaration for friending.
template <typename, size_t>
class BlockAllocatorTest;

}  // namespace test

/// A memory allocator that uses a list of blocks.
///
/// This class does not implement `ChooseBlock` and cannot be used directly.
/// Instead, use one of its specializations.
///
/// NOTE!! Do NOT use memory returned from this allocator as the backing for
/// another allocator. If this is done, the `Query` method may incorrectly
/// think pointers returned by that allocator were created by this one, and
/// report that this allocator can de/reallocate them.
template <typename BlockType_>
class BlockAllocator : public internal::GenericBlockAllocator {
 private:
  using Base = internal::GenericBlockAllocator;

 public:
  using BlockType = BlockType_;
  using Range = typename BlockType::Range;

  static constexpr Capabilities kCapabilities =
      Base::GetCapabilities<BlockType>();
  static constexpr size_t kPoisonInterval = PW_ALLOCATOR_BLOCK_POISON_INTERVAL;

  ~BlockAllocator() override;

  /// Returns a ``Range`` of blocks tracking the memory of this allocator.
  Range blocks() const;

  /// Sets the memory region to be used by this allocator.
  ///
  /// This method will instantiate an initial block using the memory region.
  ///
  /// @param[in]  region  Region of memory to use when satisfying allocation
  ///                     requests. The region MUST be valid as an argument to
  ///                     `BlockType::Init`.
  void Init(ByteSpan region);

  /// Returns fragmentation information for the block allocator's memory region.
  Fragmentation MeasureFragmentation() const;

 protected:
  constexpr explicit BlockAllocator() : Base(kCapabilities) {}

  /// Sets the blocks to be used by this allocator.
  ///
  /// This method will use the sequence of blocks including and following
  /// `begin`. These blocks must be valid.
  ///
  /// @param[in]  begin               The first block for this allocator.
  ///                                 The block must not have a previous block.
  void Init(BlockType* begin);

  /// Returns the block associated with a pointer.
  ///
  /// If the given pointer is to this allocator's memory region, but not to a
  /// valid block, the memory is corrupted and this method will crash to assist
  /// in uncovering the underlying bug.
  ///
  /// @param  ptr           Pointer to an allocated block's usable space.
  ///
  /// @returns @rst
  ///
  /// .. pw-status-codes::
  ///
  ///    OK: Result contains a pointer to the block.
  ///
  ///    OUT_OF_RANGE: Given pointer is outside the allocator's memory.
  ///
  /// @endrst
  template <typename Ptr>
  internal::copy_const_ptr_t<Ptr, BlockType*> FromUsableSpace(Ptr ptr) const;

  /// Frees the given block.
  ///
  /// Derived classes may override this method to hook or even defer freeing
  /// blocks.
  virtual void DeallocateBlock(BlockType*&& block);

 private:
  using BlockResultPrev = internal::GenericBlockResult::Prev;
  using BlockResultNext = internal::GenericBlockResult::Next;

  // Let unit tests call internal methods in order to "preallocate" blocks.
  template <typename, size_t>
  friend class test::BlockAllocatorTest;

  /// @copydoc Allocator::Allocate
  void* DoAllocate(Layout layout) override;

  /// @copydoc Allocator::Deallocate
  void DoDeallocate(void* ptr) override;

  /// @copydoc Allocator::Deallocate
  void DoDeallocate(void* ptr, Layout) override { DoDeallocate(ptr); }

  /// @copydoc Allocator::Resize
  bool DoResize(void* ptr, size_t new_size) override;

  /// @copydoc Allocator::GetAllocated
  size_t DoGetAllocated() const override { return allocated_; }

  /// @copydoc Deallocator::GetInfo
  Result<Layout> DoGetInfo(InfoType info_type, const void* ptr) const override;

  /// Selects a free block to allocate from.
  ///
  /// This method represents the allocator-specific strategy of choosing which
  /// block should be used to satisfy allocation requests. If the returned
  /// result indicates success, it holds the chosen block.
  ///
  /// @param  layout  Same as ``Allocator::Allocate``.
  virtual BlockResult<BlockType> ChooseBlock(Layout layout) = 0;

  /// Indicates that a block will no longer be free.
  ///
  /// Does nothing by default. Derived classes may override this method to do
  /// additional bookkeeping.
  ///
  /// @param  block   The block being reserved.
  virtual void ReserveBlock(BlockType&) {}

  /// Indicates that a block is now free.
  ///
  /// Does nothing by default. Derived classes may override this method to do
  /// additional bookkeeping.
  ///
  /// @param  block   The block being recycled.
  virtual void RecycleBlock(BlockType&) {}

  /// Completes any pending deallocations.
  ///
  /// After calling this method, all memory that has been passed to `Deallocate`
  /// will be free and available for subsequent allocations.
  ///
  /// An allocator implementation may wish to defer doing the work of freeing a
  /// block and potentially merging it with its neighbors in case it
  /// subsequently receives a request for the block's exact size. In this case
  /// it must override this method and clear any caches of freed blocks.
  ///
  /// Allocators that free blocks immediately upon a call to `DoDeallocate` can
  /// simply use the default implementation that does nothing.
  virtual void Flush() {}

  /// Returns whether the previous block exists and is free.
  static bool PrevIsFree(const BlockType* block) {
    auto* prev = block->Prev();
    return prev != nullptr && prev->IsFree();
  }

  /// Returns whether the next block exists and is free.
  static bool NextIsFree(const BlockType* block) {
    auto* next = block->Next();
    return next != nullptr && next->IsFree();
  }

  /// Ensures the pointer to the last block is correct after the given block is
  /// allocated or freed.
  void UpdateLast(BlockType* block);

  // Represents the range of blocks for this allocator.
  size_t capacity_ = 0;
  size_t allocated_ = 0;
  BlockType* first_ = nullptr;
  BlockType* last_ = nullptr;
  uint16_t unpoisoned_ = 0;
};
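
// Illustrative usage (not part of this header): a rough sketch of how a
// concrete specialization might be set up and used. The derived type, buffer
// size, and variable names below are assumptions for illustration; see the
// concrete specializations (e.g. "pw_allocator/first_fit.h") for real
// examples.
//
//   std::array<std::byte, 1024> buffer;
//   FirstFitAllocator<> allocator;
//   allocator.Init(buffer);                      // Creates the initial block.
//   void* ptr = allocator.Allocate(Layout(64));  // Carved from a free block.
//   allocator.Deallocate(ptr);                   // Freed and merged if possible.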

// Template method implementations

template <typename BlockType>
BlockAllocator<BlockType>::~BlockAllocator() {
  if constexpr (Hardening::kIncludesRobustChecks) {
    for (auto* block : blocks()) {
      if (!block->IsFree()) {
        CrashOnAllocated(block);
      }
    }
  }
}

template <typename BlockType>
typename BlockAllocator<BlockType>::Range BlockAllocator<BlockType>::blocks()
    const {
  return Range(first_);
}

template <typename BlockType>
void BlockAllocator<BlockType>::Init(ByteSpan region) {
  Result<BlockType*> result = BlockType::Init(region);
  Init(*result);
}

template <typename BlockType>
void BlockAllocator<BlockType>::Init(BlockType* begin) {
  if constexpr (Hardening::kIncludesRobustChecks) {
    PW_ASSERT(begin != nullptr);
    PW_ASSERT(begin->Prev() == nullptr);
  }
  first_ = begin;
  for (auto* block : blocks()) {
    last_ = block;
    capacity_ += block->OuterSize();
    if (block->IsFree()) {
      RecycleBlock(*block);
    }
  }
}
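
// For example (illustrative numbers): if `begin` heads a sequence of two valid
// blocks with outer sizes of 128 and 896 bytes, the loop above leaves
// `capacity_` at 1024 and `last_` pointing at the second block.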

template <typename BlockType>
void* BlockAllocator<BlockType>::DoAllocate(Layout layout) {
  if (capacity_ == 0) {
    // Not initialized.
    return nullptr;
  }

  if constexpr (Hardening::kIncludesDebugChecks) {
    PW_ASSERT(last_->Next() == nullptr);
  }
  auto result = ChooseBlock(layout);
  if (!result.ok()) {
    // No valid block for request.
    return nullptr;
  }
  BlockType* block = result.block();
  allocated_ += block->OuterSize();
  switch (result.prev()) {
    case BlockResultPrev::kSplitNew:
      // New free blocks may be created when allocating.
      RecycleBlock(*(block->Prev()));
      break;
    case BlockResultPrev::kResizedLarger:
      // Extra bytes may be appended to the previous block.
      allocated_ += result.size();
      break;
    case BlockResultPrev::kUnchanged:
    case BlockResultPrev::kResizedSmaller:
      break;
  }
  if (result.next() == BlockResultNext::kSplitNew) {
    RecycleBlock(*(block->Next()));
  }

  UpdateLast(block);
  if constexpr (Hardening::kIncludesDebugChecks) {
    PW_ASSERT(block <= last_);
  }

  return block->UsableSpace();
}
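
// Accounting example (illustrative numbers): if the chosen block ends up with
// an outer size of 64 bytes and its previous block had to grow by 8 bytes to
// absorb alignment padding (kResizedLarger), `allocated_` increases by
// 64 + 8 = 72 bytes for this call.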

template <typename BlockType>
void BlockAllocator<BlockType>::DoDeallocate(void* ptr) {
  BlockType* block = FromUsableSpace(ptr);
  if (block->IsFree()) {
    if constexpr (Hardening::kIncludesBasicChecks) {
      CrashOnDoubleFree(block);
    } else {
      return;
    }
  }
  DeallocateBlock(std::move(block));
}

template <typename BlockType>
void BlockAllocator<BlockType>::DeallocateBlock(BlockType*&& block) {
  // Neighboring blocks may be merged when freeing.
  if (auto* prev = block->Prev(); prev != nullptr && prev->IsFree()) {
    ReserveBlock(*prev);
  }
  if (auto* next = block->Next(); next != nullptr && next->IsFree()) {
    ReserveBlock(*next);
  }

  // Free the block and merge it with its neighbors, if possible.
  allocated_ -= block->OuterSize();
  auto free_result = BlockType::Free(std::move(block));
  block = free_result.block();
  UpdateLast(block);

  if (free_result.prev() == BlockResultPrev::kResizedSmaller) {
    // Bytes were reclaimed from the previous block.
    allocated_ -= free_result.size();
  }

  if constexpr (is_poisonable_v<BlockType> && kPoisonInterval != 0) {
    ++unpoisoned_;
    if (unpoisoned_ >= kPoisonInterval) {
      block->Poison();
      unpoisoned_ = 0;
    }
  }

  RecycleBlock(*block);
}
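
// Poisoning example (illustrative): with a poisonable block type and
// PW_ALLOCATOR_BLOCK_POISON_INTERVAL configured to, say, 4, the counter above
// poisons every fourth freed block and then resets, trading a little CPU time
// for earlier detection of writes to freed memory.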

template <typename BlockType>
bool BlockAllocator<BlockType>::DoResize(void* ptr, size_t new_size) {
  BlockType* block = FromUsableSpace(ptr);

  // Neighboring blocks may be merged when resizing.
  if (auto* next = block->Next(); next != nullptr && next->IsFree()) {
    ReserveBlock(*next);
  }

  size_t old_size = block->OuterSize();
  if (!block->Resize(new_size).ok()) {
    return false;
  }
  allocated_ -= old_size;
  allocated_ += block->OuterSize();
  UpdateLast(block);

  if (auto* next = block->Next(); next != nullptr && next->IsFree()) {
    RecycleBlock(*next);
  }

  return true;
}

template <typename BlockType>
Result<Layout> BlockAllocator<BlockType>::DoGetInfo(InfoType info_type,
                                                    const void* ptr) const {
  // Handle types not related to a block first.
  if (info_type == InfoType::kCapacity) {
    return Layout(capacity_);
  }
  // Get a block from the given pointer.
  if (ptr < first_->UsableSpace() || last_->UsableSpace() < ptr) {
    return Status::NotFound();
  }
  const auto* block = BlockType::FromUsableSpace(ptr);
  if (!block->IsValid()) {
    return Status::DataLoss();
  }
  if (block->IsFree()) {
    return Status::FailedPrecondition();
  }
  if constexpr (kCapabilities.has(kImplementsGetRequestedLayout)) {
    if (info_type == InfoType::kRequestedLayoutOf) {
      return block->RequestedLayout();
    }
  }
  switch (info_type) {
    case InfoType::kUsableLayoutOf:
      return Layout(block->InnerSize(), BlockType::kAlignment);
    case InfoType::kAllocatedLayoutOf:
      return Layout(block->OuterSize(), BlockType::kAlignment);
    case InfoType::kRecognizes:
      return Layout();
    case InfoType::kCapacity:
    case InfoType::kRequestedLayoutOf:
    default:
      return Status::Unimplemented();
  }
}

template <typename BlockType>
Fragmentation BlockAllocator<BlockType>::MeasureFragmentation() const {
  Fragmentation fragmentation;
  for (auto block : blocks()) {
    if (block->IsFree()) {
      fragmentation.AddFragment(block->InnerSize() / BlockType::kAlignment);
    }
  }
  return fragmentation;
}
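
// For example (illustrative numbers): a free block with an inner size of 256
// bytes and a block alignment of 16 contributes a fragment of 256 / 16 = 16
// alignment units to the measurement above.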

template <typename BlockType>
template <typename Ptr>
internal::copy_const_ptr_t<Ptr, BlockType*>
BlockAllocator<BlockType>::FromUsableSpace(Ptr ptr) const {
  if (ptr < first_->UsableSpace() || last_->UsableSpace() < ptr) {
    if constexpr (Hardening::kIncludesBasicChecks) {
      CrashOnOutOfRange(ptr);
    }
    return nullptr;
  }
  auto* block = BlockType::FromUsableSpace(ptr);
  if (!block->IsValid()) {
    if constexpr (Hardening::kIncludesBasicChecks) {
      block->CheckInvariants();
    }
    return nullptr;
  }
  return block;
}

template <typename BlockType>
void BlockAllocator<BlockType>::UpdateLast(BlockType* block) {
  BlockType* next = block->Next();
  if (next == nullptr) {
    last_ = block;
  } else if (next->Next() == nullptr) {
    last_ = next;
  }
}

}  // namespace pw::allocator