• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
// Copyright 2020 The Pigweed Authors
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy of
// the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.

#include "pw_allocator/block.h"

#include <cstring>

#include "pw_assert/check.h"

namespace pw::allocator {

Init(const std::span<std::byte> region,Block ** block)23 Status Block::Init(const std::span<std::byte> region, Block** block) {
24   // Ensure the region we're given is aligned and sized accordingly
25   if (reinterpret_cast<uintptr_t>(region.data()) % alignof(Block) != 0) {
26     return Status::InvalidArgument();
27   }
28 
29   if (region.size() < sizeof(Block)) {
30     return Status::InvalidArgument();
31   }
32 
33   union {
34     Block* block;
35     std::byte* bytes;
36   } aliased;
37   aliased.bytes = region.data();
38 
39   // Make "next" point just past the end of this block; forming a linked list
40   // with the following storage. Since the space between this block and the
41   // next are implicitly part of the raw data, size can be computed by
42   // subtracting the pointers.
43   aliased.block->next_ = reinterpret_cast<Block*>(region.end());
44   aliased.block->MarkLast();
45 
46   aliased.block->prev_ = nullptr;
47   *block = aliased.block;
48 #if defined(PW_ALLOCATOR_POISON_ENABLE) && PW_ALLOCATOR_POISON_ENABLE
49   (*block)->PoisonBlock();
50 #endif  // PW_ALLOCATOR_POISON_ENABLE
51   return OkStatus();
52 }
53 
Split(size_t head_block_inner_size,Block ** new_block)54 Status Block::Split(size_t head_block_inner_size, Block** new_block) {
55   if (new_block == nullptr) {
56     return Status::InvalidArgument();
57   }
58 
59   // Don't split used blocks.
60   // TODO: Relax this restriction? Flag to enable/disable this check?
61   if (Used()) {
62     return Status::FailedPrecondition();
63   }
64 
65   // First round the head_block_inner_size up to a alignof(Block) bounary.
66   // This ensures that the next block header is aligned accordingly.
67   // Alignment must be a power of two, hence align()-1 will return the
68   // remainder.
69   auto align_bit_mask = alignof(Block) - 1;
70   size_t aligned_head_block_inner_size = head_block_inner_size;
71   if ((head_block_inner_size & align_bit_mask) != 0) {
72     aligned_head_block_inner_size =
73         (head_block_inner_size & ~align_bit_mask) + alignof(Block);
74   }
75 
76   // (1) Are we trying to allocate a head block larger than the current head
77   // block? This may happen because of the alignment above.
78   if (aligned_head_block_inner_size > InnerSize()) {
79     return Status::OutOfRange();
80   }
81 
82   // (2) Does the resulting block have enough space to store the header?
83   // TODO: What to do if the returned section is empty (i.e. remaining
84   // size == sizeof(Block))?
85   if (InnerSize() - aligned_head_block_inner_size <
86       sizeof(Block) + 2 * PW_ALLOCATOR_POISON_OFFSET) {
87     return Status::ResourceExhausted();
88   }
89 
90   // Create the new block inside the current one.
91   Block* new_next = reinterpret_cast<Block*>(
92       // From the current position...
93       reinterpret_cast<intptr_t>(this) +
94       // skip past the current header...
95       sizeof(*this) +
96       // add the poison bytes before usable space ...
97       PW_ALLOCATOR_POISON_OFFSET +
98       // into the usable bytes by the new inner size...
99       aligned_head_block_inner_size +
100       // add the poison bytes after the usable space ...
101       PW_ALLOCATOR_POISON_OFFSET);
102 
103   // If we're inserting in the middle, we need to update the current next
104   // block to point to what we're inserting
105   if (!Last()) {
106     Next()->prev_ = new_next;
107   }
108 
109   // Copy next verbatim so the next block also gets the "last"-ness
110   new_next->next_ = next_;
111   new_next->prev_ = this;
112 
113   // Update the current block to point to the new head.
114   next_ = new_next;
115 
116   *new_block = next_;
117 
118 #if defined(PW_ALLOCATOR_POISON_ENABLE) && PW_ALLOCATOR_POISON_ENABLE
119   PoisonBlock();
120   (*new_block)->PoisonBlock();
121 #endif  // PW_ALLOCATOR_POISON_ENABLE
122 
123   return OkStatus();
124 }
125 
MergeNext()126 Status Block::MergeNext() {
127   // Anything to merge with?
128   if (Last()) {
129     return Status::OutOfRange();
130   }
131 
132   // Is this or the next block in use?
133   if (Used() || Next()->Used()) {
134     return Status::FailedPrecondition();
135   }
136 
137   // Simply enough, this block's next pointer becomes the next block's
138   // next pointer. We then need to re-wire the "next next" block's prev
139   // pointer to point back to us though.
140   next_ = Next()->next_;
141 
142   // Copying the pointer also copies the "last" status, so this is safe.
143   if (!Last()) {
144     Next()->prev_ = this;
145   }
146 
147   return OkStatus();
148 }
149 
MergePrev()150 Status Block::MergePrev() {
151   // We can't merge if we have no previous. After that though, merging with
152   // the previous block is just MergeNext from the previous block.
153   if (prev_ == nullptr) {
154     return Status::OutOfRange();
155   }
156 
157   // WARNING: This class instance will still exist, but technically be invalid
158   // after this has been invoked. Be careful when doing anything with `this`
159   // After doing the below.
160   return prev_->MergeNext();
161 }
162 
163 // TODO(pwbug/234): Add stack tracing to locate which call to the heap operation
164 // caused the corruption.
165 // TODO: Add detailed information to log report and leave succinct messages
166 // in the crash message.
CrashIfInvalid()167 void Block::CrashIfInvalid() {
168   switch (CheckStatus()) {
169     case VALID:
170       break;
171     case MISALIGNED:
172       PW_DCHECK(false, "The block at address %p is not aligned.", this);
173       break;
174     case NEXT_MISMATCHED:
175       PW_DCHECK(false,
176                 "The 'prev' field in the next block (%p) does not match the "
177                 "address of the current block (%p).",
178                 Next()->Prev(),
179                 this);
180       break;
181     case PREV_MISMATCHED:
182       PW_DCHECK(false,
183                 "The 'next' field in the previous block (%p) does not match "
184                 "the address of the current block (%p).",
185                 Prev()->Next(),
186                 this);
187       break;
188     case POISON_CORRUPTED:
189       PW_DCHECK(
190           false, "The poisoned pattern in the block at %p is corrupted.", this);
191       break;
192   }
193 }
194 
195 // This function will return a Block::BlockStatus that is either VALID or
196 // indicates the reason why the Block is invalid. If the Block is invalid at
197 // multiple points, this function will only return one of the reasons.
CheckStatus() const198 Block::BlockStatus Block::CheckStatus() const {
199   // Make sure the Block is aligned.
200   if (reinterpret_cast<uintptr_t>(this) % alignof(Block) != 0) {
201     return BlockStatus::MISALIGNED;
202   }
203 
204   // Test if the prev/next pointer for this Block matches.
205   if (!Last() && (this >= Next() || this != Next()->Prev())) {
206     return BlockStatus::NEXT_MISMATCHED;
207   }
208 
209   if (Prev() && (this <= Prev() || this != Prev()->Next())) {
210     return BlockStatus::PREV_MISMATCHED;
211   }
212 
213 #if defined(PW_ALLOCATOR_POISON_ENABLE) && PW_ALLOCATOR_POISON_ENABLE
214   if (!this->CheckPoisonBytes()) {
215     return BlockStatus::POISON_CORRUPTED;
216   }
217 #endif  // PW_ALLOCATOR_POISON_ENABLE
218   return BlockStatus::VALID;
219 }
220 
221 // Paint sizeof(void*) bytes before and after the usable space in Block as the
222 // randomized function pattern.
PoisonBlock()223 void Block::PoisonBlock() {
224 #if defined(PW_ALLOCATOR_POISON_ENABLE) && PW_ALLOCATOR_POISON_ENABLE
225   std::byte* front_region = reinterpret_cast<std::byte*>(this) + sizeof(*this);
226   memcpy(front_region, POISON_PATTERN, PW_ALLOCATOR_POISON_OFFSET);
227 
228   std::byte* end_region =
229       reinterpret_cast<std::byte*>(Next()) - PW_ALLOCATOR_POISON_OFFSET;
230   memcpy(end_region, POISON_PATTERN, PW_ALLOCATOR_POISON_OFFSET);
231 #endif  // PW_ALLOCATOR_POISON_ENABLE
232 }
233 
CheckPoisonBytes() const234 bool Block::CheckPoisonBytes() const {
235 #if defined(PW_ALLOCATOR_POISON_ENABLE) && PW_ALLOCATOR_POISON_ENABLE
236   std::byte* front_region = reinterpret_cast<std::byte*>(
237       reinterpret_cast<intptr_t>(this) + sizeof(*this));
238   if (std::memcmp(front_region, POISON_PATTERN, PW_ALLOCATOR_POISON_OFFSET)) {
239     return false;
240   }
241   std::byte* end_region = reinterpret_cast<std::byte*>(
242       reinterpret_cast<intptr_t>(this->Next()) - PW_ALLOCATOR_POISON_OFFSET);
243   if (std::memcmp(end_region, POISON_PATTERN, PW_ALLOCATOR_POISON_OFFSET)) {
244     return false;
245   }
246 #endif  // PW_ALLOCATOR_POISON_ENABLE
247   return true;
248 }

}  // namespace pw::allocator