// Copyright 2020 The Pigweed Authors
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy of
// the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.

#include "pw_allocator/block.h"

#include <cstring>

#include "pw_assert/check.h"
#include "pw_span/span.h"

namespace pw::allocator {

Status Block::Init(const span<std::byte> region, Block** block) {
  // Ensure the region we're given is aligned and sized accordingly.
  if (reinterpret_cast<uintptr_t>(region.data()) % alignof(Block) != 0) {
    return Status::InvalidArgument();
  }

  if (region.size() < sizeof(Block)) {
    return Status::InvalidArgument();
  }

  union {
    Block* block;
    std::byte* bytes;
  } aliased;
  aliased.bytes = region.data();

  // Make "next" point just past the end of this block, forming a linked list
  // with the following storage. Since the space between this block and the
  // next is implicitly part of the raw data, the size can be computed by
  // subtracting the pointers.
  aliased.block->next_ =
      reinterpret_cast<Block*>(region.data() + region.size_bytes());
  aliased.block->MarkLast();

  aliased.block->prev_ = nullptr;
  *block = aliased.block;
#if defined(PW_ALLOCATOR_POISON_ENABLE) && PW_ALLOCATOR_POISON_ENABLE
  (*block)->PoisonBlock();
#endif  // PW_ALLOCATOR_POISON_ENABLE
  return OkStatus();
}
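
// Example usage (an illustrative sketch, not part of the upstream file): a
// caller hands Init() an aligned buffer and receives the first free block
// spanning it. The `buffer` name and its size are hypothetical.
//
//   alignas(Block) std::byte buffer[256];
//   Block* first = nullptr;
//   PW_CHECK_OK(Block::Init(span(buffer), &first));
//   // `first` is now an unused block covering the whole region, with next_
//   // pointing one past the end of the region and marked "last".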

Status Block::Split(size_t head_block_inner_size, Block** new_block) {
  if (new_block == nullptr) {
    return Status::InvalidArgument();
  }

  // Don't split used blocks.
  // TODO(jgarside): Relax this restriction? Flag to enable/disable this check?
  if (Used()) {
    return Status::FailedPrecondition();
  }

  // First round head_block_inner_size up to an alignof(Block) boundary.
  // This ensures that the next block header is aligned accordingly.
  // Alignment must be a power of two, so alignof(Block) - 1 can be used as a
  // bit mask to extract the remainder.
  auto align_bit_mask = alignof(Block) - 1;
  size_t aligned_head_block_inner_size = head_block_inner_size;
  if ((head_block_inner_size & align_bit_mask) != 0) {
    aligned_head_block_inner_size =
        (head_block_inner_size & ~align_bit_mask) + alignof(Block);
  }
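
  // Worked example (illustrative; the actual alignof(Block) is
  // platform-dependent): with alignof(Block) == 8, a request of 13 bytes has
  // remainder (13 & 7) == 5, so it is rounded up to (13 & ~7) + 8 == 16.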

  // (1) Are we trying to allocate a head block larger than the current head
  // block? This may happen because of the alignment above.
  if (aligned_head_block_inner_size > InnerSize()) {
    return Status::OutOfRange();
  }

  // (2) Does the resulting block have enough space to store the header?
  // TODO(jgarside): What to do if the returned section is empty (i.e.
  // remaining size == sizeof(Block))?
  if (InnerSize() - aligned_head_block_inner_size <
      sizeof(Block) + 2 * PW_ALLOCATOR_POISON_OFFSET) {
    return Status::ResourceExhausted();
  }

  // Create the new block inside the current one.
  Block* new_next = reinterpret_cast<Block*>(
      // From the current position...
      reinterpret_cast<intptr_t>(this) +
      // skip past the current header...
      sizeof(*this) +
      // add the poison bytes before the usable space...
      PW_ALLOCATOR_POISON_OFFSET +
      // move into the usable bytes by the new inner size...
      aligned_head_block_inner_size +
      // and add the poison bytes after the usable space.
      PW_ALLOCATOR_POISON_OFFSET);

  // If we're inserting in the middle, update the current next block to point
  // back at what we're inserting.
  if (!Last()) {
    Next()->prev_ = new_next;
  }

  // Copy next_ verbatim so the new block also inherits the "last" flag.
  new_next->next_ = next_;
  new_next->prev_ = this;

  // Update the current block to point to the new head.
  next_ = new_next;

  *new_block = next_;

#if defined(PW_ALLOCATOR_POISON_ENABLE) && PW_ALLOCATOR_POISON_ENABLE
  PoisonBlock();
  (*new_block)->PoisonBlock();
#endif  // PW_ALLOCATOR_POISON_ENABLE

  return OkStatus();
}
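
// Example usage (a sketch under assumed names; `first` is the block from the
// Block::Init sketch above): carving a 64-byte allocation off the front of a
// free block. The size 64 is arbitrary and illustrative.
//
//   Block* remainder = nullptr;
//   PW_CHECK_OK(first->Split(64, &remainder));
//   // `first` now has a 64-byte (aligned-up) inner region, and `remainder`
//   // is a new free block covering the rest, linked after `first`.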

Status Block::MergeNext() {
  // Anything to merge with?
  if (Last()) {
    return Status::OutOfRange();
  }

  // Is this or the next block in use?
  if (Used() || Next()->Used()) {
    return Status::FailedPrecondition();
  }

  // Simply enough, this block's next pointer becomes the next block's next
  // pointer. We then need to re-wire the "next next" block's prev pointer to
  // point back to us, though.
  next_ = Next()->next_;

  // Copying the pointer also copies the "last" status, so this is safe.
  if (!Last()) {
    Next()->prev_ = this;
  }

  return OkStatus();
}
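
// Example usage (illustrative, continuing the names from the Split sketch):
// coalescing two adjacent free blocks, restoring the original single block.
//
//   if (!first->Used() && !first->Next()->Used()) {
//     PW_CHECK_OK(first->MergeNext());  // `remainder` is absorbed into
//                                       // `first` and must not be used again.
//   }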

Status Block::MergePrev() {
  // We can't merge if we have no previous block. Beyond that, merging with
  // the previous block is just MergeNext from the previous block.
  if (prev_ == nullptr) {
    return Status::OutOfRange();
  }

  // WARNING: This class instance will still exist, but will technically be
  // invalid after this call. Be careful when doing anything with `this`
  // after the call below.
  return prev_->MergeNext();
}
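
// Example usage (illustrative; `block` is a hypothetical name): because
// MergePrev() invalidates `this`, callers should grab the previous block
// first and continue through it.
//
//   Block* prev = block->Prev();
//   if (block->MergePrev().ok()) {
//     block = prev;  // `block` was absorbed; keep working via `prev`.
//   }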

// TODO(b/234875269): Add stack tracing to locate which call to the heap
// operation caused the corruption.
// TODO(jgarside): Add detailed information to log report and leave succinct
// messages in the crash message.
void Block::CrashIfInvalid() {
  switch (CheckStatus()) {
    case VALID:
      break;
    case MISALIGNED:
      PW_DCHECK(false,
                "The block at address %p is not aligned.",
                static_cast<void*>(this));
      break;
    case NEXT_MISMATCHED:
      PW_DCHECK(false,
                "The 'prev' field in the next block (%p) does not match the "
                "address of the current block (%p).",
                static_cast<void*>(Next()->Prev()),
                static_cast<void*>(this));
      break;
    case PREV_MISMATCHED:
      PW_DCHECK(false,
                "The 'next' field in the previous block (%p) does not match "
                "the address of the current block (%p).",
                static_cast<void*>(Prev()->Next()),
                static_cast<void*>(this));
      break;
    case POISON_CORRUPTED:
      PW_DCHECK(false,
                "The poisoned pattern in the block at %p is corrupted.",
                static_cast<void*>(this));
      break;
  }
}

// Returns a Block::BlockStatus that is either VALID or indicates the reason
// why the Block is invalid. If the Block is invalid at multiple points, this
// function only returns one of the reasons.
Block::BlockStatus Block::CheckStatus() const {
  // Make sure the Block is aligned.
  if (reinterpret_cast<uintptr_t>(this) % alignof(Block) != 0) {
    return BlockStatus::MISALIGNED;
  }

  // Check that the next block points back at this block.
  if (!Last() && (this >= Next() || this != Next()->Prev())) {
    return BlockStatus::NEXT_MISMATCHED;
  }

  // Check that the previous block points forward to this block.
  if (Prev() && (this <= Prev() || this != Prev()->Next())) {
    return BlockStatus::PREV_MISMATCHED;
  }

#if defined(PW_ALLOCATOR_POISON_ENABLE) && PW_ALLOCATOR_POISON_ENABLE
  if (!this->CheckPoisonBytes()) {
    return BlockStatus::POISON_CORRUPTED;
  }
#endif  // PW_ALLOCATOR_POISON_ENABLE
  return BlockStatus::VALID;
}
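
// Example usage (an illustrative sketch): a debug routine could walk a region
// and validate every block without crashing. `first` is the hypothetical head
// block from the Init sketch above.
//
//   for (Block* b = first; b != nullptr;
//        b = b->Last() ? nullptr : b->Next()) {
//     if (b->CheckStatus() != Block::BlockStatus::VALID) {
//       // Report (or attempt to repair) the corrupted block here.
//     }
//   }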

// Paint sizeof(void*) bytes before and after the usable space in the Block
// with the poison pattern.
void Block::PoisonBlock() {
#if defined(PW_ALLOCATOR_POISON_ENABLE) && PW_ALLOCATOR_POISON_ENABLE
  std::byte* front_region = reinterpret_cast<std::byte*>(this) + sizeof(*this);
  memcpy(front_region, POISON_PATTERN, PW_ALLOCATOR_POISON_OFFSET);

  std::byte* end_region =
      reinterpret_cast<std::byte*>(Next()) - PW_ALLOCATOR_POISON_OFFSET;
  memcpy(end_region, POISON_PATTERN, PW_ALLOCATOR_POISON_OFFSET);
#endif  // PW_ALLOCATOR_POISON_ENABLE
}

bool Block::CheckPoisonBytes() const {
#if defined(PW_ALLOCATOR_POISON_ENABLE) && PW_ALLOCATOR_POISON_ENABLE
  std::byte* front_region = reinterpret_cast<std::byte*>(
      reinterpret_cast<intptr_t>(this) + sizeof(*this));
  if (std::memcmp(front_region, POISON_PATTERN, PW_ALLOCATOR_POISON_OFFSET)) {
    return false;
  }
  std::byte* end_region = reinterpret_cast<std::byte*>(
      reinterpret_cast<intptr_t>(this->Next()) - PW_ALLOCATOR_POISON_OFFSET);
  if (std::memcmp(end_region, POISON_PATTERN, PW_ALLOCATOR_POISON_OFFSET)) {
    return false;
  }
#endif  // PW_ALLOCATOR_POISON_ENABLE
  return true;
}
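
// Note: the poisoning above is compiled in only when the build defines
// PW_ALLOCATOR_POISON_ENABLE to a nonzero value, e.g. (illustrative compiler
// flag) -DPW_ALLOCATOR_POISON_ENABLE=1. Otherwise PoisonBlock() is a no-op
// and CheckPoisonBytes() always returns true.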

}  // namespace pw::allocator