// Copyright 2020 The Dawn Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "common/SlabAllocator.h"

#include "common/Assert.h"
#include "common/Math.h"

#include <algorithm>
#include <cstdlib>
#include <limits>
#include <new>

// IndexLinkNode

SlabAllocatorImpl::IndexLinkNode::IndexLinkNode(Index index, Index nextIndex)
    : index(index), nextIndex(nextIndex) {
}

// Slab

SlabAllocatorImpl::Slab::Slab(char allocation[], IndexLinkNode* head)
    : allocation(allocation), freeList(head), prev(nullptr), next(nullptr), blocksInUse(0) {
}

SlabAllocatorImpl::Slab::Slab(Slab&& rhs) = default;

SlabAllocatorImpl::SentinelSlab::SentinelSlab() : Slab(nullptr, nullptr) {
}

SlabAllocatorImpl::SentinelSlab::SentinelSlab(SentinelSlab&& rhs) = default;

SlabAllocatorImpl::SentinelSlab::~SentinelSlab() {
    Slab* slab = this->next;
    while (slab != nullptr) {
        Slab* next = slab->next;
        ASSERT(slab->blocksInUse == 0);
        // Delete the slab's allocation. The slab is allocated inside slab->allocation.
        delete[] slab->allocation;
        slab = next;
    }
}

// SlabAllocatorImpl

SlabAllocatorImpl::Index SlabAllocatorImpl::kInvalidIndex =
    std::numeric_limits<SlabAllocatorImpl::Index>::max();

SlabAllocatorImpl::SlabAllocatorImpl(Index blocksPerSlab,
                                     uint32_t objectSize,
                                     uint32_t objectAlignment)
    : mAllocationAlignment(std::max(static_cast<uint32_t>(alignof(Slab)), objectAlignment)),
      mSlabBlocksOffset(Align(sizeof(Slab), objectAlignment)),
      mIndexLinkNodeOffset(Align(objectSize, alignof(IndexLinkNode))),
      mBlockStride(Align(mIndexLinkNodeOffset + sizeof(IndexLinkNode), objectAlignment)),
      mBlocksPerSlab(blocksPerSlab),
      mTotalAllocationSize(
          // required allocation size
          static_cast<size_t>(mSlabBlocksOffset) + mBlocksPerSlab * mBlockStride +
          // Pad the allocation size by mAllocationAlignment so that the aligned allocation still
          // fulfills the required size.
          mAllocationAlignment) {
    ASSERT(IsPowerOfTwo(mAllocationAlignment));
}
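
// A worked example of the layout arithmetic above, using hypothetical values:
// objectSize = 24, objectAlignment = 8, sizeof(Slab) = 40, sizeof(IndexLinkNode) = 4,
// and alignof(IndexLinkNode) = 2 (the real values depend on the target and on the
// definitions in common/SlabAllocator.h):
//
//     mAllocationAlignment = max(alignof(Slab), 8) = 8
//     mSlabBlocksOffset    = Align(40, 8)          = 40
//     mIndexLinkNodeOffset = Align(24, 2)          = 24
//     mBlockStride         = Align(24 + 4, 8)      = 32
//     mTotalAllocationSize = 40 + blocksPerSlab * 32 + 8
//
// Each slab is laid out as
//     | front pad | Slab | block 0 | block 1 | ... | block N-1 |
// where each block is | object | IndexLinkNode |, and the extra mAllocationAlignment
// bytes absorb whatever front padding AlignPtr consumes in GetNewSlab().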

SlabAllocatorImpl::SlabAllocatorImpl(SlabAllocatorImpl&& rhs)
    : mAllocationAlignment(rhs.mAllocationAlignment),
      mSlabBlocksOffset(rhs.mSlabBlocksOffset),
      mIndexLinkNodeOffset(rhs.mIndexLinkNodeOffset),
      mBlockStride(rhs.mBlockStride),
      mBlocksPerSlab(rhs.mBlocksPerSlab),
      mTotalAllocationSize(rhs.mTotalAllocationSize),
      mAvailableSlabs(std::move(rhs.mAvailableSlabs)),
      mFullSlabs(std::move(rhs.mFullSlabs)),
      mRecycledSlabs(std::move(rhs.mRecycledSlabs)) {
}

SlabAllocatorImpl::~SlabAllocatorImpl() = default;

SlabAllocatorImpl::IndexLinkNode* SlabAllocatorImpl::OffsetFrom(
    IndexLinkNode* node,
    std::make_signed_t<Index> offset) const {
    return reinterpret_cast<IndexLinkNode*>(reinterpret_cast<char*>(node) +
                                            static_cast<intptr_t>(mBlockStride) * offset);
}

SlabAllocatorImpl::IndexLinkNode* SlabAllocatorImpl::NodeFromObject(void* object) const {
    return reinterpret_cast<SlabAllocatorImpl::IndexLinkNode*>(static_cast<char*>(object) +
                                                               mIndexLinkNodeOffset);
}

void* SlabAllocatorImpl::ObjectFromNode(IndexLinkNode* node) const {
    return static_cast<void*>(reinterpret_cast<char*>(node) - mIndexLinkNodeOffset);
}

bool SlabAllocatorImpl::IsNodeInSlab(Slab* slab, IndexLinkNode* node) const {
    char* firstObjectPtr = reinterpret_cast<char*>(slab) + mSlabBlocksOffset;
    IndexLinkNode* firstNode = NodeFromObject(firstObjectPtr);
    IndexLinkNode* lastNode = OffsetFrom(firstNode, mBlocksPerSlab - 1);
    return node >= firstNode && node <= lastNode && node->index < mBlocksPerSlab;
}

void SlabAllocatorImpl::PushFront(Slab* slab, IndexLinkNode* node) const {
    ASSERT(IsNodeInSlab(slab, node));

    IndexLinkNode* head = slab->freeList;
    if (head == nullptr) {
        node->nextIndex = kInvalidIndex;
    } else {
        ASSERT(IsNodeInSlab(slab, head));
        node->nextIndex = head->index;
    }
    slab->freeList = node;

    ASSERT(slab->blocksInUse != 0);
    slab->blocksInUse--;
}

SlabAllocatorImpl::IndexLinkNode* SlabAllocatorImpl::PopFront(Slab* slab) const {
    ASSERT(slab->freeList != nullptr);

    IndexLinkNode* head = slab->freeList;
    if (head->nextIndex == kInvalidIndex) {
        slab->freeList = nullptr;
    } else {
        ASSERT(IsNodeInSlab(slab, head));
        slab->freeList = OffsetFrom(head, head->nextIndex - head->index);
        ASSERT(IsNodeInSlab(slab, slab->freeList));
    }

    ASSERT(slab->blocksInUse < mBlocksPerSlab);
    slab->blocksInUse++;
    return head;
}
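
// The free list is intrusive and index-based: each IndexLinkNode records its own
// block index and the index of the next free block, so a link costs an Index rather
// than a full pointer. PopFront turns the stored index back into an address with
// OffsetFrom. As a hypothetical example, if the head node is block 5 and
// head->nextIndex == 2, then
//
//     slab->freeList = OffsetFrom(head, 2 - 5);
//
// steps back three block strides to land on block 2's node.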

void SlabAllocatorImpl::SentinelSlab::Prepend(SlabAllocatorImpl::Slab* slab) {
    if (this->next != nullptr) {
        this->next->prev = slab;
    }
    slab->prev = this;
    slab->next = this->next;
    this->next = slab;
}

void SlabAllocatorImpl::Slab::Splice() {
    SlabAllocatorImpl::Slab* originalPrev = this->prev;
    SlabAllocatorImpl::Slab* originalNext = this->next;

    this->prev = nullptr;
    this->next = nullptr;

    ASSERT(originalPrev != nullptr);

    // Set the originalNext's prev pointer.
    if (originalNext != nullptr) {
        originalNext->prev = originalPrev;
    }

    // Now, set the originalNext as the originalPrev's new next.
    originalPrev->next = originalNext;
}

void* SlabAllocatorImpl::Allocate() {
    if (mAvailableSlabs.next == nullptr) {
        GetNewSlab();
    }

    Slab* slab = mAvailableSlabs.next;
    IndexLinkNode* node = PopFront(slab);
    ASSERT(node != nullptr);

    // Move full slabs to a separate list, so allocate can always return quickly.
    if (slab->blocksInUse == mBlocksPerSlab) {
        slab->Splice();
        mFullSlabs.Prepend(slab);
    }

    return ObjectFromNode(node);
}

void SlabAllocatorImpl::Deallocate(void* ptr) {
    IndexLinkNode* node = NodeFromObject(ptr);

    ASSERT(node->index < mBlocksPerSlab);
    void* firstAllocation = ObjectFromNode(OffsetFrom(node, -node->index));
    Slab* slab = reinterpret_cast<Slab*>(static_cast<char*>(firstAllocation) - mSlabBlocksOffset);
    ASSERT(slab != nullptr);

    bool slabWasFull = slab->blocksInUse == mBlocksPerSlab;

    ASSERT(slab->blocksInUse != 0);
    PushFront(slab, node);

    if (slabWasFull) {
        // Slab is in the full list. Move it to the recycled list.
        ASSERT(slab->freeList != nullptr);
        slab->Splice();
        mRecycledSlabs.Prepend(slab);
    }

    // TODO(crbug.com/dawn/825): Occasionally prune slabs if |blocksInUse == 0|.
    // Doing so eagerly hurts performance.
}
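
// Illustrative usage through the typed SlabAllocator<T> wrapper declared in
// common/SlabAllocator.h. The wrapper's exact signatures are assumed here, and
// MyObject is a hypothetical type:
//
//     SlabAllocator<MyObject> allocator(64 * sizeof(MyObject));
//     MyObject* object = allocator.Allocate(/* constructor args */);
//     // ... use object ...
//     allocator.Deallocate(object);
//
// The wrapper presumably placement-constructs T in the block returned by
// SlabAllocatorImpl::Allocate() and runs ~T() before calling
// SlabAllocatorImpl::Deallocate(), which is why this file deals only in raw blocks.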

void SlabAllocatorImpl::GetNewSlab() {
    // Should only be called when there are no available slabs.
    ASSERT(mAvailableSlabs.next == nullptr);

    if (mRecycledSlabs.next != nullptr) {
        // If the recycled list is non-empty, swap its contents into the available list.
        std::swap(mAvailableSlabs.next, mRecycledSlabs.next);

        // We swapped the next pointers, so the prev pointer is wrong.
        // Update it here.
        mAvailableSlabs.next->prev = &mAvailableSlabs;
        ASSERT(mRecycledSlabs.next == nullptr);
        return;
    }

    // TODO(crbug.com/dawn/824): Use aligned_alloc with C++17.
    char* allocation = new char[mTotalAllocationSize];
    char* alignedPtr = AlignPtr(allocation, mAllocationAlignment);

    char* dataStart = alignedPtr + mSlabBlocksOffset;

    IndexLinkNode* node = NodeFromObject(dataStart);
    for (uint32_t i = 0; i < mBlocksPerSlab; ++i) {
        new (OffsetFrom(node, i)) IndexLinkNode(i, i + 1);
    }

    IndexLinkNode* lastNode = OffsetFrom(node, mBlocksPerSlab - 1);
    lastNode->nextIndex = kInvalidIndex;

    mAvailableSlabs.Prepend(new (alignedPtr) Slab(allocation, node));
}
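
// Note that a freshly built slab's free list chains the blocks in order:
// node 0 -> 1 -> ... -> blocksPerSlab - 1 -> kInvalidIndex. The Slab header itself
// is placement-constructed at alignedPtr, inside the same new[] allocation, which
// is why ~SentinelSlab() only needs to delete[] slab->allocation.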