/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "mempool.h"
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <iostream>
#include <mutex>
#include "thread_env.h"
#include "securec.h"
#include "mpl_logging.h"

namespace maple {
MemPoolCtrler memPoolCtrler;
bool MemPoolCtrler::freeMemInTime = false;

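// Typical usage (a minimal sketch based only on the API in this file):
//   MemPool *mp = memPoolCtrler.NewMemPool("example", false /* isLocalPool */);
//   void *buf = mp->Malloc(64);
//   delete mp;  // the destructor returns the pool's blocks to the controller

// Return a pool's memory blocks to the controller: fixed-size blocks are appended
// to the shared free list for reuse, while big blocks are freed immediately.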
void MemPoolCtrler::FreeMemBlocks(const MemPool &pool, MemBlock *fixedMemHead, MemBlock *bigMemHead)
{
    (void)(pool);

    MemBlock *fixedTail = nullptr;

    if (fixedMemHead != nullptr) {
        fixedTail = fixedMemHead;
        while (fixedTail->nextMemBlock != nullptr) {
            fixedTail = fixedTail->nextMemBlock;
        }
    }

    while (bigMemHead != nullptr) {
        auto *cur = bigMemHead;
        bigMemHead = bigMemHead->nextMemBlock;
        free(cur->startPtr);
        delete cur;
    }

    ParallelGuard guard(ctrlerMutex, HaveRace());
    if (fixedTail != nullptr) {
        fixedTail->nextMemBlock = fixedFreeMemBlocks;
        DEBUG_ASSERT(fixedTail->nextMemBlock != fixedTail, "error");
        fixedFreeMemBlocks = fixedMemHead;
    }
}

// Destructor, free all allocated memory
MemPoolCtrler::~MemPoolCtrler()
{
    FreeMem();
}

// Allocate a new memory pool and register it in the controller
MemPool *MemPoolCtrler::NewMemPool(const std::string &name, bool isLocalPool)
{
    MemPool *memPool = nullptr;

    if (isLocalPool) {
        memPool = new ThreadLocalMemPool(*this, name);
    } else {
        memPool = new ThreadShareMemPool(*this, name);
    }

    return memPool;
}

// This function will be removed soon, DO NOT call it, just use delete memPool
void MemPoolCtrler::DeleteMemPool(MemPool *memPool) const
{
    delete memPool;
}

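// Drain the controller's free list of reusable fixed-size memory blocks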
void MemPoolCtrler::FreeMem()
{
    ParallelGuard guard(ctrlerMutex, HaveRace());

    while (fixedFreeMemBlocks != nullptr) {
        MemBlock *arena = fixedFreeMemBlocks;
        fixedFreeMemBlocks = fixedFreeMemBlocks->nextMemBlock;
        delete arena;
    }
}

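// Allocate a memory block for a pool: a fixed-size block for small requests,
// a dedicated big block otherwise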
MemBlock *MemPoolCtrler::AllocMemBlock(const MemPool &pool, size_t size)
{
    if (size <= kMemBlockSizeMin) {
        return AllocFixMemBlock(pool);
    } else {
        return AllocBigMemBlock(pool, size);
    }
}

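// Take a fixed-size block from the shared free list; if the list is empty,
// carve a freshly allocated arena into fixed-size blocks and refill the list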
MemBlock *MemPoolCtrler::AllocFixMemBlock(const MemPool &pool)
{
    (void)(pool);
    MemBlock *ret = nullptr;

    ParallelGuard guard(ctrlerMutex, HaveRace());
    if (fixedFreeMemBlocks != nullptr) {
        ret = fixedFreeMemBlocks;
        fixedFreeMemBlocks = fixedFreeMemBlocks->nextMemBlock;
        return ret;
    }

    uint8_t *ptr = sysMemoryMgr->RealAllocMemory(kMemBlockMalloc);
    // leave one MemBlock to return
    for (size_t i = 0; i < kMemBlockMalloc / kMemBlockSizeMin - 1; ++i) {
        auto *block = new MemBlock(ptr, kMemBlockSizeMin);
        ptr += kMemBlockSizeMin;
        block->nextMemBlock = fixedFreeMemBlocks;
        fixedFreeMemBlocks = block;
    }

    return new MemBlock(ptr, kMemBlockSizeMin);
}

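// Allocate a dedicated block from the system heap for requests larger than the fixed block size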
MemBlock *MemPoolCtrler::AllocBigMemBlock(const MemPool &pool, size_t size) const
{
    DEBUG_ASSERT(size > kMemBlockSizeMin, "Big memory block must be bigger than fixed memory block");
    (void)(pool);

    uint8_t *block = reinterpret_cast<uint8_t *>(malloc(size));
    CHECK_FATAL(block != nullptr, "malloc failed");
    return new MemBlock(block, size);
}

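// Destructor, return all of this pool's memory blocks to the controller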
MemPool::~MemPool()
{
    ctrler.FreeMemBlocks(*this, fixedMemHead, bigMemHead);
}

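// Allocate size bytes from the current memory block, grabbing a new block
// when the remaining space is insufficient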
void *MemPool::Malloc(size_t size)
{
    size = BITS_ALIGN(size);
    DEBUG_ASSERT(endPtr >= curPtr, "endPtr should >= curPtr");
    if (size > static_cast<size_t>(endPtr - curPtr)) {
        return AllocNewMemBlock(size);
    }
    uint8_t *retPtr = curPtr;
    curPtr += size;
    return retPtr;
}

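// Release all memory blocks owned by this pool and reset it to an empty state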
void MemPool::ReleaseContainingMem()
{
    ctrler.FreeMemBlocks(*this, fixedMemHead, bigMemHead);

    fixedMemHead = nullptr;
    bigMemHead = nullptr;
    endPtr = nullptr;
    curPtr = nullptr;
}

// Allocate size bytes from the memory pool, then zero them
void *MemPool::Calloc(size_t size)
{
    void *p = Malloc(BITS_ALIGN(size));
    DEBUG_ASSERT(p != nullptr, "ERROR: Calloc error");
    errno_t eNum = memset_s(p, BITS_ALIGN(size), 0, BITS_ALIGN(size));
    CHECK_FATAL(eNum == EOK, "memset_s failed");
    return p;
}

// Reallocate to newSize bytes, copying over as much of the old contents as fits
void *MemPool::Realloc(const void *ptr, size_t oldSize, size_t newSize)
{
    void *result = Malloc(newSize);
    DEBUG_ASSERT(result != nullptr, "ERROR: Realloc error");
    size_t copySize = ((newSize > oldSize) ? oldSize : newSize);
    if (copySize != 0 && ptr != nullptr) {
        errno_t eNum = memcpy_s(result, copySize, ptr, copySize);
        CHECK_FATAL(eNum == EOK, "memcpy_s failed");
    }
    return result;
}

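// Request a new memory block from the controller, link it onto the appropriate
// list (fixed or big), and allocate size bytes from its start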
uint8_t *MemPool::AllocNewMemBlock(size_t size)
{
    MemBlock **head = nullptr;
    MemBlock *newMemBlock = ctrler.AllocMemBlock(*this, size);
    if (newMemBlock->memSize <= kMemBlockSizeMin) {
        head = &fixedMemHead;
    } else {
        head = &bigMemHead;
    }

    newMemBlock->nextMemBlock = *head;
    *head = newMemBlock;
    CHECK_FATAL(newMemBlock->nextMemBlock != newMemBlock, "error");

    curPtr = newMemBlock->startPtr + size;
    endPtr = newMemBlock->startPtr + newMemBlock->memSize;
    DEBUG_ASSERT(curPtr <= endPtr, "must be");

    return newMemBlock->startPtr;
}

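// Allocate size bytes from the stack pool, bumping either the fixed-block cursor
// or the big-block cursor depending on the request size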
void *StackMemPool::Malloc(size_t size)
{
    size = BITS_ALIGN(size);
    uint8_t **curPtrPtr = nullptr;
    uint8_t *curEndPtr = nullptr;
    if (size <= kMemBlockSizeMin) {
        curPtrPtr = &curPtr;
        curEndPtr = endPtr;
    } else {
        curPtrPtr = &bigCurPtr;
        curEndPtr = bigEndPtr;
    }
    uint8_t *retPtr = *curPtrPtr;
    DEBUG_ASSERT(curEndPtr >= *curPtrPtr, "endPtr should >= curPtr");
    if (size > static_cast<size_t>(curEndPtr - *curPtrPtr)) {
        retPtr = AllocTailMemBlock(size);
    }
    *curPtrPtr = retPtr + size;
    return retPtr;
}

// Unlike a normal mempool, a scoped (stack) mem pool does not use a big mem block for small sizes
MemBlock *StackMemPool::AllocMemBlockBySize(size_t size)
{
    if (size <= kMemBlockSizeMin) {
        return ctrler.AllocFixMemBlock(*this);
    } else {
        return ctrler.AllocBigMemBlock(*this, size);
    }
}

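// Pop the given allocator and rewind the fixed and big block cursors to the
// recorded marks, so memory allocated after the marks can be reused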
void StackMemPool::ResetStackTop(const LocalMapleAllocator *alloc, uint8_t *fixedCurPtrMark,
                                 MemBlock *fixedStackTopMark, uint8_t *bigCurPtrMark,
                                 MemBlock *bigStackTopMark) noexcept
{
    CheckTopAllocator(alloc);
    PopAllocator();

    if (fixedStackTopMark != nullptr) {
        fixedMemStackTop = fixedStackTopMark;
        curPtr = fixedCurPtrMark;
        endPtr = fixedMemStackTop->EndPtr();
    } else if (fixedMemHead != nullptr) {
        fixedMemStackTop = fixedMemHead;
        curPtr = fixedMemStackTop->startPtr;
        endPtr = fixedMemStackTop->EndPtr();
    }

    if (bigStackTopMark != nullptr) {
        bigMemStackTop = bigStackTopMark;
        bigCurPtr = bigCurPtrMark;
        bigEndPtr = bigMemStackTop->EndPtr();
    } else if (bigMemHead != nullptr) {
        bigMemStackTop = bigMemHead;
        bigCurPtr = bigMemStackTop->startPtr;
        bigEndPtr = bigMemStackTop->EndPtr();
    }
}

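// Advance the stack top to a block that can hold size bytes: reuse the next
// block on the list when it is large enough, otherwise insert a new block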
uint8_t *StackMemPool::AllocTailMemBlock(size_t size)
{
    MemBlock **head = nullptr;
    MemBlock **stackTop = nullptr;
    uint8_t **endPtrPtr = nullptr;

    if (size <= kMemBlockSizeMin) {
        head = &fixedMemHead;
        stackTop = &fixedMemStackTop;
        endPtrPtr = &endPtr;
    } else {
        head = &bigMemHead;
        stackTop = &bigMemStackTop;
        endPtrPtr = &bigEndPtr;
    }

    if (*stackTop == nullptr) {
        MemBlock *newMemBlock = AllocMemBlockBySize(size);
        *stackTop = newMemBlock;
        *head = newMemBlock;
        (*stackTop)->nextMemBlock = nullptr;
    } else {
        if ((*stackTop)->nextMemBlock != nullptr && (*stackTop)->nextMemBlock->memSize >= size) {
            *stackTop = (*stackTop)->nextMemBlock;
        } else {
            MemBlock *newMemBlock = AllocMemBlockBySize(size);
            auto *tmp = (*stackTop)->nextMemBlock;
            (*stackTop)->nextMemBlock = newMemBlock;
            *stackTop = newMemBlock;
            newMemBlock->nextMemBlock = tmp;
        }
    }
    *endPtrPtr = (*stackTop)->EndPtr();
    return (*stackTop)->startPtr;
}
}  // namespace maple