/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef LIBPANDABASE_MEM_MMAP_MEM_POOL_H
#define LIBPANDABASE_MEM_MMAP_MEM_POOL_H

#include "libpandabase/mem/mem_pool.h"
#include "libpandabase/mem/mem.h"
#include "libpandabase/os/mem.h"
#include "libpandabase/os/mutex.h"
#include "libpandabase/mem/space.h"

#include <map>
#include <tuple>

namespace panda {

class MMapMemPoolTest;
namespace mem::test {
class InternalAllocatorTest;
}  // namespace mem::test

class MmapPool {
public:
    using FreePoolsIter = std::multimap<size_t, MmapPool *>::iterator;
    explicit MmapPool(Pool pool, FreePoolsIter free_pools_iter) : pool_(pool), free_pools_iter_(free_pools_iter) {}

    ~MmapPool() = default;

    DEFAULT_COPY_SEMANTIC(MmapPool);
    DEFAULT_MOVE_SEMANTIC(MmapPool);

    size_t GetSize()
    {
        return pool_.GetSize();
    }

    void SetSize(size_t size)
    {
        pool_ = Pool(size, GetMem());
    }

    void *GetMem()
    {
        return pool_.GetMem();
    }

    // A free pool is stored in free_pools_, and its iterator is recorded in free_pools_iter_.
    // If free_pools_iter_ is equal to the end of free_pools_, the pool is in use.
    bool IsUsed(FreePoolsIter end_iter)
    {
        return free_pools_iter_ == end_iter;
    }

    FreePoolsIter GetFreePoolsIter()
    {
        return free_pools_iter_;
    }

    void SetFreePoolsIter(FreePoolsIter free_pools_iter)
    {
        free_pools_iter_ = free_pools_iter;
    }

private:
    Pool pool_;
    // record the iterator of the pool in the multimap
    FreePoolsIter free_pools_iter_;
};
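
// Illustrative sketch (not part of the original header): the stored iterator doubles as a
// "used" flag. A pool constructed with the end iterator of the owning multimap counts as used
// until it is inserted into that multimap; the names below mirror MmapPoolMap, and mem is an
// assumed pointer to already mmaped memory.
//
//     std::multimap<size_t, MmapPool *> free_pools;
//     auto *pool = new MmapPool(Pool(4096U, mem), free_pools.end());
//     pool->IsUsed(free_pools.end());  // true: the pool is not in the free list
//     pool->SetFreePoolsIter(free_pools.insert({pool->GetSize(), pool}));
//     pool->IsUsed(free_pools.end());  // false: the pool is now tracked as free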

class MmapPoolMap {
public:
    MmapPoolMap() = default;

    ~MmapPoolMap()
    {
        for (auto &pool : pool_map_) {
            delete pool.second;
        }
    }

    DEFAULT_COPY_SEMANTIC(MmapPoolMap);
    DEFAULT_MOVE_SEMANTIC(MmapPoolMap);

    // Find a free pool with enough size in the map. The pool is split if its size is larger than the requested size.
    Pool PopFreePool(size_t size);

    // Push an unused pool back to the map as a free pool.
    void PushFreePool(Pool pool);

    // Add a new pool to the map. This pool will be marked as used.
    void AddNewPool(Pool pool);

    // Get the total size of all free pools.
    size_t GetAllSize() const;

    /**
     * Check whether enough pools can be allocated from the free pools
     * @param pools_num the number of pools we need
     * @param pool_size the size of each pool we need
     * @return true if the free pools are guaranteed to have enough space for the requested pools
     */
    bool HaveEnoughFreePools(size_t pools_num, size_t pool_size) const;

private:
    std::map<void *, MmapPool *> pool_map_;
    std::multimap<size_t, MmapPool *> free_pools_;
};
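
// Illustrative sketch (not part of the original header, sizes are hypothetical): PopFreePool()
// splits an oversized free pool, so a single 8192-byte free pool can serve a 4096-byte request
// and leave a 4096-byte remainder on the free list; mem is an assumed pointer to mmaped memory.
//
//     MmapPoolMap pools;
//     pools.AddNewPool(Pool(8192U, mem));    // registered and marked as used
//     pools.PushFreePool(Pool(8192U, mem));  // returned to the free list
//     Pool p = pools.PopFreePool(4096U);     // p.GetSize() == 4096, 4096 bytes stay free
//     bool ok = pools.HaveEnoughFreePools(1U, 4096U);  // true: the remainder fits one more pool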

class MmapMemPool : public MemPool<MmapMemPool> {
public:
    NO_COPY_SEMANTIC(MmapMemPool);
    NO_MOVE_SEMANTIC(MmapMemPool);
    ~MmapMemPool() override;

    /**
     * Get the minimal address in the object pool space
     * @return min address in pool
     */
    uintptr_t GetMinObjectAddress() const
    {
        return min_object_memory_addr_;
    }

    void *GetAddressOfMinObjectAddress()
    {
        return static_cast<void *>(&min_object_memory_addr_);
    }

    /**
     * Get the maximal address in the object pool space
     * @return max address in pool
     */
    uintptr_t GetMaxObjectAddress() const
    {
        return min_object_memory_addr_ + mmaped_object_memory_size_;
    }

    size_t GetTotalObjectSize() const
    {
        return mmaped_object_memory_size_;
    }

    /**
     * Get the start address of the pool that contains the input address
     * @param addr address inside a pool
     * @return start address of the pool
     */
    void *GetStartAddrPoolForAddr(const void *addr) const
    {
        return GetStartAddrPoolForAddrImpl(addr);
    }

    size_t GetObjectSpaceFreeBytes() const;

    // Check whether enough pools can be allocated in the object space
    bool HaveEnoughPoolsInObjectSpace(size_t pools_num, size_t pool_size) const;

    /**
     * @return used bytes count in the object space (bytes in free pools are excluded)
     */
    size_t GetObjectUsedBytes() const;

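    // Illustrative sketch (not part of the original header): the min/max object addresses bound
    // the whole mmaped object space, so an address can be classified with a simple range check;
    // pool is an assumed reference to the process-wide MmapMemPool instance.
    //
    //     uintptr_t addr = ToUintPtr(obj);
    //     bool in_object_space = pool.GetMinObjectAddress() <= addr && addr < pool.GetMaxObjectAddress();
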
private:
    template <class ArenaT = Arena>
    ArenaT *AllocArenaImpl(size_t size, SpaceType space_type, AllocatorType allocator_type, const void *allocator_addr);
    template <class ArenaT = Arena>
    void FreeArenaImpl(ArenaT *arena);

    void *AllocRawMemImpl(size_t size, SpaceType type);
    void *AllocRawMemCompilerImpl(size_t size);
    void *AllocRawMemInternalImpl(size_t size);
    void *AllocRawMemCodeImpl(size_t size);
    void *AllocRawMemObjectImpl(size_t size, SpaceType type);
    void FreeRawMemImpl(void *mem, size_t size);

    Pool AllocPoolImpl(size_t size, SpaceType space_type, AllocatorType allocator_type, const void *allocator_addr);
    void FreePoolImpl(void *mem, size_t size);

    AllocatorInfo GetAllocatorInfoForAddrImpl(const void *addr) const;
    SpaceType GetSpaceTypeForAddrImpl(const void *addr) const;
    void *GetStartAddrPoolForAddrImpl(const void *addr) const;

    Pool AllocPoolUnsafe(size_t size, SpaceType space_type, AllocatorType allocator_type, const void *allocator_addr);
    void FreePoolUnsafe(void *mem, size_t size);

    void AddToNonObjectPoolsMap(std::tuple<Pool, AllocatorInfo, SpaceType> pool_info);
    void RemoveFromNonObjectPoolsMap(void *pool_addr);
    std::tuple<Pool, AllocatorInfo, SpaceType> FindAddrInNonObjectPoolsMap(const void *addr) const;

    MmapMemPool();

    // A super class for raw memory allocation for spaces.
    class SpaceMemory {
    public:
        void Initialize(uintptr_t min_addr, size_t max_size)
        {
            min_address_ = min_addr;
            max_size_ = max_size;
            cur_alloc_offset_ = 0U;
        }

        uintptr_t GetMinAddress() const
        {
            return min_address_;
        }

        size_t GetMaxSize() const
        {
            return max_size_;
        }

        size_t GetOccupiedMemorySize() const
        {
            return cur_alloc_offset_;
        }

        inline size_t GetFreeSpace() const
        {
            ASSERT(max_size_ >= cur_alloc_offset_);
            return max_size_ - cur_alloc_offset_;
        }

        void *AllocRawMem(size_t size, MmapPoolMap *pool_map)
        {
            if (UNLIKELY(GetFreeSpace() < size)) {
                return nullptr;
            }
            void *mem = ToVoidPtr(min_address_ + cur_alloc_offset_);
            cur_alloc_offset_ += size;
            pool_map->AddNewPool(Pool(size, mem));
            return mem;
        }

    private:
        uintptr_t min_address_ {0U};    ///< Min address for the space
        size_t max_size_ {0U};          ///< Max size in bytes for the space
        size_t cur_alloc_offset_ {0U};  ///< A value of occupied memory from the min_address_
    };
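
    // Illustrative sketch (not part of the original header, values are hypothetical): SpaceMemory
    // hands out raw memory with a simple bump-pointer offset and registers every returned chunk
    // as a new (used) pool in the supplied MmapPoolMap; base_addr is an assumed mmaped base.
    //
    //     SpaceMemory space;
    //     space.Initialize(base_addr, 1U << 20);            // 1 MiB space starting at base_addr
    //     void *mem = space.AllocRawMem(4096U, &pool_map);  // returns base_addr, offset becomes 4096
    //     size_t left = space.GetFreeSpace();               // (1 MiB - 4096) bytes remain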

    uintptr_t min_object_memory_addr_ {0U};  ///< Minimal address of the mmaped object memory
    size_t mmaped_object_memory_size_ {0U};  ///< Size of the whole mmaped object memory

    SpaceMemory common_space_;

    PoolMap pool_map_;  ///< Pool map for object pools with all required information for quick search

    MmapPoolMap common_space_pools_;

    size_t code_space_current_size_ {0};
    size_t compiler_space_current_size_ {0};
    size_t internal_space_current_size_ {0};

    size_t code_space_max_size_ {0};
    size_t compiler_space_max_size_ {0};
    size_t internal_space_max_size_ {0};

    // Map for non-object pools allocated via mmap
    std::map<const void *, std::tuple<Pool, AllocatorInfo, SpaceType>> non_object_mmaped_pools_;
    // AllocRawMem is called both from alloc and externally
    mutable os::memory::RecursiveMutex lock_;

    friend class PoolManager;
    friend class MemPool<MmapMemPool>;
    friend class MMapMemPoolTest;
    friend class mem::test::InternalAllocatorTest;
};

}  // namespace panda

#endif  // LIBPANDABASE_MEM_MMAP_MEM_POOL_H