• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  * http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #ifndef LIBPANDABASE_MEM_ARENA_ALLOCATOR_H
17 #define LIBPANDABASE_MEM_ARENA_ALLOCATOR_H
18 
19 #include <array>
20 #include <cstdint>
21 #include <cstdlib>
22 #include <limits>
23 #include <utility>
24 #include <memory>
25 
26 #include "concepts.h"
27 #include "mem/base_mem_stats.h"
28 #include "malloc_mem_pool-inl.h"
29 #include "mmap_mem_pool-inl.h"
30 #include "mem.h"
31 #include "mem_pool.h"
32 #include "arena-inl.h"
33 
34 #define USE_MMAP_POOL_FOR_ARENAS
35 
36 WEAK_FOR_LTO_START
37 
38 namespace ark {
39 
// Default size of a single arena pool; value is supplied by the build configuration.
constexpr size_t DEFAULT_ARENA_SIZE = PANDA_DEFAULT_ARENA_SIZE;
// Default alignment applied when the caller does not request a specific one.
constexpr Alignment DEFAULT_ARENA_ALIGNMENT = LOG_ALIGN_3;
// Buffer for on stack allocation
// NOTE(review): keep in sync with DEFAULT_ON_STACK_ARENA_ALLOCATOR_BUFF_SIZE below —
// both are 128K today, and OnStackBuffT sizes its buffer with this constant.
constexpr size_t ON_STACK_BUFFER_SIZE = 128 * SIZE_1K;
#ifdef FORCE_ARENA_ALLOCATOR_ON_STACK_CACHE
constexpr bool ON_STACK_ALLOCATION_ENABLED = true;
#else
constexpr bool ON_STACK_ALLOCATION_ENABLED = false;
#endif

constexpr size_t DEFAULT_ON_STACK_ARENA_ALLOCATOR_BUFF_SIZE = 128 * SIZE_1K;

// Forward declaration; the adapter (STL-style allocator facade) is defined elsewhere.
template <typename T, bool USE_OOM_HANDLER>
class ArenaAllocatorAdapter;
54 
55 template <bool USE_OOM_HANDLER = false>
56 class ArenaAllocatorT {
57 public:
58     using OOMHandler = std::add_pointer_t<void()>;
59     template <typename T>
60     using AdapterType = ArenaAllocatorAdapter<T, USE_OOM_HANDLER>;
61 
62     PANDA_PUBLIC_API explicit ArenaAllocatorT(SpaceType spaceType, BaseMemStats *memStats = nullptr,
63                                               bool limitAllocSizeByPool = false);
64     ArenaAllocatorT(OOMHandler oomHandler, SpaceType spaceType, BaseMemStats *memStats = nullptr,
65                     bool limitAllocSizeByPool = false);
66 
67     PANDA_PUBLIC_API ~ArenaAllocatorT();
68     NO_COPY_SEMANTIC(ArenaAllocatorT);
69     NO_MOVE_SEMANTIC(ArenaAllocatorT);
70 
71     [[nodiscard]] PANDA_PUBLIC_API void *Alloc(size_t size, Alignment align = DEFAULT_ARENA_ALIGNMENT);
72 
73     template <typename T, typename... Args>
New(Args &&...args)74     [[nodiscard]] std::enable_if_t<!std::is_array_v<T>, T *> New(Args &&...args)
75     {
76         auto p = reinterpret_cast<void *>(Alloc(sizeof(T)));
77         if (UNLIKELY(p == nullptr)) {
78             return nullptr;
79         }
80         new (p) T(std::forward<Args>(args)...);
81         return reinterpret_cast<T *>(p);
82     }
83 
84     template <typename T>
New(size_t size)85     [[nodiscard]] std::enable_if_t<is_unbounded_array_v<T>, std::remove_extent_t<T> *> New(size_t size)
86     {
87         static constexpr size_t SIZE_BEFORE_DATA_OFFSET =
88             AlignUp(sizeof(size_t), GetAlignmentInBytes(DEFAULT_ARENA_ALIGNMENT));
89         using ElementType = std::remove_extent_t<T>;
90         void *p = Alloc(SIZE_BEFORE_DATA_OFFSET + sizeof(ElementType) * size);
91         if (UNLIKELY(p == nullptr)) {
92             return nullptr;
93         }
94         *static_cast<size_t *>(p) = size;
95         auto *data = ToNativePtr<ElementType>(ToUintPtr(p) + SIZE_BEFORE_DATA_OFFSET);
96         ElementType *currentElement = data;
97         // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
98         for (size_t i = 0; i < size; ++i, ++currentElement) {
99             new (currentElement) ElementType();
100         }
101         return data;
102     }
103 
104     template <typename T>
105     [[nodiscard]] T *AllocArray(size_t arrLength);
106 
107     ArenaAllocatorAdapter<void, USE_OOM_HANDLER> Adapter();
108 
109     PANDA_PUBLIC_API size_t GetAllocatedSize() const;
110 
111     /**
112      * @brief Set the size of allocated memory to @param new_size.
113      *  Free all memory that exceeds @param new_size bytes in the allocator.
114      */
115     PANDA_PUBLIC_API void Resize(size_t newSize);
116 
GetAllocatorType()117     static constexpr AllocatorType GetAllocatorType()
118     {
119         return AllocatorType::ARENA_ALLOCATOR;
120     }
121 
122 protected:
123     Arena *arenas_ = nullptr;  // NOLINT(misc-non-private-member-variables-in-classes)
124 
125 private:
126     template <bool USE_ON_STACK_BUFF, typename DummyArg = void>
127     class OnStackBuffT {
128     public:
129         void *Alloc(size_t size, Alignment align = DEFAULT_ARENA_ALIGNMENT)
130         {
131             size_t freeSize = GetFreeSize();
132             void *newPos = curPos_;
133             void *ret = std::align(GetAlignmentInBytes(align), size, newPos, freeSize);
134             if (ret != nullptr) {
135                 curPos_ = static_cast<char *>(ToVoidPtr(ToUintPtr(ret) + size));
136             }
137             return ret;
138         }
139 
GetFreeSize()140         size_t GetFreeSize() const
141         {
142             return DEFAULT_ON_STACK_ARENA_ALLOCATOR_BUFF_SIZE - (curPos_ - &buff_[0]);
143         }
144 
GetOccupiedSize()145         size_t GetOccupiedSize() const
146         {
147             return curPos_ - &buff_[0];
148         }
149 
Resize(size_t newSize)150         void Resize(size_t newSize)
151         {
152             ASSERT(newSize <= GetOccupiedSize());
153             curPos_ = static_cast<char *>(ToVoidPtr(ToUintPtr(&buff_[0]) + newSize));
154         }
155 
156     private:
157         std::array<char, ON_STACK_BUFFER_SIZE> buff_ {0};
158         char *curPos_ = &buff_[0];
159     };
160 
161     template <typename DummyArg>
162     class OnStackBuffT<false, DummyArg> {
163     public:
164         void *Alloc([[maybe_unused]] size_t size, [[maybe_unused]] Alignment align = DEFAULT_ARENA_ALIGNMENT)
165         {
166             return nullptr;
167         }
168 
GetOccupiedSize()169         size_t GetOccupiedSize() const
170         {
171             return 0;
172         }
173 
Resize(size_t newSize)174         void Resize(size_t newSize)
175         {
176             (void)newSize;
177         }
178     };
179 
180     /**
181      * @brief Adds Arena from MallocMemPool and links it to active
182      * @param pool_size size of new pool
183      */
184     bool AddArenaFromPool(size_t poolSize);
185 
186     /**
187      * @brief Allocate new element.
188      * Try to allocate new element at current arena or try to add new pool to this allocator and allocate element at new
189      * pool
190      * @param size new element size
191      * @param alignment alignment of new element address
192      */
193     [[nodiscard]] void *AllocateAndAddNewPool(size_t size, Alignment alignment);
194 
AllocArenaMemStats(size_t size)195     inline void AllocArenaMemStats(size_t size)
196     {
197         if (memStats_ != nullptr) {
198             memStats_->RecordAllocateRaw(size, spaceType_);
199         }
200     }
201 
202     using OnStackBuff = OnStackBuffT<ON_STACK_ALLOCATION_ENABLED>;
203     OnStackBuff buff_;
204     BaseMemStats *memStats_;
205     SpaceType spaceType_;
206     OOMHandler oomHandler_ {nullptr};
207     bool limitAllocSizeByPool_ {false};
208 };
209 
// Convenience aliases: plain allocator, and the variant that invokes an OOM callback.
using ArenaAllocator = ArenaAllocatorT<false>;
using ArenaAllocatorWithOOMHandler = ArenaAllocatorT<true>;
212 
213 template <bool USE_OOM_HANDLER>
214 class ArenaResizeWrapper {
215 public:
ArenaResizeWrapper(ArenaAllocatorT<USE_OOM_HANDLER> * arenaAllocator)216     explicit ArenaResizeWrapper(ArenaAllocatorT<USE_OOM_HANDLER> *arenaAllocator)
217         : oldSize_(arenaAllocator->GetAllocatedSize()), allocator_(arenaAllocator)
218     {
219     }
220 
~ArenaResizeWrapper()221     ~ArenaResizeWrapper()
222     {
223         allocator_->Resize(oldSize_);
224     }
225 
226 private:
227     size_t oldSize_;
228     ArenaAllocatorT<USE_OOM_HANDLER> *allocator_;
229 
230     NO_COPY_SEMANTIC(ArenaResizeWrapper);
231     NO_MOVE_SEMANTIC(ArenaResizeWrapper);
232 };
233 
234 template <bool USE_OOM_HANDLER>
235 template <typename T>
AllocArray(size_t arrLength)236 T *ArenaAllocatorT<USE_OOM_HANDLER>::AllocArray(size_t arrLength)
237 {
238     // NOTE(Dmitrii Trubenkov): change to the proper implementation
239     return static_cast<T *>(Alloc(sizeof(T) * arrLength));
240 }
241 
242 }  // namespace ark
243 
244 WEAK_FOR_LTO_END
245 
246 #endif  // LIBPANDABASE_MEM_ARENA_ALLOCATOR_H
247