/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef LIBPANDABASE_MEM_ARENA_ALLOCATOR_H
#define LIBPANDABASE_MEM_ARENA_ALLOCATOR_H

#include <array>
#include <cstdint>
#include <cstdlib>
#include <limits>
#include <utility>
#include <memory>

#include "concepts.h"
#include "mem/base_mem_stats.h"
#include "malloc_mem_pool-inl.h"
#include "mmap_mem_pool-inl.h"
#include "mem.h"
#include "mem_pool.h"
#include "arena-inl.h"

#define USE_MMAP_POOL_FOR_ARENAS

WEAK_FOR_LTO_START

namespace panda {

constexpr size_t DEFAULT_ARENA_SIZE = PANDA_DEFAULT_ARENA_SIZE;
constexpr Alignment DEFAULT_ARENA_ALIGNMENT = LOG_ALIGN_3;
// Buffer for on-stack allocation
constexpr size_t ON_STACK_BUFFER_SIZE = 128 * SIZE_1K;
#ifdef FORCE_ARENA_ALLOCATOR_ON_STACK_CACHE
constexpr bool ON_STACK_ALLOCATION_ENABLED = true;
#else
constexpr bool ON_STACK_ALLOCATION_ENABLED = false;
#endif

constexpr size_t DEFAULT_ON_STACK_ARENA_ALLOCATOR_BUFF_SIZE = 128 * SIZE_1K;

template <typename T, bool use_oom_handler>
class ArenaAllocatorAdapter;

template <bool use_oom_handler = false>
class ArenaAllocatorT {
public:
    using OOMHandler = std::add_pointer_t<void()>;
    template <typename T>
    using AdapterType = ArenaAllocatorAdapter<T, use_oom_handler>;

    explicit ArenaAllocatorT(SpaceType space_type, BaseMemStats *mem_stats = nullptr,
                             bool limit_alloc_size_by_pool = false);
    ArenaAllocatorT(OOMHandler oom_handler, SpaceType space_type, BaseMemStats *mem_stats = nullptr,
                    bool limit_alloc_size_by_pool = false);

    ~ArenaAllocatorT();
    ArenaAllocatorT(const ArenaAllocatorT &) = delete;
    ArenaAllocatorT(ArenaAllocatorT &&) = default;
    ArenaAllocatorT &operator=(const ArenaAllocatorT &) = delete;
    ArenaAllocatorT &operator=(ArenaAllocatorT &&) = default;

    [[nodiscard]] void *Alloc(size_t size, Alignment align = DEFAULT_ARENA_ALIGNMENT);

    template <typename T, typename... Args>
    [[nodiscard]] std::enable_if_t<!std::is_array_v<T>, T *> New(Args &&... args)
    {
        auto p = reinterpret_cast<void *>(Alloc(sizeof(T)));
        if (UNLIKELY(p == nullptr)) {
            return nullptr;
        }
        new (p) T(std::forward<Args>(args)...);
        return reinterpret_cast<T *>(p);
    }

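    // Usage sketch (illustrative only): allocate and construct a single object in the arena.
    // `allocator` and `MyNode` are hypothetical names; `allocator` is an ArenaAllocator
    // (alias defined below) or another ArenaAllocatorT instantiation.
    //
    //     auto *node = allocator.New<MyNode>(1, 2);   // placement-new into arena memory
    //     if (node == nullptr) { /* allocation failed */ }
    //
    // There is no per-object Delete(): memory is reclaimed via Resize() or when the allocator
    // is destroyed, and object destructors are not run by the allocator.
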
    template <typename T>
    [[nodiscard]] std::enable_if_t<is_unbounded_array_v<T>, std::remove_extent_t<T> *> New(size_t size)
    {
        static constexpr size_t SIZE_BEFORE_DATA_OFFSET =
            AlignUp(sizeof(size_t), GetAlignmentInBytes(DEFAULT_ARENA_ALIGNMENT));
        using element_type = std::remove_extent_t<T>;
        void *p = Alloc(SIZE_BEFORE_DATA_OFFSET + sizeof(element_type) * size);
        if (UNLIKELY(p == nullptr)) {
            return nullptr;
        }
        *static_cast<size_t *>(p) = size;
        auto *data = ToNativePtr<element_type>(ToUintPtr(p) + SIZE_BEFORE_DATA_OFFSET);
        element_type *current_element = data;
        // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
        for (size_t i = 0; i < size; ++i, ++current_element) {
            new (current_element) element_type();
        }
        return data;
    }

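    // Usage sketch for the unbounded-array overload (illustrative only): the element count is
    // stored just before the returned data and every element is value-initialized via placement
    // new. `allocator` is a hypothetical ArenaAllocatorT instance.
    //
    //     auto *values = allocator.New<uint32_t[]>(16U);  // 16 zero-initialized elements
    //
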
    template <typename T>
    [[nodiscard]] T *AllocArray(size_t arr_length);

    ArenaAllocatorAdapter<void, use_oom_handler> Adapter();

    size_t GetAllocatedSize() const;

    /**
     * \brief Resize the allocator so that its allocated size becomes \p new_size bytes.
     * All memory that exceeds \p new_size bytes is freed from the allocator.
     * @param new_size new allocated size in bytes
     */
    void Resize(size_t new_size);

    static constexpr AllocatorType GetAllocatorType()
    {
        return AllocatorType::ARENA_ALLOCATOR;
    }

protected:
    Arena *arenas_ = nullptr;  // NOLINT(misc-non-private-member-variables-in-classes)

private:
    template <bool useOnStackBuff, typename DummyArg = void>
    class OnStackBuffT {
    public:
        void *Alloc(size_t size, Alignment align = DEFAULT_ARENA_ALIGNMENT)
        {
            size_t free_size = GetFreeSize();
            void *new_pos = curPos_;
            void *ret = std::align(GetAlignmentInBytes(align), size, new_pos, free_size);
            if (ret != nullptr) {
                curPos_ = static_cast<char *>(ToVoidPtr(ToUintPtr(ret) + size));
            }
            return ret;
        }

        size_t GetFreeSize() const
        {
            return DEFAULT_ON_STACK_ARENA_ALLOCATOR_BUFF_SIZE - (curPos_ - &buff_[0]);
        }

        size_t GetOccupiedSize() const
        {
            return curPos_ - &buff_[0];
        }

        void Resize(size_t new_size)
        {
            ASSERT(new_size <= GetOccupiedSize());
            curPos_ = static_cast<char *>(ToVoidPtr(ToUintPtr(&buff_[0]) + new_size));
        }

    private:
        std::array<char, ON_STACK_BUFFER_SIZE> buff_ {0};
        char *curPos_ = &buff_[0];
    };

    template <typename DummyArg>
    class OnStackBuffT<false, DummyArg> {
    public:
        void *Alloc([[maybe_unused]] size_t size, [[maybe_unused]] Alignment align = DEFAULT_ARENA_ALIGNMENT)
        {
            return nullptr;
        }

        size_t GetOccupiedSize() const
        {
            return 0;
        }

        void Resize(size_t new_size)
        {
            (void)new_size;
        }
    };

    /**
     * \brief Allocates a new Arena from the memory pool and links it as the active arena.
     * @param pool_size size of the new pool
     * @return true on success, false if a new arena could not be obtained
     */
    bool AddArenaFromPool(size_t pool_size);

    /**
     * \brief Allocate a new element.
     * Tries to allocate in the current arena; if there is not enough space, adds a new pool
     * to the allocator and allocates the element in it.
     * @param size size of the new element
     * @param alignment alignment of the new element's address
     */
    [[nodiscard]] void *AllocateAndAddNewPool(size_t size, Alignment alignment);

    inline void AllocArenaMemStats(size_t size)
    {
        if (memStats_ != nullptr) {
            memStats_->RecordAllocateRaw(size, space_type_);
        }
    }

    using OnStackBuff = OnStackBuffT<ON_STACK_ALLOCATION_ENABLED>;
    OnStackBuff buff_;
    BaseMemStats *memStats_;
    SpaceType space_type_;
    OOMHandler oom_handler_ {nullptr};
    bool limit_alloc_size_by_pool_ {false};
};

using ArenaAllocator = ArenaAllocatorT<false>;
using ArenaAllocatorWithOOMHandler = ArenaAllocatorT<true>;

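// Construction sketch (illustrative only; the SpaceType enumerator below is an assumption taken
// from other libpandabase headers, not something this file defines):
//
//     ArenaAllocator allocator(SpaceType::SPACE_TYPE_COMPILER);  // mem_stats defaults to nullptr
//     void *raw = allocator.Alloc(64U, DEFAULT_ARENA_ALIGNMENT);
//
// ArenaAllocatorWithOOMHandler additionally takes an OOMHandler callback, which the allocator can
// invoke when an allocation cannot be satisfied.
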
template <bool use_oom_handler>
class ArenaResizeWrapper {
public:
    explicit ArenaResizeWrapper(ArenaAllocatorT<use_oom_handler> *arena_allocator)
        : old_size_(arena_allocator->GetAllocatedSize()), allocator_(arena_allocator)
    {
    }

    ~ArenaResizeWrapper()
    {
        allocator_->Resize(old_size_);
    }

private:
    size_t old_size_;
    ArenaAllocatorT<use_oom_handler> *allocator_;

    NO_COPY_SEMANTIC(ArenaResizeWrapper);
    NO_MOVE_SEMANTIC(ArenaResizeWrapper);
};

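// Scoped-resize sketch (illustrative only): the wrapper records the allocated size on construction
// and calls Resize() back to it on destruction, discarding everything allocated inside the scope.
// `allocator` and `TempData` are hypothetical names.
//
//     {
//         ArenaResizeWrapper<false> wrapper(&allocator);  // allocator is an ArenaAllocator
//         auto *tmp = allocator.New<TempData>();
//         // ... use tmp only within this scope ...
//     }  // allocator's size is rolled back; tmp's memory may be reused by later allocations
//
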
template <bool use_oom_handler>
template <typename T>
T *ArenaAllocatorT<use_oom_handler>::AllocArray(size_t arr_length)
{
    // TODO(Dmitrii Trubenkov): change to the proper implementation
    return static_cast<T *>(Alloc(sizeof(T) * arr_length));
}
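
// AllocArray() sketch (illustrative only): unlike New<T[]>() above, this returns raw storage and
// runs no constructors; the current implementation simply allocates sizeof(T) * arr_length bytes
// with the default arena alignment. `allocator` is a hypothetical ArenaAllocator instance.
//
//     auto *buf = allocator.AllocArray<int>(32U);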

}  // namespace panda

WEAK_FOR_LTO_END

#endif  // LIBPANDABASE_MEM_ARENA_ALLOCATOR_H