/*
 * Copyright (c) 2021 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef PANDA_LIBPANDABASE_MEM_ARENA_ALLOCATOR_H_
#define PANDA_LIBPANDABASE_MEM_ARENA_ALLOCATOR_H_

#include <array>
#include <cstdint>
#include <cstdlib>
#include <limits>
#include <utility>
#include <memory>

#include "concepts.h"
#include "mem/base_mem_stats.h"
#include "malloc_mem_pool-inl.h"
#include "mmap_mem_pool-inl.h"
#include "mem.h"
#include "mem_pool.h"
#include "arena.h"

#define USE_MMAP_POOL_FOR_ARENAS

namespace panda {

constexpr size_t DEFAULT_ARENA_SIZE = PANDA_DEFAULT_ARENA_SIZE;
constexpr Alignment DEFAULT_ARENA_ALIGNMENT = LOG_ALIGN_3;
// Size of the buffer used for on-stack allocation
constexpr size_t ON_STACK_BUFFER_SIZE = 128 * SIZE_1K;
#ifdef FORCE_ARENA_ALLOCATOR_ON_STACK_CACHE
constexpr bool ON_STACK_ALLOCATION_ENABLED = true;
#else
constexpr bool ON_STACK_ALLOCATION_ENABLED = false;
#endif

constexpr size_t DEFAULT_ON_STACK_ARENA_ALLOCATOR_BUFF_SIZE = 128 * SIZE_1K;

template <typename T, bool use_oom_handler>
class ArenaAllocatorAdapter;

template <bool use_oom_handler = false>
class ArenaAllocatorT {
public:
    using OOMHandler = std::add_pointer_t<void()>;
    template <typename T>
    using AdapterType = ArenaAllocatorAdapter<T, use_oom_handler>;

    explicit ArenaAllocatorT(SpaceType space_type, BaseMemStats *mem_stats = nullptr,
                             bool limit_alloc_size_by_pool = false);
    ArenaAllocatorT(OOMHandler oom_handler, SpaceType space_type, BaseMemStats *mem_stats = nullptr,
                    bool limit_alloc_size_by_pool = false);

    ~ArenaAllocatorT();
    ArenaAllocatorT(const ArenaAllocatorT &) = delete;
    ArenaAllocatorT(ArenaAllocatorT &&) = default;
    ArenaAllocatorT &operator=(const ArenaAllocatorT &) = delete;
    ArenaAllocatorT &operator=(ArenaAllocatorT &&) = default;

    [[nodiscard]] void *Alloc(size_t size, Alignment align = DEFAULT_ARENA_ALIGNMENT);

    template <typename T, typename... Args>
    [[nodiscard]] std::enable_if_t<!std::is_array_v<T>, T *> New(Args &&... args)
    {
        auto p = reinterpret_cast<void *>(Alloc(sizeof(T)));
        // CODECHECK-NOLINTNEXTLINE(CPP_RULE_ID_SMARTPOINTER_INSTEADOF_ORIGINPOINTER)
        new (p) T(std::forward<Args>(args)...);
        return reinterpret_cast<T *>(p);
    }

    template <typename T>
    [[nodiscard]] std::enable_if_t<is_unbounded_array_v<T>, std::remove_extent_t<T> *> New(size_t size)
    {
        static constexpr size_t SIZE_BEFORE_DATA_OFFSET =
            AlignUp(sizeof(size_t), GetAlignmentInBytes(DEFAULT_ARENA_ALIGNMENT));
        using element_type = std::remove_extent_t<T>;
        void *p = Alloc(SIZE_BEFORE_DATA_OFFSET + sizeof(element_type) * size);
        *static_cast<size_t *>(p) = size;
        auto *data = ToNativePtr<element_type>(ToUintPtr(p) + SIZE_BEFORE_DATA_OFFSET);
        element_type *current_element = data;
        // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
        for (size_t i = 0; i < size; ++i, ++current_element) {
            // CODECHECK-NOLINTNEXTLINE(CPP_RULE_ID_SMARTPOINTER_INSTEADOF_ORIGINPOINTER)
            new (current_element) element_type();
        }
        return data;
    }
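
    // Note on the two New() overloads above: the single-object form forwards its arguments to
    // T's constructor, while the unbounded-array form stores the element count in a size_t
    // header placed before the data and value-initializes every element.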

    template <typename T>
    [[nodiscard]] T *AllocArray(size_t arr_length);

    ArenaAllocatorAdapter<void, use_oom_handler> Adapter();

    size_t GetAllocatedSize() const;

    /**
     * \brief Set the size of allocated memory to \p new_size.
     * Frees all memory that exceeds \p new_size bytes in the allocator.
     */
    void Resize(size_t new_size);
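
    // Resize() is typically paired with GetAllocatedSize(): record the current size, allocate
    // temporary data, then call Resize() with the recorded value to roll those allocations back.
    // ArenaResizeWrapper below wraps this pattern in RAII form.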

    static constexpr AllocatorType GetAllocatorType()
    {
        return AllocatorType::ARENA_ALLOCATOR;
    }

protected:
    Arena *arenas_ = nullptr;  // NOLINT(misc-non-private-member-variables-in-classes)

private:
    template <bool useOnStackBuff, typename DummyArg = void>
    class OnStackBuffT {
    public:
        void *Alloc(size_t size, Alignment align = DEFAULT_ARENA_ALIGNMENT)
        {
            size_t free_size = GetFreeSize();
            void *new_pos = curPos_;
            void *ret = std::align(GetAlignmentInBytes(align), size, new_pos, free_size);
            if (ret != nullptr) {
                curPos_ = static_cast<char *>(ToVoidPtr(ToUintPtr(ret) + size));
            }
            return ret;
        }

        size_t GetFreeSize() const
        {
            return DEFAULT_ON_STACK_ARENA_ALLOCATOR_BUFF_SIZE - (curPos_ - &buff_[0]);
        }

        size_t GetOccupiedSize() const
        {
            return curPos_ - &buff_[0];
        }

        void Resize(size_t new_size)
        {
            ASSERT(new_size <= GetOccupiedSize());
            curPos_ = static_cast<char *>(ToVoidPtr(ToUintPtr(&buff_[0]) + new_size));
        }

    private:
        std::array<char, ON_STACK_BUFFER_SIZE> buff_ {0};
        char *curPos_ = &buff_[0];
    };

    template <typename DummyArg>
    class OnStackBuffT<false, DummyArg> {
    public:
        void *Alloc([[maybe_unused]] size_t size, [[maybe_unused]] Alignment align = DEFAULT_ARENA_ALIGNMENT)
        {
            return nullptr;
        }

        size_t GetOccupiedSize() const
        {
            return 0;
        }

        void Resize(size_t new_size)
        {
            (void)new_size;
        }
    };

    /**
     * \brief Adds a new Arena from the memory pool and links it as the active arena.
     * @param pool_size size of the new pool
     */
    bool AddArenaFromPool(size_t pool_size);

    /**
     * \brief Allocate a new element.
     * Tries to allocate the element in the current arena; if that fails, adds a new pool
     * to this allocator and allocates the element there.
     * @param size size of the new element
     * @param alignment alignment of the new element address
     */
    [[nodiscard]] void *AllocateAndAddNewPool(size_t size, Alignment alignment);

    inline void AllocArenaMemStats(size_t size) const
    {
        if (memStats_ != nullptr) {
            memStats_->RecordAllocateRaw(size, space_type_);
        }
    }

    using OnStackBuff = OnStackBuffT<ON_STACK_ALLOCATION_ENABLED>;
    OnStackBuff buff_;
    BaseMemStats *memStats_;
    SpaceType space_type_;
    OOMHandler oom_handler_ {nullptr};
    bool limit_alloc_size_by_pool_ {false};
};

using ArenaAllocator = ArenaAllocatorT<false>;
using ArenaAllocatorWithOOMHandler = ArenaAllocatorT<true>;

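// A minimal usage sketch of the allocator interface declared above. It assumes that SpaceType
// (declared elsewhere) provides an enumerator named SPACE_TYPE_INTERNAL; any valid SpaceType
// value would behave the same way. The function name and the sizes used are illustrative only.
inline void ArenaAllocatorUsageSketch()
{
    ArenaAllocator allocator(SpaceType::SPACE_TYPE_INTERNAL);

    // Raw storage with the default arena alignment.
    void *raw = allocator.Alloc(64U);

    // Single object: arguments are forwarded to the constructor.
    auto *numbers = allocator.New<std::pair<int, int>>(1, 2);

    // Unbounded array: the element count is stored before the data and every element is
    // value-initialized.
    int *values = allocator.New<int[]>(8U);

    // Raw array: storage only, no constructors are run.
    auto *words = allocator.AllocArray<uint64_t>(4U);

    (void)raw;
    (void)numbers;
    (void)values;
    (void)words;
    // None of these pointers is freed individually; the allocator owns all of its arenas.
}
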
template <bool use_oom_handler>
class ArenaResizeWrapper {
public:
    explicit ArenaResizeWrapper(ArenaAllocatorT<use_oom_handler> *arena_allocator)
        : old_size_(arena_allocator->GetAllocatedSize()), allocator_(arena_allocator)
    {
    }

    ~ArenaResizeWrapper()
    {
        allocator_->Resize(old_size_);
    }

private:
    size_t old_size_;
    ArenaAllocatorT<use_oom_handler> *allocator_;

    NO_COPY_SEMANTIC(ArenaResizeWrapper);
    NO_MOVE_SEMANTIC(ArenaResizeWrapper);
};
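
// A short sketch of the savepoint/rollback pattern that ArenaResizeWrapper provides: everything
// allocated while the wrapper is alive is rolled back when it goes out of scope. The function
// name and the allocation sizes are illustrative only.
template <bool use_oom_handler>
inline void ArenaResizeWrapperUsageSketch(ArenaAllocatorT<use_oom_handler> *allocator)
{
    void *persistent = allocator->Alloc(32U);  // allocated before the wrapper, so it survives
    {
        ArenaResizeWrapper<use_oom_handler> scope(allocator);
        void *temporary = allocator->Alloc(1024U);  // discarded when `scope` is destroyed
        (void)temporary;
    }
    // GetAllocatedSize() is now back to the value recorded when `scope` was constructed.
    (void)persistent;
}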

template <bool use_oom_handler>
template <typename T>
T *ArenaAllocatorT<use_oom_handler>::AllocArray(size_t arr_length)
{
    return static_cast<T *>(Alloc(sizeof(T) * arr_length));
}
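
// Note: unlike New<T[]>(), AllocArray() returns raw storage and does not run any constructors,
// so it is best suited to trivially constructible element types.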

}  // namespace panda

#endif  // PANDA_LIBPANDABASE_MEM_ARENA_ALLOCATOR_H_