1 /*
2 * Copyright (c) 2021-2025 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "arena-inl.h"
17 #include "arena_allocator.h"
18 #include "utils/logger.h"
19 #include "pool_manager.h"
20 #include "trace/trace.h"
21 #include "mem/base_mem_stats.h"
22 #include <cstdlib>
23 #include <cstring>
24
25 namespace ark {
26
27 template <bool USE_OOM_HANDLER>
ArenaAllocatorT(SpaceType spaceType,BaseMemStats * memStats,bool limitAllocSizeByPool)28 ArenaAllocatorT<USE_OOM_HANDLER>::ArenaAllocatorT(SpaceType spaceType, BaseMemStats *memStats,
29 bool limitAllocSizeByPool)
30 : memStats_(memStats), spaceType_(spaceType), limitAllocSizeByPool_(limitAllocSizeByPool)
31 {
32 ASSERT(!USE_OOM_HANDLER);
33 if (!ON_STACK_ALLOCATION_ENABLED) {
34 arenas_ = PoolManager::AllocArena(DEFAULT_ARENA_SIZE, spaceType_, AllocatorType::ARENA_ALLOCATOR, this);
35 ASSERT(arenas_ != nullptr);
36 AllocArenaMemStats(DEFAULT_ARENA_SIZE);
37 }
38 }
39
40 template <bool USE_OOM_HANDLER>
ArenaAllocatorT(OOMHandler oomHandler,SpaceType spaceType,BaseMemStats * memStats,bool limitAllocSizeByPool)41 ArenaAllocatorT<USE_OOM_HANDLER>::ArenaAllocatorT(OOMHandler oomHandler, SpaceType spaceType, BaseMemStats *memStats,
42 bool limitAllocSizeByPool)
43 : memStats_(memStats), spaceType_(spaceType), oomHandler_(oomHandler), limitAllocSizeByPool_(limitAllocSizeByPool)
44 {
45 ASSERT(USE_OOM_HANDLER);
46 if (!ON_STACK_ALLOCATION_ENABLED) {
47 arenas_ = PoolManager::AllocArena(DEFAULT_ARENA_SIZE, spaceType_, AllocatorType::ARENA_ALLOCATOR, this);
48 ASSERT(arenas_ != nullptr);
49 AllocArenaMemStats(DEFAULT_ARENA_SIZE);
50 }
51 }
52
53 template <bool USE_OOM_HANDLER>
~ArenaAllocatorT()54 ArenaAllocatorT<USE_OOM_HANDLER>::~ArenaAllocatorT()
55 {
56 Arena *cur = arenas_;
57 while (cur != nullptr) {
58 Arena *tmp = cur->GetNextArena();
59 PoolManager::FreeArena(cur);
60 cur = tmp;
61 }
62 }
63
64 template <bool USE_OOM_HANDLER>
AllocateAndAddNewPool(size_t size,Alignment alignment)65 inline void *ArenaAllocatorT<USE_OOM_HANDLER>::AllocateAndAddNewPool(size_t size, Alignment alignment)
66 {
67 ASSERT(arenas_ != nullptr);
68 void *mem = arenas_->Alloc(size, alignment);
69 if (mem == nullptr) {
70 bool addNewPool = false;
71 if (limitAllocSizeByPool_) {
72 addNewPool = AddArenaFromPool(std::max(AlignUp(size, alignment) + sizeof(Arena), DEFAULT_ARENA_SIZE));
73 } else {
74 addNewPool = AddArenaFromPool(DEFAULT_ARENA_SIZE);
75 }
76 if (UNLIKELY(!addNewPool)) {
77 LOG(DEBUG, ALLOC) << "Can not add new pool for " << SpaceTypeToString(spaceType_);
78 return nullptr;
79 }
80 mem = arenas_->Alloc(size, alignment);
81 ASSERT(!limitAllocSizeByPool_ || mem != nullptr);
82 }
83 return mem;
84 }
85
86 template <bool USE_OOM_HANDLER>
Alloc(size_t size,Alignment align)87 void *ArenaAllocatorT<USE_OOM_HANDLER>::Alloc(size_t size, Alignment align)
88 {
89 trace::ScopedTrace scopedTrace("ArenaAllocator allocate");
90 LOG(DEBUG, ALLOC) << "ArenaAllocator: try to alloc " << size << " with align " << align;
91 void *ret = nullptr;
92 if (ON_STACK_ALLOCATION_ENABLED && UNLIKELY(!arenas_)) {
93 LOG(DEBUG, ALLOC) << "\tTry to allocate from stack";
94 ret = buff_.Alloc(size, align);
95 LOG_IF(ret, DEBUG, ALLOC) << "\tallocate from stack buffer";
96 if (ret == nullptr) {
97 ret = AllocateAndAddNewPool(size, align);
98 }
99 } else {
100 ret = AllocateAndAddNewPool(size, align);
101 }
102 // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
103 if constexpr (USE_OOM_HANDLER) {
104 if (ret == nullptr) {
105 oomHandler_();
106 }
107 }
108 LOG(DEBUG, ALLOC) << "ArenaAllocator: allocated " << size << " bytes aligned by " << align;
109 AllocArenaMemStats(size);
110 return ret;
111 }
112
template <bool USE_OOM_HANDLER>
void ArenaAllocatorT<USE_OOM_HANDLER>::Resize(size_t newSize)
{
    // Shrinks the total occupied size of the allocator down to newSize by freeing
    // whole arenas (newest first, since arenas_ is a head-linked list) and then
    // trimming the remaining head arena. Growing is not supported and is fatal.
    LOG(DEBUG, ALLOC) << "ArenaAllocator: resize to new size " << newSize;
    // NOTE(aemelenko): we have O(2n) here in the worst case
    size_t curSize = GetAllocatedSize();
    if (curSize <= newSize) {
        // Equal size: nothing to do. Larger requested size: report and abort.
        LOG_IF(curSize < newSize, FATAL, ALLOC) << "ArenaAllocator: resize to bigger size than we have. Do nothing";
        return;
    }

    size_t bytesToDelete = curSize - newSize;

    // Try to delete unused arenas
    while ((arenas_ != nullptr) && (bytesToDelete != 0)) {
        Arena *next = arenas_->GetNextArena();
        size_t curArenaSize = arenas_->GetOccupiedSize();
        if (curArenaSize < bytesToDelete) {
            // We need to free the whole arena
            PoolManager::FreeArena(arenas_);
            arenas_ = next;
            bytesToDelete -= curArenaSize;
        } else {
            // This arena covers the rest: trim it in place (possibly to zero occupancy).
            arenas_->Resize(curArenaSize - bytesToDelete);
            bytesToDelete = 0;
        }
    }
    // Any remainder must come from the on-stack buffer (only present when all
    // arenas were freed and the stack buffer holds the earliest allocations).
    if ((ON_STACK_ALLOCATION_ENABLED) && (bytesToDelete > 0)) {
        size_t stackSize = buff_.GetOccupiedSize();
        ASSERT(stackSize >= bytesToDelete);
        buff_.Resize(stackSize - bytesToDelete);
        bytesToDelete = 0;
    }
    ASSERT(bytesToDelete == 0);
}
148
149 template <bool USE_OOM_HANDLER>
AddArenaFromPool(size_t poolSize)150 bool ArenaAllocatorT<USE_OOM_HANDLER>::AddArenaFromPool(size_t poolSize)
151 {
152 ASSERT(poolSize != 0);
153 poolSize = AlignUp(poolSize, PANDA_POOL_ALIGNMENT_IN_BYTES);
154 Arena *newArena = PoolManager::AllocArena(poolSize, spaceType_, GetAllocatorType(), this);
155 if (UNLIKELY(newArena == nullptr)) {
156 return false;
157 }
158 newArena->LinkTo(arenas_);
159 arenas_ = newArena;
160 return true;
161 }
162
163 template <bool USE_OOM_HANDLER>
GetAllocatedSize() const164 size_t ArenaAllocatorT<USE_OOM_HANDLER>::GetAllocatedSize() const
165 {
166 size_t size = 0;
167 if (ON_STACK_ALLOCATION_ENABLED) {
168 size += buff_.GetOccupiedSize();
169 }
170 for (Arena *cur = arenas_; cur != nullptr; cur = cur->GetNextArena()) {
171 size += cur->GetOccupiedSize();
172 }
173 return size;
174 }
175
// Explicit instantiations for both allocator flavors (with and without an OOM handler),
// so the definitions above are emitted in this translation unit.
template class ArenaAllocatorT<true>;
template class ArenaAllocatorT<false>;
178
179 } // namespace ark
180