/**
 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef RUNTIME_MEM_PANDA_PYGOTE_SPACE_ALLOCATOR_INL_H
#define RUNTIME_MEM_PANDA_PYGOTE_SPACE_ALLOCATOR_INL_H

#include "libpandabase/utils/logger.h"
#include "runtime/include/mem/allocator.h"
#include "runtime/mem/object_helpers.h"
#include "runtime/mem/pygote_space_allocator.h"
#include "runtime/mem/runslots_allocator-inl.h"
#include "runtime/include/runtime.h"

namespace ark::mem {

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define LOG_PYGOTE_SPACE_ALLOCATOR(level) LOG(level, ALLOC) << "PygoteSpaceAllocator: "

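// Rough lifetime of the pygote space: objects created before the fork are served from a RunSlots
// allocator backed by pools; while forking, overflow allocations go to a chain of arenas. After the
// fork the space is frozen and liveness is tracked only through mark bitmaps, so the pages backing
// pygote objects can stay shared between parent and child processes.
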
template <typename AllocConfigT>
PygoteSpaceAllocator<AllocConfigT>::PygoteSpaceAllocator(MemStatsType *memStats)
    : runslotsAlloc_(memStats), memStats_(memStats)
{
    LOG_PYGOTE_SPACE_ALLOCATOR(DEBUG) << "Initializing PygoteSpaceAllocator";
}

template <typename AllocConfigT>
PygoteSpaceAllocator<AllocConfigT>::~PygoteSpaceAllocator()
{
    auto cur = arena_;
    while (cur != nullptr) {
        auto tmp = cur->GetNextArena();
        PoolManager::FreeArena(cur);
        cur = tmp;
    }
    auto allocator = Runtime::GetCurrent()->GetInternalAllocator();
    for (const auto &bitmap : liveBitmaps_) {
        allocator->Delete(bitmap->GetBitMap().data());
        allocator->Delete(bitmap);
    }
    LOG_PYGOTE_SPACE_ALLOCATOR(DEBUG) << "Destroying PygoteSpaceAllocator";
}

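// States only move forward: STATE_PYGOTE_INIT -> STATE_PYGOTE_FORKING -> STATE_PYGOTE_FORKED.
// A rough sketch of the expected driving sequence (the caller shown here is illustrative only):
//
//     allocator->SetState(STATE_PYGOTE_FORKING);  // before fork(): stop growing runslots pools
//     // ... pygote performs fork() ...
//     allocator->SetState(STATE_PYGOTE_FORKED);   // after fork(): build live bitmaps, trim pages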
template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::SetState(PygoteSpaceState newState)
{
    // must move to next state
    ASSERT(newState > state_);
    state_ = newState;

    if (state_ != STATE_PYGOTE_FORKED) {
        return;
    }
    // build bitmaps for used pools
    runslotsAlloc_.memoryPool_.VisitAllPoolsWithOccupiedSize(
        [this](void *mem, size_t usedSize, size_t) { CreateLiveBitmap(mem, usedSize); });
    runslotsAlloc_.IterateOverObjects([this](ObjectHeader *object) {
        for (auto bitmap : liveBitmaps_) {
            if (bitmap->IsAddrInRange(object)) {
                bitmap->Set(object);
                return;
            }
        }
    });

    // trim unused pages in runslots allocator
    runslotsAlloc_.TrimUnsafe();

    // only trim the last arena
    if (arena_ != nullptr && arena_->GetFreeSize() >= ark::os::mem::GetPageSize()) {
        uintptr_t start = AlignUp(ToUintPtr(arena_->GetAllocatedEnd()), ark::os::mem::GetPageSize());
        uintptr_t end = ToUintPtr(arena_->GetArenaEnd());
        os::mem::ReleasePages(start, end);
    }
}

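// Allocation is only allowed before the space is frozen (see the ASSERT below). In STATE_PYGOTE_INIT
// the runslots allocator may grow by new pools; in STATE_PYGOTE_FORKING it is used as-is and overflow
// goes to arenas, where every new object is immediately marked in the corresponding live bitmap.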
template <typename AllocConfigT>
inline void *PygoteSpaceAllocator<AllocConfigT>::Alloc(size_t size, Alignment align)
{
    ASSERT(state_ == STATE_PYGOTE_INIT || state_ == STATE_PYGOTE_FORKING);

    // allocate from runslots first; if that fails, try to allocate from a new arena
    // NOTE(yxr) : will optimize this later. Currently we use runslots as much as possible until we have a crossing
    // map or mark the card table with the object header; it also reduces the bitmap count, which shortens GC mark
    // time.
    void *obj = runslotsAlloc_.template Alloc<true, false>(size, align);
    if (obj != nullptr) {
        return obj;
    }
    if (state_ == STATE_PYGOTE_INIT) {
        // try again under the lock
        static os::memory::Mutex poolLock;
        os::memory::LockHolder lock(poolLock);
        obj = runslotsAlloc_.Alloc(size, align);
        if (obj != nullptr) {
            return obj;
        }

        auto pool = heapSpace_->TryAllocPool(RunSlotsAllocator<AllocConfigT>::GetMinPoolSize(), spaceType_,
                                             AllocatorType::RUNSLOTS_ALLOCATOR, this);
        if (UNLIKELY(pool.GetMem() == nullptr)) {
            return nullptr;
        }
        if (!runslotsAlloc_.AddMemoryPool(pool.GetMem(), pool.GetSize())) {
            LOG(FATAL, ALLOC) << "PygoteSpaceAllocator: couldn't add memory pool to object allocator";
        }
        // alloc the object again
        obj = runslotsAlloc_.Alloc(size, align);
    } else {
        if (arena_ != nullptr) {
            obj = arena_->Alloc(size, align);
        }
        if (obj == nullptr) {
            auto newArena =
                heapSpace_->TryAllocArena(DEFAULT_ARENA_SIZE, spaceType_, AllocatorType::ARENA_ALLOCATOR, this);
            if (newArena == nullptr) {
                return nullptr;
            }
            CreateLiveBitmap(newArena, DEFAULT_ARENA_SIZE);
            newArena->LinkTo(arena_);
            arena_ = newArena;
            obj = arena_->Alloc(size, align);
        }
        liveBitmaps_.back()->Set(obj); // mark live in bitmap
        AllocConfigT::OnAlloc(size, spaceType_, memStats_);
        AllocConfigT::MemoryInit(obj);
    }
    return obj;
}

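// Freeing an object that is covered by a live bitmap only clears its mark bit. After the fork nothing
// else is touched, which keeps the shared pygote pages intact; before the fork, objects still owned by
// the runslots allocator are returned to it.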
template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::Free(void *mem)
{
    if (!liveBitmaps_.empty()) {
        for (auto bitmap : liveBitmaps_) {
            if (bitmap->IsAddrInRange(mem)) {
                bitmap->Clear(mem);
                return;
            }
        }
    }

    if (state_ == STATE_PYGOTE_FORKED) {
        return;
    }

    if (runslotsAlloc_.ContainObject(reinterpret_cast<ObjectHeader *>(mem))) {
        runslotsAlloc_.Free(mem);
    }
}

template <typename AllocConfigT>
inline bool PygoteSpaceAllocator<AllocConfigT>::ContainObject(const ObjectHeader *object)
{
    // check the runslots allocator first
    if (runslotsAlloc_.ContainObject(object)) {
        return true;
    }

    // then check the arena list
    auto cur = arena_;
    while (cur != nullptr) {
        if (cur->InArena(const_cast<ObjectHeader *>(object))) {
            return true;
        }
        cur = cur->GetNextArena();
    }
    return false;
}

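// An object is live if its bit is set in the covering bitmap. Objects not covered by any bitmap are
// considered dead after the fork; before the fork the runslots allocator's own liveness check is used.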
template <typename AllocConfigT>
inline bool PygoteSpaceAllocator<AllocConfigT>::IsLive(const ObjectHeader *object)
{
    if (!liveBitmaps_.empty()) {
        for (auto bitmap : liveBitmaps_) {
            if (bitmap->IsAddrInRange(object)) {
                return bitmap->Test(object);
            }
        }
    }

    if (state_ == STATE_PYGOTE_FORKED) {
        return false;
    }

    return runslotsAlloc_.ContainObject(object) && runslotsAlloc_.IsLive(object);
}

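// Creates a mark bitmap covering [heapBegin, heapBegin + heapSize) with storage taken from the internal
// allocator; the destructor releases both the bitmap object and its backing data.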
template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::CreateLiveBitmap(void *heapBegin, size_t heapSize)
{
    auto allocator = Runtime::GetCurrent()->GetInternalAllocator();
    auto bitmapData = allocator->Alloc(MarkBitmap::GetBitMapSizeInByte(heapSize));
    ASSERT(bitmapData != nullptr);
    auto bitmap = allocator->Alloc(sizeof(MarkBitmap));
    ASSERT(bitmap != nullptr);
    auto bitmapObj = new (bitmap) MarkBitmap(heapBegin, heapSize, bitmapData);
    bitmapObj->ClearAllBits();
    liveBitmaps_.emplace_back(bitmapObj);
}

template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::ClearLiveBitmaps()
{
    for (auto bitmap : liveBitmaps_) {
        bitmap->ClearAllBits();
    }
}

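// Without bitmaps the request is forwarded to the runslots allocator; otherwise the [start, end) range
// is clamped to each bitmap's heap range and only marked objects inside the intersection are visited.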
template <typename AllocConfigT>
template <typename Visitor>
inline void PygoteSpaceAllocator<AllocConfigT>::IterateOverObjectsInRange(const Visitor &visitor, void *start,
                                                                          void *end)
{
    if (liveBitmaps_.empty()) {
        ASSERT(arena_ == nullptr);
        runslotsAlloc_.IterateOverObjectsInRange(visitor, start, end);
        return;
    }
    for (auto bitmap : liveBitmaps_) {
        auto [left, right] = bitmap->GetHeapRange();
        left = std::max(ToUintPtr(start), left);
        right = std::min(ToUintPtr(end), right);
        if (left < right) {
            bitmap->IterateOverMarkedChunkInRange(ToVoidPtr(left), ToVoidPtr(right), [&visitor](void *mem) {
                visitor(reinterpret_cast<ObjectHeader *>(mem));
            });
        }
    }
}

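// Visits every live object: marked objects from the bitmaps plus, before the fork, the objects still
// tracked only by the runslots allocator (after the fork the bitmaps already cover the runslots pools).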
template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::IterateOverObjects(const ObjectVisitor &objectVisitor)
{
    if (!liveBitmaps_.empty()) {
        for (auto bitmap : liveBitmaps_) {
            bitmap->IterateOverMarkedChunks(
                [&objectVisitor](void *mem) { objectVisitor(static_cast<ObjectHeader *>(static_cast<void *>(mem))); });
        }
        if (state_ != STATE_PYGOTE_FORKED) {
            runslotsAlloc_.IterateOverObjects(objectVisitor);
        }
    } else {
        ASSERT(arena_ == nullptr);
        runslotsAlloc_.IterateOverObjects(objectVisitor);
    }
}

template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::VisitAndRemoveAllPools(const MemVisitor &memVisitor)
{
    // this is only called when the allocator is about to be destroyed
    auto cur = arena_;
    while (cur != nullptr) {
        auto tmp = cur->GetNextArena();
        heapSpace_->FreeArena(cur);
        cur = tmp;
    }
    arena_ = nullptr; // avoid a double free in the destructor
    runslotsAlloc_.VisitAndRemoveAllPools(memVisitor);
}

template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::VisitAndRemoveFreePools(const MemVisitor &memVisitor)
{
    // after the pygote fork, we don't change the pygote space, so there are no unused pools to free
    if (state_ == STATE_PYGOTE_FORKED) {
        return;
    }

    // before the pygote fork, let the underlying allocator free its unused pools
    runslotsAlloc_.VisitAndRemoveFreePools(memVisitor);
}

template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::Collect(const GCObjectVisitor &gcVisitor)
{
    // after the pygote fork the live bitmaps have already been updated during the GC, so there is nothing to do here
    if (state_ == STATE_PYGOTE_FORKED) {
        return;
    }

    // before the pygote fork, let the underlying allocator collect garbage
    runslotsAlloc_.Collect(gcVisitor);
}

#undef LOG_PYGOTE_SPACE_ALLOCATOR

} // namespace ark::mem

#endif // RUNTIME_MEM_PANDA_PYGOTE_SPACE_ALLOCATOR_INL_H