/**
 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef RUNTIME_MEM_PANDA_PYGOTE_SPACE_ALLOCATOR_INL_H
#define RUNTIME_MEM_PANDA_PYGOTE_SPACE_ALLOCATOR_INL_H

#include "libpandabase/utils/logger.h"
#include "runtime/include/mem/allocator.h"
#include "runtime/mem/object_helpers.h"
#include "runtime/mem/pygote_space_allocator.h"
#include "runtime/mem/runslots_allocator-inl.h"
#include "runtime/include/runtime.h"

namespace ark::mem {

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define LOG_PYGOTE_SPACE_ALLOCATOR(level) LOG(level, ALLOC) << "PygoteSpaceAllocator: "

template <typename AllocConfigT>
PygoteSpaceAllocator<AllocConfigT>::PygoteSpaceAllocator(MemStatsType *memStats)
    : runslotsAlloc_(memStats), memStats_(memStats)
{
    LOG_PYGOTE_SPACE_ALLOCATOR(DEBUG) << "Initializing of PygoteSpaceAllocator";
}

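// Destructor: returns all arenas in the arena list to the PoolManager and frees the live
// bitmaps (and their backing bit storage) via the runtime's internal allocator.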
template <typename AllocConfigT>
PygoteSpaceAllocator<AllocConfigT>::~PygoteSpaceAllocator()
{
    auto cur = arena_;
    while (cur != nullptr) {
        auto tmp = cur->GetNextArena();
        PoolManager::FreeArena(cur);
        cur = tmp;
    }
    auto allocator = Runtime::GetCurrent()->GetInternalAllocator();
    for (const auto &bitmap : liveBitmaps_) {
        allocator->Delete(bitmap->GetBitMap().data());
        allocator->Delete(bitmap);
    }
    LOG_PYGOTE_SPACE_ALLOCATOR(DEBUG) << "Destroying of PygoteSpaceAllocator";
}

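// Advances the allocator state (the assert enforces forward-only transitions). Once
// STATE_PYGOTE_FORKED is reached, live bitmaps are built for all occupied runslots pools,
// every existing object is marked in them, and unused pages are released back to the OS.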
template <typename AllocConfigT>
// CC-OFFNXT(G.FUD.06) perf critical
inline void PygoteSpaceAllocator<AllocConfigT>::SetState(PygoteSpaceState newState)
{
    // must move to next state
    ASSERT(newState > state_);
    state_ = newState;

    if (state_ != STATE_PYGOTE_FORKED) {
        return;
    }
    // build bitmaps for used pools
    runslotsAlloc_.memoryPool_.VisitAllPoolsWithOccupiedSize(
        [this](void *mem, size_t usedSize, size_t) { CreateLiveBitmap(mem, usedSize); });
    runslotsAlloc_.IterateOverObjects([this](ObjectHeader *object) {
        for (auto bitmap : liveBitmaps_) {
            if (bitmap->IsAddrInRange(object)) {
                bitmap->Set(object);
                return;
            }
        }
    });

    // trim unused pages in runslots allocator
    runslotsAlloc_.TrimUnsafe();

    // only trim the last arena
    if (arena_ != nullptr && arena_->GetFreeSize() >= ark::os::mem::GetPageSize()) {
        uintptr_t start = AlignUp(ToUintPtr(arena_->GetAllocatedEnd()), ark::os::mem::GetPageSize());
        uintptr_t end = ToUintPtr(arena_->GetArenaEnd());
        os::mem::ReleasePages(start, end);
    }
}

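// Allocates an object of the given size and alignment; only allowed before the pygote fork
// completes (INIT or FORKING state). The runslots allocator is tried first. In the INIT
// state new runslots pools may be added under a lock; in the FORKING state allocation falls
// back to the arena list and the new object is marked in the newest live bitmap.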
template <typename AllocConfigT>
// CC-OFFNXT(G.FUD.06) perf critical
inline void *PygoteSpaceAllocator<AllocConfigT>::Alloc(size_t size, Alignment align)
{
    ASSERT(state_ == STATE_PYGOTE_INIT || state_ == STATE_PYGOTE_FORKING);

    // alloc from runslots first; if that fails, try to alloc from a new arena
    // NOTE(yxr) : will optimize this later. Currently we use runslots as much as possible until we have a crossing
    // map or mark the card table with the object header; this also reduces the bitmap count, which shortens GC mark
    // time.
    void *obj = runslotsAlloc_.template Alloc<true, false>(size, align);
    if (obj != nullptr) {
        return obj;
    }
    if (state_ == STATE_PYGOTE_INIT) {
        // try again under the lock
        static os::memory::Mutex poolLock;
        os::memory::LockHolder lock(poolLock);
        obj = runslotsAlloc_.Alloc(size, align);
        if (obj != nullptr) {
            return obj;
        }

        auto pool = heapSpace_->TryAllocPool(RunSlotsAllocator<AllocConfigT>::GetMinPoolSize(), spaceType_,
                                             AllocatorType::RUNSLOTS_ALLOCATOR, this);
        if (UNLIKELY(pool.GetMem() == nullptr)) {
            return nullptr;
        }
        if (!runslotsAlloc_.AddMemoryPool(pool.GetMem(), pool.GetSize())) {
            LOG(FATAL, ALLOC) << "PygoteSpaceAllocator: couldn't add memory pool to object allocator";
        }
        // alloc object again
        obj = runslotsAlloc_.Alloc(size, align);
    } else {
        if (arena_ != nullptr) {
            obj = arena_->Alloc(size, align);
        }
        if (obj == nullptr) {
            auto newArena =
                heapSpace_->TryAllocArena(DEFAULT_ARENA_SIZE, spaceType_, AllocatorType::ARENA_ALLOCATOR, this);
            if (newArena == nullptr) {
                return nullptr;
            }
            CreateLiveBitmap(newArena, DEFAULT_ARENA_SIZE);
            newArena->LinkTo(arena_);
            arena_ = newArena;
            obj = arena_->Alloc(size, align);
        }
        liveBitmaps_.back()->Set(obj);  // mark live in bitmap
        AllocConfigT::OnAlloc(size, spaceType_, memStats_);
        AllocConfigT::MemoryInit(obj);
    }
    return obj;
}

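// Frees an object. If the address is covered by a live bitmap, only its mark bit is
// cleared. After the pygote fork nothing else is done; before the fork, objects still
// owned by the runslots allocator are freed there directly.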
template <typename AllocConfigT>
// CC-OFFNXT(G.FUD.06) perf critical
inline void PygoteSpaceAllocator<AllocConfigT>::Free(void *mem)
{
    for (auto bitmap : liveBitmaps_) {
        if (bitmap->IsAddrInRange(mem)) {
            bitmap->Clear(mem);
            return;
        }
    }

    if (state_ == STATE_PYGOTE_FORKED) {
        return;
    }

    if (runslotsAlloc_.ContainObject(reinterpret_cast<ObjectHeader *>(mem))) {
        runslotsAlloc_.Free(mem);
    }
}

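// Returns true if the object belongs to this allocator, i.e. to the runslots allocator or
// to one of the arenas in the arena list.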
template <typename AllocConfigT>
inline bool PygoteSpaceAllocator<AllocConfigT>::ContainObject(const ObjectHeader *object)
{
    // check the runslots allocator first
    if (runslotsAlloc_.ContainObject(object)) {
        return true;
    }

    // then check the arena list
    for (auto cur = arena_; cur != nullptr; cur = cur->GetNextArena()) {
        if (cur->InArena(const_cast<ObjectHeader *>(object))) {
            return true;
        }
    }
    return false;
}

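// Returns true if the object is marked live. Objects covered by a live bitmap are checked
// against it; before the fork, objects outside the bitmaps are delegated to the runslots
// allocator, and after the fork they are considered dead.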
template <typename AllocConfigT>
inline bool PygoteSpaceAllocator<AllocConfigT>::IsLive(const ObjectHeader *object)
{
    for (auto bitmap : liveBitmaps_) {
        if (bitmap->IsAddrInRange(object)) {
            return bitmap->Test(object);
        }
    }

    if (state_ == STATE_PYGOTE_FORKED) {
        return false;
    }

    return runslotsAlloc_.ContainObject(object) && runslotsAlloc_.IsLive(object);
}

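// Allocates a MarkBitmap and its backing bit storage from the internal allocator for the
// given heap range, clears all bits and appends the bitmap to liveBitmaps_.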
template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::CreateLiveBitmap(void *heapBegin, size_t heapSize)
{
    auto allocator = Runtime::GetCurrent()->GetInternalAllocator();
    auto bitmapData = allocator->Alloc(MarkBitmap::GetBitMapSizeInByte(heapSize));
    ASSERT(bitmapData != nullptr);
    auto bitmap = allocator->Alloc(sizeof(MarkBitmap));
    ASSERT(bitmap != nullptr);
    auto bitmapObj = new (bitmap) MarkBitmap(heapBegin, heapSize, bitmapData);
    bitmapObj->ClearAllBits();
    liveBitmaps_.emplace_back(bitmapObj);
}

template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::ClearLiveBitmaps()
{
    for (auto bitmap : liveBitmaps_) {
        bitmap->ClearAllBits();
    }
}

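// Visits live objects within the given start/end range. If no live bitmaps exist yet, the
// iteration is delegated to the runslots allocator; otherwise each bitmap's heap range is
// intersected with the requested range and marked objects in the intersection are visited.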
template <typename AllocConfigT>
template <typename Visitor>
// CC-OFFNXT(G.FUD.06) perf critical
inline void PygoteSpaceAllocator<AllocConfigT>::IterateOverObjectsInRange(const Visitor &visitor, void *start,
                                                                          void *end)
{
    if (liveBitmaps_.empty()) {
        ASSERT(arena_ == nullptr);
        runslotsAlloc_.IterateOverObjectsInRange(visitor, start, end);
        return;
    }
    for (auto bitmap : liveBitmaps_) {
        auto [left, right] = bitmap->GetHeapRange();
        left = std::max(ToUintPtr(start), left);
        right = std::min(ToUintPtr(end), right);
        if (left < right) {
            bitmap->IterateOverMarkedChunkInRange(ToVoidPtr(left), ToVoidPtr(right), [&visitor](void *mem) {
                visitor(reinterpret_cast<ObjectHeader *>(mem));
            });
        }
    }
}

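// Visits every live object owned by this allocator: all objects marked in the live bitmaps
// plus, before the fork, the objects still tracked only by the runslots allocator.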
template <typename AllocConfigT>
// CC-OFFNXT(G.FUD.06) perf critical
inline void PygoteSpaceAllocator<AllocConfigT>::IterateOverObjects(const ObjectVisitor &objectVisitor)
{
    if (!liveBitmaps_.empty()) {
        for (auto bitmap : liveBitmaps_) {
            bitmap->IterateOverMarkedChunks(
                [&objectVisitor](void *mem) { objectVisitor(static_cast<ObjectHeader *>(static_cast<void *>(mem))); });
        }
        if (state_ != STATE_PYGOTE_FORKED) {
            runslotsAlloc_.IterateOverObjects(objectVisitor);
        }
    } else {
        ASSERT(arena_ == nullptr);
        runslotsAlloc_.IterateOverObjects(objectVisitor);
    }
}

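// Frees every arena back to the heap space and then delegates pool removal to the runslots
// allocator. Intended only for allocator teardown.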
template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::VisitAndRemoveAllPools(const MemVisitor &memVisitor)
{
    // only used when the allocator is about to be destroyed
    auto cur = arena_;
    while (cur != nullptr) {
        auto tmp = cur->GetNextArena();
        heapSpace_->FreeArena(cur);
        cur = tmp;
    }
    arena_ = nullptr;  // avoid a duplicated free in the destructor
    runslotsAlloc_.VisitAndRemoveAllPools(memVisitor);
}

template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::VisitAndRemoveFreePools(const MemVisitor &memVisitor)
{
    // after the pygote fork, the pygote space is never changed, so unused pools are not freed
    if (state_ == STATE_PYGOTE_FORKED) {
        return;
    }

    // before the pygote fork, let the underlying allocator free unused pools
    runslotsAlloc_.VisitAndRemoveFreePools(memVisitor);
}

template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::Collect(const GCObjectVisitor &gcVisitor)
{
    // the live bitmaps have already been updated during the GC, so there is nothing to do here
    if (state_ == STATE_PYGOTE_FORKED) {
        return;
    }

    // before the pygote fork, let the underlying allocator collect garbage
    runslotsAlloc_.Collect(gcVisitor);
}

#undef LOG_PYGOTE_SPACE_ALLOCATOR

}  // namespace ark::mem

#endif  // RUNTIME_MEM_PANDA_PYGOTE_SPACE_ALLOCATOR_INL_H