/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef RUNTIME_MEM_PANDA_PYGOTE_SPACE_ALLOCATOR_INL_H
#define RUNTIME_MEM_PANDA_PYGOTE_SPACE_ALLOCATOR_INL_H

#include "libpandabase/utils/logger.h"
#include "runtime/include/mem/allocator.h"
#include "runtime/mem/object_helpers.h"
#include "runtime/mem/pygote_space_allocator.h"
#include "runtime/mem/runslots_allocator-inl.h"
#include "runtime/include/runtime.h"

namespace panda::mem {

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define LOG_PYGOTE_SPACE_ALLOCATOR(level) LOG(level, ALLOC) << "PygoteSpaceAllocator: "

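// Behavior summarized from this file: in STATE_PYGOTE_INIT objects come from the embedded
// RunSlotsAllocator, whose pools are requested from heapSpace_; in STATE_PYGOTE_FORKING the
// runslots allocator is still tried first, but overflow goes to an arena chain whose objects
// are tracked in liveBitmaps_; after STATE_PYGOTE_FORKED the space is frozen: no new
// allocations are accepted and Free() only clears live bits.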
template <typename AllocConfigT>
PygoteSpaceAllocator<AllocConfigT>::PygoteSpaceAllocator(MemStatsType *memStats)
    : runslotsAlloc_(memStats), memStats_(memStats)
{
    LOG_PYGOTE_SPACE_ALLOCATOR(DEBUG) << "Initializing PygoteSpaceAllocator";
}

template <typename AllocConfigT>
PygoteSpaceAllocator<AllocConfigT>::~PygoteSpaceAllocator()
{
    auto cur = arena_;
    while (cur != nullptr) {
        auto tmp = cur->GetNextArena();
        PoolManager::FreeArena(cur);
        cur = tmp;
    }
    auto allocator = Runtime::GetCurrent()->GetInternalAllocator();
    for (const auto &bitmap : liveBitmaps_) {
        allocator->Delete(bitmap->GetBitMap().data());
        allocator->Delete(bitmap);
    }
    LOG_PYGOTE_SPACE_ALLOCATOR(DEBUG) << "Destroying PygoteSpaceAllocator";
}

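// Advances the allocator to the next lifecycle state (states may only move forward).
// On reaching STATE_PYGOTE_FORKED this builds live bitmaps over all occupied runslots
// pools, marks the currently allocated objects in them, and releases unused pages
// (runslots trim plus the free tail of the last arena).
//
// Illustrative call sequence (a sketch, not taken from actual call sites):
//   pygoteAllocator->SetState(STATE_PYGOTE_FORKING);  // stop growing runslots pools; overflow goes to arenas
//   pygoteAllocator->SetState(STATE_PYGOTE_FORKED);   // freeze the space once forking is done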
template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::SetState(PygoteSpaceState newState)
{
    // must move to next state
    ASSERT(newState > state_);
    state_ = newState;

    if (state_ == STATE_PYGOTE_FORKED) {
        // build bitmaps for used pools
        runslotsAlloc_.memoryPool_.VisitAllPoolsWithOccupiedSize(
            [this](void *mem, size_t usedSize, size_t /* size */) { CreateLiveBitmap(mem, usedSize); });
        runslotsAlloc_.IterateOverObjects([this](ObjectHeader *object) {
            if (!liveBitmaps_.empty()) {
                for (auto bitmap : liveBitmaps_) {
                    if (bitmap->IsAddrInRange(object)) {
                        bitmap->Set(object);
                        return;
                    }
                }
            }
        });

        // trim unused pages in runslots allocator
        runslotsAlloc_.TrimUnsafe();

        // only trim the last arena
        if (arena_ != nullptr && arena_->GetFreeSize() >= panda::os::mem::GetPageSize()) {
            uintptr_t start = AlignUp(ToUintPtr(arena_->GetAllocatedEnd()), panda::os::mem::GetPageSize());
            uintptr_t end = ToUintPtr(arena_->GetArenaEnd());
            os::mem::ReleasePages(start, end);
        }
    }
}

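// Allocates size bytes with the requested alignment. The runslots allocator is always tried
// first. In STATE_PYGOTE_INIT a failure is retried under a lock and, if necessary, a new pool
// is requested from heapSpace_; in STATE_PYGOTE_FORKING the fallback is the current arena (or
// a newly created one with its own live bitmap), and the result is marked live in the last
// bitmap. Returns nullptr when no memory can be obtained.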
template <typename AllocConfigT>
inline void *PygoteSpaceAllocator<AllocConfigT>::Alloc(size_t size, Alignment align)
{
    ASSERT(state_ == STATE_PYGOTE_INIT || state_ == STATE_PYGOTE_FORKING);

    // allocate from the runslots allocator first; if that fails, try to allocate from a new arena
    // NOTE(yxr): will optimize this later. Currently we use runslots as much as possible until we have a crossing
    // map or mark the card table with the object header; it also reduces the bitmap count, which shortens the GC
    // mark time.
    void *obj = runslotsAlloc_.template Alloc<true, false>(size, align);
    if (obj != nullptr) {
        return obj;
    }
    if (state_ == STATE_PYGOTE_INIT) {
        // try again under the lock
        static os::memory::Mutex poolLock;
        os::memory::LockHolder lock(poolLock);
        obj = runslotsAlloc_.Alloc(size, align);
        if (obj != nullptr) {
            return obj;
        }

        auto pool = heapSpace_->TryAllocPool(RunSlotsAllocator<AllocConfigT>::GetMinPoolSize(), spaceType_,
                                             AllocatorType::RUNSLOTS_ALLOCATOR, this);
        if (UNLIKELY(pool.GetMem() == nullptr)) {
            return nullptr;
        }
        if (!runslotsAlloc_.AddMemoryPool(pool.GetMem(), pool.GetSize())) {
            LOG(FATAL, ALLOC) << "PygoteSpaceAllocator: couldn't add memory pool to object allocator";
        }
        // try the allocation again
        obj = runslotsAlloc_.Alloc(size, align);
    } else {
        if (arena_ != nullptr) {
            obj = arena_->Alloc(size, align);
        }
        if (obj == nullptr) {
            auto newArena =
                heapSpace_->TryAllocArena(DEFAULT_ARENA_SIZE, spaceType_, AllocatorType::ARENA_ALLOCATOR, this);
            if (newArena == nullptr) {
                return nullptr;
            }
            CreateLiveBitmap(newArena, DEFAULT_ARENA_SIZE);
            newArena->LinkTo(arena_);
            arena_ = newArena;
            obj = arena_->Alloc(size, align);
        }
        liveBitmaps_.back()->Set(obj);  // mark the object live in the bitmap
        AllocConfigT::OnAlloc(size, spaceType_, memStats_);
        AllocConfigT::MemoryInit(obj);
    }
    return obj;
}

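// Frees an object: if the address is covered by a live bitmap only the live bit is cleared;
// otherwise, before the fork has completed, the object is returned to the runslots allocator
// when it owns the address. After STATE_PYGOTE_FORKED no memory is actually released.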
template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::Free(void *mem)
{
    if (!liveBitmaps_.empty()) {
        for (auto bitmap : liveBitmaps_) {
            if (bitmap->IsAddrInRange(mem)) {
                bitmap->Clear(mem);
                return;
            }
        }
    }

    if (state_ == STATE_PYGOTE_FORKED) {
        return;
    }

    if (runslotsAlloc_.ContainObject(reinterpret_cast<ObjectHeader *>(mem))) {
        runslotsAlloc_.Free(mem);
    }
}

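// Returns true if the object lies in the runslots pools or in one of the arenas.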
template <typename AllocConfigT>
inline bool PygoteSpaceAllocator<AllocConfigT>::ContainObject(const ObjectHeader *object)
{
    // check the runslots allocator first
    if (runslotsAlloc_.ContainObject(object)) {
        return true;
    }

    // then check the arena list
    auto cur = arena_;
    while (cur != nullptr) {
        if (cur->InArena(const_cast<ObjectHeader *>(object))) {
            return true;
        }
        cur = cur->GetNextArena();
    }
    return false;
}

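// Returns whether the object is live: the covering live bitmap decides when one exists;
// otherwise, before the fork, the check is delegated to the runslots allocator.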
template <typename AllocConfigT>
inline bool PygoteSpaceAllocator<AllocConfigT>::IsLive(const ObjectHeader *object)
{
    if (!liveBitmaps_.empty()) {
        for (auto bitmap : liveBitmaps_) {
            if (bitmap->IsAddrInRange(object)) {
                return bitmap->Test(object);
            }
        }
    }

    if (state_ == STATE_PYGOTE_FORKED) {
        return false;
    }

    return runslotsAlloc_.ContainObject(object) && runslotsAlloc_.IsLive(object);
}

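// Creates a MarkBitmap covering [heapBegin, heapBegin + heapSize), with its bit storage taken
// from the internal allocator, clears it and appends it to liveBitmaps_; the destructor later
// releases both the bitmap object and its storage.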
template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::CreateLiveBitmap(void *heapBegin, size_t heapSize)
{
    auto allocator = Runtime::GetCurrent()->GetInternalAllocator();
    auto bitmapData = allocator->Alloc(MarkBitmap::GetBitMapSizeInByte(heapSize));
    ASSERT(bitmapData != nullptr);
    auto bitmap = allocator->Alloc(sizeof(MarkBitmap));
    ASSERT(bitmap != nullptr);
    auto bitmapObj = new (bitmap) MarkBitmap(heapBegin, heapSize, bitmapData);
    bitmapObj->ClearAllBits();
    liveBitmaps_.emplace_back(bitmapObj);
}

template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::ClearLiveBitmaps()
{
    for (auto bitmap : liveBitmaps_) {
        bitmap->ClearAllBits();
    }
}

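// Visits live objects inside [start, end). Without live bitmaps the call is forwarded to the
// runslots allocator; otherwise every bitmap is walked over the intersection of its heap
// range with the requested range.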
template <typename AllocConfigT>
template <typename Visitor>
inline void PygoteSpaceAllocator<AllocConfigT>::IterateOverObjectsInRange(const Visitor &visitor, void *start,
                                                                          void *end)
{
    if (liveBitmaps_.empty()) {
        ASSERT(arena_ == nullptr);
        runslotsAlloc_.IterateOverObjectsInRange(visitor, start, end);
        return;
    }
    for (auto bitmap : liveBitmaps_) {
        auto [left, right] = bitmap->GetHeapRange();
        left = std::max(ToUintPtr(start), left);
        right = std::min(ToUintPtr(end), right);
        if (left < right) {
            bitmap->IterateOverMarkedChunkInRange(ToVoidPtr(left), ToVoidPtr(right), [&visitor](void *mem) {
                visitor(reinterpret_cast<ObjectHeader *>(mem));
            });
        }
    }
}

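// Visits every live object: the objects marked in the live bitmaps and, until the fork has
// completed, the objects still managed by the runslots allocator.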
template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::IterateOverObjects(const ObjectVisitor &objectVisitor)
{
    if (!liveBitmaps_.empty()) {
        for (auto bitmap : liveBitmaps_) {
            bitmap->IterateOverMarkedChunks(
                [&objectVisitor](void *mem) { objectVisitor(static_cast<ObjectHeader *>(static_cast<void *>(mem))); });
        }
        if (state_ != STATE_PYGOTE_FORKED) {
            runslotsAlloc_.IterateOverObjects(objectVisitor);
        }
    } else {
        ASSERT(arena_ == nullptr);
        runslotsAlloc_.IterateOverObjects(objectVisitor);
    }
}

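// Hands every arena back to heapSpace_ and lets the runslots allocator visit and remove its pools.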
template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::VisitAndRemoveAllPools(const MemVisitor &memVisitor)
{
    // only used when the allocator is about to be destroyed
    auto cur = arena_;
    while (cur != nullptr) {
        auto tmp = cur->GetNextArena();
        heapSpace_->FreeArena(cur);
        cur = tmp;
    }
    arena_ = nullptr;  // avoid a duplicated free in the destructor
    runslotsAlloc_.VisitAndRemoveAllPools(memVisitor);
}

template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::VisitAndRemoveFreePools(const MemVisitor &memVisitor)
{
    // after the pygote fork, the pygote space is not changed, so unused pools are not freed
    if (state_ == STATE_PYGOTE_FORKED) {
        return;
    }

    // before the pygote fork, call the underlying allocator to free unused pools
    runslotsAlloc_.VisitAndRemoveFreePools(memVisitor);
}

template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::Collect(const GCObjectVisitor &gcVisitor)
{
    // the live bitmaps have already been updated during the GC, so nothing needs to be done here
    if (state_ == STATE_PYGOTE_FORKED) {
        return;
    }

    // before the pygote fork, call the underlying allocator to collect garbage
    runslotsAlloc_.Collect(gcVisitor);
}

#undef LOG_PYGOTE_SPACE_ALLOCATOR

}  // namespace panda::mem

#endif  // RUNTIME_MEM_PANDA_PYGOTE_SPACE_ALLOCATOR_INL_H