/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef RUNTIME_MEM_PANDA_PYGOTE_SPACE_ALLOCATOR_INL_H
#define RUNTIME_MEM_PANDA_PYGOTE_SPACE_ALLOCATOR_INL_H

#include "libpandabase/utils/logger.h"
#include "runtime/include/mem/allocator.h"
#include "runtime/mem/object_helpers.h"
#include "runtime/mem/pygote_space_allocator.h"
#include "runtime/mem/runslots_allocator-inl.h"
#include "runtime/include/runtime.h"

namespace panda::mem {

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define LOG_PYGOTE_SPACE_ALLOCATOR(level) LOG(level, ALLOC) << "PygoteSpaceAllocator: "

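// PygoteSpaceAllocator manages the pygote (pre-fork) space. While the state is
// STATE_PYGOTE_INIT or STATE_PYGOTE_FORKING it serves allocations from an
// underlying RunSlotsAllocator, falling back to a list of arenas once forking
// has started. After STATE_PYGOTE_FORKED no new objects are allocated here and
// liveness is tracked only through the mark bitmaps built in SetState().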
template <typename AllocConfigT>
PygoteSpaceAllocator<AllocConfigT>::PygoteSpaceAllocator(MemStatsType *mem_stats)
    : runslots_alloc_(mem_stats), mem_stats_(mem_stats)
{
    LOG_PYGOTE_SPACE_ALLOCATOR(DEBUG) << "Initializing PygoteSpaceAllocator";
}

template <typename AllocConfigT>
PygoteSpaceAllocator<AllocConfigT>::~PygoteSpaceAllocator()
{
    // free all arenas owned by this allocator
    auto cur = arena_;
    while (cur != nullptr) {
        auto tmp = cur->GetNextArena();
        PoolManager::FreeArena(cur);
        cur = tmp;
    }
    // free the live bitmaps and their backing storage
    auto allocator = Runtime::GetCurrent()->GetInternalAllocator();
    for (const auto &bitmap : live_bitmaps_) {
        allocator->Delete(bitmap->GetBitMap().data());
        allocator->Delete(bitmap);
    }
    LOG_PYGOTE_SPACE_ALLOCATOR(DEBUG) << "Destroying PygoteSpaceAllocator";
}

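// Advance the allocator to the next state. On the transition to
// STATE_PYGOTE_FORKED the occupied runslots pools get live bitmaps, every
// object currently allocated in them is marked, unused runslots pages are
// trimmed, and the untouched tail of the last arena is returned to the OS.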
template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::SetState(PygoteSpaceState new_state)
{
    // must move to the next state
    ASSERT(new_state > state_);
    state_ = new_state;

    if (state_ == STATE_PYGOTE_FORKED) {
        // build bitmaps for used pools
        runslots_alloc_.memory_pool_.VisitAllPoolsWithOccupiedSize(
            [this](void *mem, size_t used_size, size_t /* size */) { CreateLiveBitmap(mem, used_size); });
        runslots_alloc_.IterateOverObjects([this](ObjectHeader *object) {
            if (!live_bitmaps_.empty()) {
                for (auto bitmap : live_bitmaps_) {
                    if (bitmap->IsAddrInRange(object)) {
                        bitmap->Set(object);
                        return;
                    }
                }
            }
        });

        // trim unused pages in the runslots allocator
        runslots_alloc_.TrimUnsafe();

        // only trim the last arena
        if (arena_ != nullptr && arena_->GetFreeSize() >= panda::os::mem::GetPageSize()) {
            uintptr_t start = AlignUp(ToUintPtr(arena_->GetAllocatedEnd()), panda::os::mem::GetPageSize());
            uintptr_t end = ToUintPtr(arena_->GetArenaEnd());
            os::mem::ReleasePages(start, end);
        }
    }
}

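// Allocate an object in the pygote space. Allocation is only allowed before
// the fork: in STATE_PYGOTE_INIT a failed runslots allocation tries to grow
// the runslots allocator with a new pool under a lock; in STATE_PYGOTE_FORKING
// the fallback is the arena list, and the new object is immediately marked in
// the newest live bitmap.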
template <typename AllocConfigT>
inline void *PygoteSpaceAllocator<AllocConfigT>::Alloc(size_t size, Alignment align)
{
    ASSERT(state_ == STATE_PYGOTE_INIT || state_ == STATE_PYGOTE_FORKING);

    // alloc from runslots first; if that fails, try to alloc from a new arena
    // TODO(yxr) : will optimize this later; currently we use runslots as much as possible until we have a crossing
    // map or mark the card table with the object header. It also reduces the bitmap count, which reduces GC mark time.
    void *obj = runslots_alloc_.template Alloc<true, false>(size, align);
    if (obj == nullptr) {
        if (state_ == STATE_PYGOTE_INIT) {
            // try again under the lock
            static os::memory::Mutex pool_lock;
            os::memory::LockHolder lock(pool_lock);
            obj = runslots_alloc_.Alloc(size, align);
            if (obj != nullptr) {
                return obj;
            }

            // grow the runslots allocator with a new pool and retry
            auto pool = heap_space_->TryAllocPool(RunSlotsAllocator<AllocConfigT>::GetMinPoolSize(), space_type_,
                                                  AllocatorType::RUNSLOTS_ALLOCATOR, this);
            if (UNLIKELY(pool.GetMem() == nullptr)) {
                return nullptr;
            }
            if (!runslots_alloc_.AddMemoryPool(pool.GetMem(), pool.GetSize())) {
                LOG(FATAL, ALLOC) << "PygoteSpaceAllocator: couldn't add memory pool to object allocator";
            }
            // alloc the object again
            obj = runslots_alloc_.Alloc(size, align);
        } else {
            if (arena_ != nullptr) {
                obj = arena_->Alloc(size, align);
            }
            if (obj == nullptr) {
                // the current arena is full (or absent), allocate a new one
                auto new_arena =
                    heap_space_->TryAllocArena(DEFAULT_ARENA_SIZE, space_type_, AllocatorType::ARENA_ALLOCATOR, this);
                if (new_arena == nullptr) {
                    return nullptr;
                }
                CreateLiveBitmap(new_arena, DEFAULT_ARENA_SIZE);
                new_arena->LinkTo(arena_);
                arena_ = new_arena;
                obj = arena_->Alloc(size, align);
            }
            live_bitmaps_.back()->Set(obj);  // mark live in bitmap
            AllocConfigT::OnAlloc(size, space_type_, mem_stats_);
            AllocConfigT::MemoryInit(obj, size);
        }
    }
    return obj;
}

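// Mark an object as dead. If the address is covered by a live bitmap the bit
// is simply cleared; otherwise, before the fork, the request is forwarded to
// the runslots allocator that owns the object.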
template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::Free(void *mem)
{
    if (!live_bitmaps_.empty()) {
        for (auto bitmap : live_bitmaps_) {
            if (bitmap->IsAddrInRange(mem)) {
                bitmap->Clear(mem);
                return;
            }
        }
    }

    if (state_ == STATE_PYGOTE_FORKED) {
        return;
    }

    if (runslots_alloc_.ContainObject(reinterpret_cast<ObjectHeader *>(mem))) {
        runslots_alloc_.Free(mem);
    }
}

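// Return true if the object belongs to the pygote space, i.e. to the runslots
// allocator or to one of the arenas.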
template <typename AllocConfigT>
inline bool PygoteSpaceAllocator<AllocConfigT>::ContainObject(const ObjectHeader *object)
{
    // check the runslots allocator first
    if (runslots_alloc_.ContainObject(object)) {
        return true;
    }

    // then check the arena list
    auto cur = arena_;
    while (cur != nullptr) {
        if (cur->InArena(const_cast<ObjectHeader *>(object))) {
            return true;
        }
        cur = cur->GetNextArena();
    }
    return false;
}

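// Return true if the object is still live: test the covering live bitmap if
// one exists, otherwise (before the fork) ask the runslots allocator.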
template <typename AllocConfigT>
inline bool PygoteSpaceAllocator<AllocConfigT>::IsLive(const ObjectHeader *object)
{
    if (!live_bitmaps_.empty()) {
        for (auto bitmap : live_bitmaps_) {
            if (bitmap->IsAddrInRange(object)) {
                return bitmap->Test(object);
            }
        }
    }

    if (state_ == STATE_PYGOTE_FORKED) {
        return false;
    }

    return runslots_alloc_.ContainObject(object) && runslots_alloc_.IsLive(object);
}

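// Create a MarkBitmap covering [heap_begin, heap_begin + heap_size) using the
// internal allocator for both the bitmap object and its storage, and register
// it in live_bitmaps_.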
template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::CreateLiveBitmap(void *heap_begin, size_t heap_size)
{
    auto allocator = Runtime::GetCurrent()->GetInternalAllocator();
    auto bitmap_data = allocator->Alloc(MarkBitmap::GetBitMapSizeInByte(heap_size));
    ASSERT(bitmap_data != nullptr);
    auto bitmap = allocator->Alloc(sizeof(MarkBitmap));
    ASSERT(bitmap != nullptr);
    auto bitmap_obj = new (bitmap) MarkBitmap(heap_begin, heap_size, bitmap_data);
    bitmap_obj->ClearAllBits();
    live_bitmaps_.emplace_back(bitmap_obj);
}

template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::ClearLiveBitmaps()
{
    for (auto bitmap : live_bitmaps_) {
        bitmap->ClearAllBits();
    }
}

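// Visit every live object whose address lies in [start, end). With live
// bitmaps present, each bitmap range is intersected with the requested range;
// otherwise the iteration is delegated to the runslots allocator.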
template <typename AllocConfigT>
template <typename Visitor>
inline void PygoteSpaceAllocator<AllocConfigT>::IterateOverObjectsInRange(const Visitor &visitor, void *start,
                                                                          void *end)
{
    if (!live_bitmaps_.empty()) {
        for (auto bitmap : live_bitmaps_) {
            auto [left, right] = bitmap->GetHeapRange();
            left = std::max(ToUintPtr(start), left);
            right = std::min(ToUintPtr(end), right);
            if (left < right) {
                bitmap->IterateOverMarkedChunkInRange(ToVoidPtr(left), ToVoidPtr(right), [&visitor](void *mem) {
                    visitor(reinterpret_cast<ObjectHeader *>(mem));
                });
            }
        }
    } else {
        ASSERT(arena_ == nullptr);
        runslots_alloc_.IterateOverObjectsInRange(visitor, start, end);
    }
}

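// Visit every live object in the pygote space. Bitmap-covered objects are
// visited through the bitmaps; until the fork has happened the runslots
// allocator may still hold objects without bitmap coverage, so it is iterated
// as well.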
template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::IterateOverObjects(const ObjectVisitor &object_visitor)
{
    if (!live_bitmaps_.empty()) {
        for (auto bitmap : live_bitmaps_) {
            bitmap->IterateOverMarkedChunks([&object_visitor](void *mem) {
                object_visitor(static_cast<ObjectHeader *>(static_cast<void *>(mem)));
            });
        }
        if (state_ != STATE_PYGOTE_FORKED) {
            runslots_alloc_.IterateOverObjects(object_visitor);
        }
    } else {
        ASSERT(arena_ == nullptr);
        runslots_alloc_.IterateOverObjects(object_visitor);
    }
}

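// Hand every arena back to the heap space and let the runslots allocator
// visit and remove its pools.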
template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::VisitAndRemoveAllPools(const MemVisitor &mem_visitor)
{
    // this is only used when the allocator is about to be destroyed
    auto cur = arena_;
    while (cur != nullptr) {
        auto tmp = cur->GetNextArena();
        heap_space_->FreeArena(cur);
        cur = tmp;
    }
    arena_ = nullptr;  // avoid a double free in the destructor
    runslots_alloc_.VisitAndRemoveAllPools(mem_visitor);
}

template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::VisitAndRemoveFreePools(const MemVisitor &mem_visitor)
{
    // after the pygote fork, the pygote space is not changed, so unused pools are not freed
    if (state_ == STATE_PYGOTE_FORKED) {
        return;
    }

    // before the pygote fork, let the underlying allocator free its unused pools
    runslots_alloc_.VisitAndRemoveFreePools(mem_visitor);
}

template <typename AllocConfigT>
inline void PygoteSpaceAllocator<AllocConfigT>::Collect(const GCObjectVisitor &gc_visitor)
{
    // the live bitmaps have already been updated during the GC, so there is nothing to do here
    if (state_ == STATE_PYGOTE_FORKED) {
        return;
    }

    // before the pygote fork, let the underlying allocator collect garbage
    runslots_alloc_.Collect(gc_visitor);
}

#undef LOG_PYGOTE_SPACE_ALLOCATOR

}  // namespace panda::mem

#endif  // RUNTIME_MEM_PANDA_PYGOTE_SPACE_ALLOCATOR_INL_H