/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "runtime/mem/region_space-inl.h"
#include "runtime/mem/rem_set-inl.h"
#include "runtime/include/runtime.h"
#include "runtime/include/panda_vm.h"

namespace panda::mem {

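// Returns the number of bytes allocated in this region: the bump-pointer extent
// for ordinary regions, or the sum of the occupied sizes of all TLABs for TLAB regions.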
uint32_t Region::GetAllocatedBytes() const
{
    if (!IsTLAB()) {
        return top_ - begin_;
    }
    uint32_t allocated_bytes = 0;
    ASSERT(tlab_vector_ != nullptr);
    for (auto i : *tlab_vector_) {
        allocated_bytes += i->GetOccupiedSize();
    }
    return allocated_bytes;
}

InternalAllocatorPtr Region::GetInternalAllocator()
{
    return space_->GetPool()->GetInternalAllocator();
}

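// Lazily creates the remembered set for this region, backed by the current VM's card table.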
void Region::CreateRemSet()
{
    ASSERT(rem_set_ == nullptr);
    auto card_table = Thread::GetCurrent()->GetVM()->GetGC()->GetCardTable();
    rem_set_ = GetInternalAllocator()->New<RemSetT>(this, card_table);
}

void Region::CreateTLABSupport()
{
    ASSERT(tlab_vector_ == nullptr);
    tlab_vector_ = GetInternalAllocator()->New<PandaVector<TLAB *>>(GetInternalAllocator()->Adapter());
}

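// Returns the number of bytes still available for new TLABs: the gap between the
// end of the last TLAB (or Top() if none exist yet) and the end of the region.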
size_t Region::GetRemainingSizeForTLABs() const
{
    ASSERT(IsTLAB());
    // TLABs are allocated back to back: each new TLAB starts where the previous one ends.
    uintptr_t last_tlab_end_byte = tlab_vector_->empty() ? Top() : ToUintPtr(tlab_vector_->back()->GetEndAddr());
    ASSERT((last_tlab_end_byte <= End()) && (last_tlab_end_byte >= Top()));
    return End() - last_tlab_end_byte;
}

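// Carves a TLAB of `size` bytes out of the region's remaining space, or returns
// nullptr if it does not fit. A typical caller-side pattern might look like the
// following sketch (illustrative only; `requested_size` is a hypothetical name):
//
//   TLAB *tlab = region->CreateTLAB(requested_size);
//   if (tlab == nullptr) {
//       // Region exhausted: acquire a fresh region and retry there.
//   }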
TLAB *Region::CreateTLAB(size_t size)
{
    ASSERT(IsTLAB());
    ASSERT(Begin() != 0);
    ASSERT(Top() == Begin());
    size_t remaining_size = GetRemainingSizeForTLABs();
    if (remaining_size < size) {
        return nullptr;
    }
    ASSERT(End() > remaining_size);
    TLAB *tlab = GetInternalAllocator()->New<TLAB>(ToVoidPtr(End() - remaining_size), size);
    tlab_vector_->push_back(tlab);
    return tlab;
}

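// Allocates and zeroes a mark bitmap covering the whole region.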
MarkBitmap *Region::CreateMarkBitmap()
{
    ASSERT(mark_bitmap_ == nullptr);
    auto allocator = GetInternalAllocator();
    auto bitmap_data = allocator->Alloc(MarkBitmap::GetBitMapSizeInByte(Size()));
    ASSERT(bitmap_data != nullptr);
    mark_bitmap_ = allocator->New<MarkBitmap>(this, Size(), bitmap_data);
    ASSERT(mark_bitmap_ != nullptr);
    mark_bitmap_->ClearAllBits();
    return mark_bitmap_;
}

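// Allocates and zeroes a live bitmap; mirrors CreateMarkBitmap above.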
MarkBitmap *Region::CreateLiveBitmap()
{
    ASSERT(live_bitmap_ == nullptr);
    auto allocator = GetInternalAllocator();
    auto bitmap_data = allocator->Alloc(MarkBitmap::GetBitMapSizeInByte(Size()));
    ASSERT(bitmap_data != nullptr);
    live_bitmap_ = allocator->New<MarkBitmap>(this, Size(), bitmap_data);
    ASSERT(live_bitmap_ != nullptr);
    live_bitmap_->ClearAllBits();
    return live_bitmap_;
}

void Region::SetMarkBit(ObjectHeader *object)
{
    ASSERT(IsInRange(object));
    mark_bitmap_->Set(object);
}

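// Sums the aligned sizes of all objects set in the live bitmap.
// CalcMarkBytes below does the same over the mark bitmap.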
uint32_t Region::CalcLiveBytes() const
{
    ASSERT(live_bitmap_ != nullptr);
    uint32_t live_bytes = 0;
    live_bitmap_->IterateOverMarkedChunks<true>(
        [&live_bytes](const void *object) { live_bytes += GetAlignedObjectSize(GetObjectSize(object)); });
    return live_bytes;
}

uint32_t Region::CalcMarkBytes() const
{
    ASSERT(mark_bitmap_ != nullptr);
    uint32_t mark_bytes = 0;
    mark_bitmap_->IterateOverMarkedChunks(
        [&mark_bytes](const void *object) { mark_bytes += GetAlignedObjectSize(GetObjectSize(object)); });
    return mark_bytes;
}

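// Releases all region metadata: the remembered set, the TLAB bookkeeping vector,
// and both bitmaps together with their separately allocated backing storage.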
void Region::Destroy()
{
    auto allocator = GetInternalAllocator();
    if (rem_set_ != nullptr) {
        allocator->Delete(rem_set_);
        rem_set_ = nullptr;
    }
    if (tlab_vector_ != nullptr) {
        for (auto i : *tlab_vector_) {
            allocator->Delete(i);
        }
        allocator->Delete(tlab_vector_);
        tlab_vector_ = nullptr;
    }
    if (live_bitmap_ != nullptr) {
        allocator->Delete(live_bitmap_->GetBitMap().data());
        allocator->Delete(live_bitmap_);
        live_bitmap_ = nullptr;
    }
    if (mark_bitmap_ != nullptr) {
        allocator->Delete(mark_bitmap_->GetBitMap().data());
        allocator->Delete(mark_bitmap_);
        mark_bitmap_ = nullptr;
    }
}

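// Initializes the block over [regions_begin, regions_end): allocates the occupancy
// table with one slot per region, all slots initially free (nullptr).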
void RegionBlock::Init(uintptr_t regions_begin, uintptr_t regions_end)
{
    os::memory::LockHolder lock(lock_);
    ASSERT(occupied_.Empty());
    ASSERT(Region::IsAlignment(regions_begin, region_size_));
    ASSERT((regions_end - regions_begin) % region_size_ == 0);
    size_t num_regions = (regions_end - regions_begin) / region_size_;
    if (num_regions > 0) {
        size_t size = num_regions * sizeof(Region *);
        auto data = reinterpret_cast<Region **>(allocator_->Alloc(size));
        memset_s(data, size, 0, size);
        occupied_ = Span<Region *>(data, num_regions);
        regions_begin_ = regions_begin;
        regions_end_ = regions_end;
    }
}

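// Claims the first free slot in the occupancy table, or returns nullptr if the block is full.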
Region *RegionBlock::AllocRegion()
{
    os::memory::LockHolder lock(lock_);
    // TODO(yxr) : find an unused region, improve it
    for (size_t i = 0; i < occupied_.Size(); ++i) {
        if (occupied_[i] == nullptr) {
            auto *region = RegionAt(i);
            occupied_[i] = region;
            num_used_regions_++;
            return region;
        }
    }
    return nullptr;
}

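// Finds a run of contiguous free slots large enough for `large_region_size` bytes and
// marks every slot in the run as occupied by the region starting the run.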
Region *RegionBlock::AllocLargeRegion(size_t large_region_size)
{
    os::memory::LockHolder lock(lock_);
    // TODO(yxr) : search continuous unused regions, improve it
    size_t alloc_region_num = large_region_size / region_size_;
    size_t left = 0;
    while (left + alloc_region_num <= occupied_.Size()) {
        bool found = true;
        size_t right = left;
        while (right < left + alloc_region_num) {
            if (occupied_[right] != nullptr) {
                found = false;
                break;
            }
            ++right;
        }
        if (found) {
            // mark those regions as 'used'
            auto *region = RegionAt(left);
            for (size_t i = 0; i < alloc_region_num; i++) {
                occupied_[left + i] = region;
            }
            num_used_regions_ += alloc_region_num;
            return region;
        }
        // next round: skip past the occupied slot that broke the run
        left = right + 1;
    }
    return nullptr;
}

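// Clears every occupancy slot covered by `region` (a large region spans several)
// and optionally returns its pages to the OS.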
void RegionBlock::FreeRegion(Region *region, bool release_pages)
{
    os::memory::LockHolder lock(lock_);
    size_t region_idx = RegionIndex(region);
    size_t region_num = region->Size() / region_size_;
    ASSERT(region_idx + region_num <= occupied_.Size());
    for (size_t i = 0; i < region_num; i++) {
        ASSERT(occupied_[region_idx + i] == region);
        occupied_[region_idx + i] = nullptr;
    }
    num_used_regions_ -= region_num;
    if (release_pages) {
        os::mem::ReleasePages(ToUintPtr(region), region->End());
    }
}

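// Allocates and constructs a region of `region_size` bytes. Memory is taken from the
// pre-allocated region block when a slot is free; otherwise, if `extend_` is set, a
// new pool is mapped directly. Every new region gets a remembered set and a mark
// bitmap; non-young regions additionally get a live bitmap.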
Region *RegionPool::NewRegion(RegionSpace *space, SpaceType space_type, AllocatorType allocator_type,
                              size_t region_size, RegionFlag eden_or_old_or_nonmovable, RegionFlag properties)
{
    // check that the input region_size is a multiple of the default region size
    ASSERT(region_size % region_size_ == 0);
    ASSERT(IsYoungRegionFlag(eden_or_old_or_nonmovable) || eden_or_old_or_nonmovable == RegionFlag::IS_OLD ||
           eden_or_old_or_nonmovable == RegionFlag::IS_NONMOVABLE);

    // TODO(agrebenkin) Remove it as soon as Full gc doesn't rely on having any free regions
    // Leave enough space so that there are always some free regions in the heap which full GC can use
    if (eden_or_old_or_nonmovable == RegionFlag::IS_NONMOVABLE || region_size > region_size_) {
        constexpr int TWO = 2;
        if (!spaces_->CanAllocInSpace(false, region_size + spaces_->GetMaxYoungSize() + region_size_ * TWO)) {
            return nullptr;
        }
    }

    if (!spaces_->CanAllocInSpace(IsYoungRegionFlag(eden_or_old_or_nonmovable), region_size)) {
        return nullptr;
    }

    // 1. Get a region from the pre-allocated region block (e.g. a big mmapped contiguous space)
    void *region = nullptr;
    if (block_.GetFreeRegionsNum() > 0) {
        region = (region_size <= region_size_) ? block_.AllocRegion() : block_.AllocLargeRegion(region_size);
    }
    if (region != nullptr) {
        IsYoungRegionFlag(eden_or_old_or_nonmovable) ? spaces_->IncreaseYoungOccupiedInSharedPool(region_size)
                                                     : spaces_->IncreaseTenuredOccupiedInSharedPool(region_size);
    } else if (extend_) {  // 2. Mmap a region directly; this is more flexible for memory usage
        region = IsYoungRegionFlag(eden_or_old_or_nonmovable)
                     ? spaces_->TryAllocPoolForYoung(region_size, space_type, allocator_type, this).GetMem()
                     : spaces_->TryAllocPoolForTenured(region_size, space_type, allocator_type, this).GetMem();
    }

    if (UNLIKELY(region == nullptr)) {
        return nullptr;
    }

    ASSERT(Region::IsAlignment(ToUintPtr(region), region_size_));

    ASAN_UNPOISON_MEMORY_REGION(region, Region::HeadSize());
    auto *ret = new (region) Region(space, ToUintPtr(region) + Region::HeadSize(), ToUintPtr(region) + region_size);
    // TODO(dtrubenkov): remove this fast fixup
    TSAN_ANNOTATE_IGNORE_WRITES_BEGIN();
    ret->AddFlag(eden_or_old_or_nonmovable);
    ret->AddFlag(properties);
    ret->CreateRemSet();
    ret->CreateMarkBitmap();
    if (!IsYoungRegionFlag(eden_or_old_or_nonmovable)) {
        ret->CreateLiveBitmap();
    }
    TSAN_ANNOTATE_IGNORE_WRITES_END();
    return ret;
}

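// Returns a region either to the shared region block or, if it was mapped as its own
// pool, back to the young/tenured pool it came from.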
void RegionPool::FreeRegion(Region *region, bool release_pages)
{
    if (block_.IsAddrInRange(region)) {
        region->IsYoung() ? spaces_->ReduceYoungOccupiedInSharedPool(region->Size())
                          : spaces_->ReduceTenuredOccupiedInSharedPool(region->Size());
        block_.FreeRegion(region, release_pages);
    } else {
        region->IsYoung() ? spaces_->FreeYoungPool(region, region->Size())
                          : spaces_->FreeTenuredPool(region, region->Size());
    }
}

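// Moves a young (eden) region's accounting into tenured space and retags the region
// as promoted old space; the objects themselves are not copied.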
void RegionPool::PromoteYoungRegion(Region *region)
{
    ASSERT(region->HasFlag(RegionFlag::IS_EDEN));
    if (block_.IsAddrInRange(region)) {
        spaces_->ReduceYoungOccupiedInSharedPool(region->Size());
        spaces_->IncreaseTenuredOccupiedInSharedPool(region->Size());
    } else {
        spaces_->PromoteYoungPool(region->Size());
    }
    // Change region type
    region->AddFlag(RegionFlag::IS_PROMOTED);
    region->RmvFlag(RegionFlag::IS_EDEN);
    region->AddFlag(RegionFlag::IS_OLD);
}

bool RegionPool::HaveTenuredSize(size_t size) const
{
    return spaces_->CanAllocInSpace(GenerationalSpaces::IS_TENURED_SPACE, size);
}

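// Checks whether `num_regions` more regions of `region_size` bytes can be obtained,
// counting both free slots in the region block and pools from the mmap pool manager.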
bool RegionPool::HaveFreeRegions(size_t num_regions, size_t region_size) const
{
    if (block_.GetFreeRegionsNum() >= num_regions) {
        return true;
    }
    num_regions -= block_.GetFreeRegionsNum();
    return PoolManager::GetMmapMemPool()->HaveEnoughPoolsInObjectSpace(num_regions, region_size);
}

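// Allocates a region from the pool, poisons its payload for ASAN, and links it into
// this space's region list.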
Region *RegionSpace::NewRegion(size_t region_size, RegionFlag eden_or_old_or_nonmovable, RegionFlag properties)
{
    auto *region =
        region_pool_->NewRegion(this, space_type_, allocator_type_, region_size, eden_or_old_or_nonmovable, properties);
    if (UNLIKELY(region == nullptr)) {
        return nullptr;
    }
    ASAN_POISON_MEMORY_REGION(ToVoidPtr(region->Begin()), region->End() - region->Begin());
    regions_.push_back(region->AsListNode());
    return region;
}

void RegionSpace::FreeRegion(Region *region)
{
    ASSERT(region->GetSpace() == this);
    ASAN_POISON_MEMORY_REGION(ToVoidPtr(region->Begin()), region->End() - region->Begin());
    regions_.erase(region->AsListNode());
    DestroyRegion(region);
}

void RegionSpace::PromoteYoungRegion(Region *region)
{
    ASSERT(region->GetSpace() == this);
    ASSERT(region->HasFlag(RegionFlag::IS_EDEN));
    region_pool_->PromoteYoungRegion(region);
}

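// Frees every region owned by this space.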
void RegionSpace::FreeAllRegions()
{
    // delete all regions
    IterateRegions([this](Region *region) { FreeRegion(region); });
}

}  // namespace panda::mem