1 /*
2 * Copyright (c) 2025 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #ifndef COMMON_COMPONENTS_HEAP_ALLOCATOR_REGION_INFO_H
17 #define COMMON_COMPONENTS_HEAP_ALLOCATOR_REGION_INFO_H
18
19 #include <cstdint>
20 #include <cstddef>
21 #include <list>
22 #include <map>
23 #include <set>
24 #include <thread>
25 #include <vector>
26 #include "macros.h"
27 #ifdef _WIN64
28 #include <memoryapi.h>
29 #include <errhandlingapi.h>
30 #include <handleapi.h>
31 #else
32 #include <sys/mman.h>
33 #endif
34 #include "common_components/base/globals.h"
35 #include "common_components/base/mem_utils.h"
36 #include "common_components/base/rw_lock.h"
37 #include "common_components/heap/collector/copy_data_manager.h"
38 #include "common_components/heap/collector/gc_infos.h"
39 #include "common_components/heap/collector/region_bitmap.h"
40 #include "common_components/heap/collector/region_rset.h"
41 #include "common_components/log/log.h"
42 #include "common_components/platform/map.h"
43 #include "securec.h"
44 #ifdef COMMON_ASAN_SUPPORT
45 #include "common_components/sanitizer/sanitizer_interface.h"
46 #endif
47
48 namespace common {
// Packs several small values into one integral field of type T and provides
// atomic read/modify access to individual bit ranges.
// Thread-safety: reads use acquire loads; writes use a CAS loop, so concurrent
// updates to disjoint bit ranges do not lose each other's values.
template<typename T>
class BitFields {
public:
    // pos: the position where the bit locates. It starts from 0.
    // bitLen: the length that is to be read.
    // Returns the bitLen-wide value stored at [pos, pos + bitLen).
    T AtomicGetValue(size_t pos, size_t bitLen) const
    {
        T value = __atomic_load_n(&fieldVal_, __ATOMIC_ACQUIRE);
        // Build the mask in T (not int): with a 32/64-bit T and pos + bitLen > 31,
        // an int-typed "1 << bitLen" would overflow (UB) or truncate high bits.
        T bitMask = ((static_cast<T>(1) << bitLen) - 1) << pos;
        return (value & bitMask) >> pos;
    }

    // Atomically replaces the bitLen-wide range at [pos, pos + bitLen) with
    // newValue, preserving all other bits. Retries on CAS contention.
    void AtomicSetValue(size_t pos, size_t bitLen, T newValue)
    {
        do {
            // Atomic snapshot of the current word; CAS below detects races.
            T oldValue = __atomic_load_n(&fieldVal_, __ATOMIC_ACQUIRE);
            // Mask computed in T for the same overflow reason as AtomicGetValue.
            T bitMask = ((static_cast<T>(1) << bitLen) - 1) << pos;
            T unchangedBitMask = ~bitMask;
            T newFieldValue = ((newValue << pos) & bitMask) | (oldValue & unchangedBitMask);
            if (__atomic_compare_exchange_n(&fieldVal_, &oldValue, newFieldValue, false, __ATOMIC_ACQ_REL,
                                            __ATOMIC_ACQUIRE)) {
                return;
            }
        } while (true);
    }

private:
    T fieldVal_;
};
78 // this class is the metadata of region, it contains all the information needed to manage its corresponding memory.
79 // Region memory is composed of several Units, described by UnitInfo.
80 // sizeof(RegionDesc) must be equal to sizeof(UnitInfo). We rely on this fact to calculate region-related address.
81
82 // region info is stored in the metadata of its primary unit (i.e. the first unit).
class RegionDesc {
public:
    // Forwarding/copying progress of a region during copying gc.
    // NOTE(review): "COPYED" spelling is kept as-is — the identifier is part of
    // the public interface and renaming it would break callers.
    enum RouteState : uint8_t {
        INITIAL = 0,
        COPYABLE,
        ROUTING,
        ROUTED,
        COMPACTED,
        COPYED,
    };

    // default common region unit size.
    static constexpr size_t UNIT_SIZE = 256 * KB;

    // threshold above which an object gets a region of its own (large object).
    static constexpr size_t LARGE_OBJECT_DEFAULT_THRESHOLD = UNIT_SIZE * 2 / 3;

    // release a large object when the size is greater than 4096KB.
    static constexpr size_t LARGE_OBJECT_RELEASE_THRESHOLD = 4096 * KB;

    // mask for the offset of an address inside one unit (UNIT_SIZE is a power of two).
    static constexpr size_t DEFAULT_REGION_UNIT_MASK = RegionDesc::UNIT_SIZE - 1;
104
RegionDesc()105 RegionDesc()
106 {
107 metadata.allocPtr = reinterpret_cast<uintptr_t>(nullptr);
108 metadata.markingLine = std::numeric_limits<uintptr_t>::max();
109 metadata.forwardLine = std::numeric_limits<uintptr_t>::max();
110 metadata.freeSlot = nullptr;
111 metadata.regionBase = reinterpret_cast<uintptr_t>(nullptr);
112 metadata.regionStart = reinterpret_cast<uintptr_t>(nullptr);
113 metadata.regionEnd = reinterpret_cast<uintptr_t>(nullptr);
114 metadata.regionRSet = nullptr;
115 }
    // Shared sentinel descriptor used where a non-null RegionDesc* is required.
    static inline RegionDesc* NullRegion()
    {
        static RegionDesc nullRegion;
        return &nullRegion;
    }

    // Write-protects the whole region memory [base, end).
    // NOTE(review): 1 is presumably a read-only protection flag for PageProtect —
    // confirm against common_components/platform/map.h. Failure is only logged.
    void SetReadOnly()
    {
        constexpr int pageProtRead = 1;
        DLOG(REPORT, "try to set readonly to %p, size is %ld", GetRegionBase(), GetRegionBaseSize());
        if (PageProtect(reinterpret_cast<void *>(GetRegionBase()), GetRegionBaseSize(), pageProtRead) != 0) {
            DLOG(REPORT, "set read only fail");
        }
    }

    // Restores read/write access to the whole region memory [base, end).
    // NOTE(review): 3 is presumably read|write for PageProtect — confirm in platform/map.h.
    void ClearReadOnly()
    {
        constexpr int pageProtReadWrite = 3;
        DLOG(REPORT, "try to set read & write to %p, size is %ld", GetRegionBase(), GetRegionBaseSize());
        if (PageProtect(reinterpret_cast<void *>(GetRegionBase()), GetRegionBaseSize(), pageProtReadWrite) != 0) {
            DLOG(REPORT, "clear read only fail");
        }
    }

    // Returns the mark bitmap, or nullptr when it does not exist yet or another
    // thread is still allocating it (slot holds the TEMPORARY_PTR placeholder).
    RegionBitmap *GetMarkBitmap()
    {
        RegionBitmap *bitmap = __atomic_load_n(&metadata.liveInfo_.markBitmap_, std::memory_order_acquire);
        if (reinterpret_cast<HeapAddress>(bitmap) == RegionLiveDesc::TEMPORARY_PTR) {
            return nullptr;
        }
        return bitmap;
    }
148
    // Returns the mark bitmap, allocating it on first use.
    // Lock-free protocol: the winner of the CAS publishes TEMPORARY_PTR as an
    // in-progress placeholder, performs the (possibly slow) allocation, then
    // publishes the real pointer with a release store. Losers spin until the
    // placeholder is replaced. Exactly one bitmap is ever allocated per region.
    ALWAYS_INLINE RegionBitmap* GetOrAllocMarkBitmap()
    {
        do {
            RegionBitmap *bitmap = __atomic_load_n(&metadata.liveInfo_.markBitmap_, std::memory_order_acquire);
            // Another thread is mid-allocation: spin until it publishes the result.
            if (UNLIKELY_CC(reinterpret_cast<uintptr_t>(bitmap) == RegionLiveDesc::TEMPORARY_PTR)) {
                continue;
            }
            if (LIKELY_CC(bitmap != nullptr)) {
                return bitmap;
            }
            RegionBitmap *newValue = reinterpret_cast<RegionBitmap *>(RegionLiveDesc::TEMPORARY_PTR);
            // CAS nullptr -> TEMPORARY_PTR claims the right to allocate.
            if (__atomic_compare_exchange_n(&metadata.liveInfo_.markBitmap_, &bitmap, newValue, false,
                std::memory_order_seq_cst, std::memory_order_relaxed)) {
                RegionBitmap *allocated =
                    HeapBitmapManager::GetHeapBitmapManager().AllocateRegionBitmap(GetRegionBaseSize());
                // Release store pairs with the acquire load above.
                __atomic_store_n(&metadata.liveInfo_.markBitmap_, allocated, std::memory_order_release);
                DLOG(REGION, "region %p(base=%#zx)@%#zx liveinfo %p alloc markbitmap %p",
                    this, GetRegionBase(), GetRegionStart(), &metadata.liveInfo_, metadata.liveInfo_.markBitmap_);
                return allocated;
            }
        } while (true);

        // Unreachable: the loop only exits via return.
        return nullptr;
    }
173
    // Returns the resurrect bitmap, or nullptr when absent or still being
    // allocated (TEMPORARY_PTR placeholder). See GetOrAllocMarkBitmap for the protocol.
    RegionBitmap *GetResurrectBitmap()
    {
        RegionBitmap *bitmap = __atomic_load_n(&metadata.liveInfo_.resurrectBitmap_, std::memory_order_acquire);
        if (reinterpret_cast<HeapAddress>(bitmap) == RegionLiveDesc::TEMPORARY_PTR) {
            return nullptr;
        }
        return bitmap;
    }

    // Returns the resurrect bitmap, allocating it on first use.
    // Same lock-free placeholder protocol as GetOrAllocMarkBitmap.
    RegionBitmap *GetOrAllocResurrectBitmap()
    {
        do {
            RegionBitmap *bitmap = __atomic_load_n(&metadata.liveInfo_.resurrectBitmap_, std::memory_order_acquire);
            // Another thread holds the allocation claim: spin until published.
            if (UNLIKELY_CC(reinterpret_cast<uintptr_t>(bitmap) == RegionLiveDesc::TEMPORARY_PTR)) {
                continue;
            }
            if (LIKELY_CC(bitmap != nullptr)) {
                return bitmap;
            }
            RegionBitmap *newValue = reinterpret_cast<RegionBitmap *>(RegionLiveDesc::TEMPORARY_PTR);
            if (__atomic_compare_exchange_n(&metadata.liveInfo_.resurrectBitmap_, &bitmap, newValue, false,
                std::memory_order_seq_cst, std::memory_order_relaxed)) {
                RegionBitmap *allocated =
                    HeapBitmapManager::GetHeapBitmapManager().AllocateRegionBitmap(GetRegionBaseSize());
                __atomic_store_n(&metadata.liveInfo_.resurrectBitmap_, allocated, std::memory_order_release);
                DLOG(REGION, "region %p(base=%#zx)@%#zx liveinfo %p alloc resurrectbitmap %p",
                    this, GetRegionBase(), GetRegionStart(), &metadata.liveInfo_,
                    metadata.liveInfo_.resurrectBitmap_);
                return allocated;
            }
        } while (true);

        // Unreachable: the loop only exits via return.
        return nullptr;
    }

    // Returns the enqueue bitmap, or nullptr when absent or still being allocated.
    RegionBitmap* GetEnqueueBitmap()
    {
        RegionBitmap *bitmap = __atomic_load_n(&metadata.liveInfo_.enqueueBitmap_, std::memory_order_acquire);
        if (reinterpret_cast<HeapAddress>(bitmap) == RegionLiveDesc::TEMPORARY_PTR) {
            return nullptr;
        }
        return bitmap;
    }

    // Returns the enqueue bitmap, allocating it on first use.
    // Same lock-free placeholder protocol as GetOrAllocMarkBitmap.
    RegionBitmap* GetOrAllocEnqueueBitmap()
    {
        do {
            RegionBitmap *bitmap = __atomic_load_n(&metadata.liveInfo_.enqueueBitmap_, std::memory_order_acquire);
            if (UNLIKELY_CC(reinterpret_cast<uintptr_t>(bitmap) == RegionLiveDesc::TEMPORARY_PTR)) {
                continue;
            }
            if (LIKELY_CC(bitmap != nullptr)) {
                return bitmap;
            }
            RegionBitmap* newValue = reinterpret_cast<RegionBitmap *>(RegionLiveDesc::TEMPORARY_PTR);
            if (__atomic_compare_exchange_n(&metadata.liveInfo_.enqueueBitmap_, &bitmap, newValue, false,
                std::memory_order_seq_cst, std::memory_order_relaxed)) {
                RegionBitmap *allocated =
                    HeapBitmapManager::GetHeapBitmapManager().AllocateRegionBitmap(GetRegionBaseSize());
                __atomic_store_n(&metadata.liveInfo_.enqueueBitmap_, allocated, std::memory_order_release);
                DLOG(REGION, "region %p(base=%#zx)@%#zx liveinfo %p alloc enqueuebitmap %p",
                    this, GetRegionBase(), GetRegionStart(), &metadata.liveInfo_, metadata.liveInfo_.enqueueBitmap_);
                return allocated;
            }
        } while (true);

        // Unreachable: the loop only exits via return.
        return nullptr;
    }
242
    // Clears the three per-region mark flags (marked / enqueued / resurrected)
    // used by large regions, which track liveness per-region instead of per-object.
    void ResetMarkBit()
    {
        SetMarkedRegionFlag(0);
        SetEnqueuedRegionFlag(0);
        SetResurrectedRegionFlag(0);
    }

    // Marks a large region as live. Returns the previous marked state (true if
    // it was already marked). obj is unused: a large region holds a single
    // object, so the region-level flag is sufficient.
    NO_INLINE bool MarkObjectForLargeRegion(const BaseObject* obj)
    {
        if (metadata.regionBits.AtomicGetValue(RegionBitOffset::BIT_OFFSET_MARKED_REGION, 1) != 1) {
            SetMarkedRegionFlag(1);
            return false;
        }
        return true;
    }

    // Marks obj live. Returns whether obj was already marked (true = no-op).
    // Large regions use the region flag; others set a bit in the mark bitmap.
    ALWAYS_INLINE bool MarkObject(const BaseObject* obj)
    {
        if (IsLargeRegion()) {
            return MarkObjectForLargeRegion(obj);
        }
        size_t offset = GetAddressOffset(reinterpret_cast<HeapAddress>(obj));
        bool marked = GetOrAllocMarkBitmap()->MarkBits(offset);
        DCHECK_CC(IsMarkedObject(obj));
        return marked;
    }

    // Records obj in the resurrect bitmap (finalizable-object tracking).
    // Returns whether obj was already recorded.
    // NOTE(review): "Resurrent" spelling kept — renaming would break callers.
    bool ResurrentObject(const BaseObject* obj)
    {
        if (IsLargeRegion()) {
            if (metadata.regionBits.AtomicGetValue(RegionBitOffset::BIT_OFFSET_RESURRECTED_REGION, 1) != 1) {
                SetResurrectedRegionFlag(1);
                return false;
            }
            return true;
        }
        size_t offset = GetAddressOffset(reinterpret_cast<HeapAddress>(obj));
        bool marked = GetOrAllocResurrectBitmap()->MarkBits(offset);
        CHECK_CC(IsResurrectedObject(obj));
        return marked;
    }

    // Records obj in the enqueue bitmap. Returns whether it was already recorded.
    bool EnqueueObject(const BaseObject* obj)
    {
        if (IsLargeRegion()) {
            if (metadata.regionBits.AtomicGetValue(RegionBitOffset::BIT_OFFSET_ENQUEUED_REGION, 1) != 1) {
                SetEnqueuedRegionFlag(1);
                return false;
            }
            return true;
        }
        size_t offset = GetAddressOffset(reinterpret_cast<HeapAddress>(obj));
        bool marked = GetOrAllocEnqueueBitmap()->MarkBits(offset);
        CHECK_CC(IsEnqueuedObject(obj));
        return marked;
    }
298
IsResurrectedObject(const BaseObject * obj)299 bool IsResurrectedObject(const BaseObject* obj)
300 {
301 if (IsLargeRegion()) {
302 return (metadata.regionBits.AtomicGetValue(RegionBitOffset::BIT_OFFSET_RESURRECTED_REGION, 1) == 1);
303 }
304 RegionBitmap* resurrectBitmap = GetResurrectBitmap();
305 if (resurrectBitmap == nullptr) {
306 return false;
307 }
308 size_t offset = GetAddressOffset(reinterpret_cast<HeapAddress>(obj));
309 return resurrectBitmap->IsMarked(offset);
310 }
311
IsMarkedObject(const BaseObject * obj)312 bool IsMarkedObject(const BaseObject* obj)
313 {
314 if (IsLargeRegion()) {
315 return (metadata.regionBits.AtomicGetValue(RegionBitOffset::BIT_OFFSET_MARKED_REGION, 1) == 1);
316 }
317 RegionBitmap* markBitmap = GetMarkBitmap();
318 if (markBitmap == nullptr) {
319 return false;
320 }
321 size_t offset = GetAddressOffset(reinterpret_cast<HeapAddress>(obj));
322 return markBitmap->IsMarked(offset);
323 }
324
IsEnqueuedObject(const BaseObject * obj)325 bool IsEnqueuedObject(const BaseObject* obj)
326 {
327 if (IsLargeRegion()) {
328 return (metadata.regionBits.AtomicGetValue(RegionBitOffset::BIT_OFFSET_ENQUEUED_REGION, 1) == 1);
329 }
330 RegionBitmap* enqueBitmap = GetEnqueueBitmap();
331 if (enqueBitmap == nullptr) {
332 return false;
333 }
334 size_t offset = GetAddressOffset(reinterpret_cast<HeapAddress>(obj));
335 return enqueBitmap->IsMarked(offset);
336 }
337
    // Returns the region's remember set (may be nullptr after InitFreeUnits).
    RegionRSet* GetRSet()
    {
        return metadata.regionRSet;
    }

    // Clears every card in the remember set. Precondition: regionRSet != nullptr.
    void ClearRSet()
    {
        metadata.regionRSet->ClearCardTable();
    }

    // Marks the remember-set card covering obj; returns MarkCardTable's result.
    bool MarkRSetCardTable(BaseObject* obj)
    {
        size_t offset = GetAddressOffset(reinterpret_cast<HeapAddress>(obj));
        return GetRSet()->MarkCardTable(offset);
    }

    // Byte offset of address from the region base. Uses the cached base
    // (GetRegionBaseFast), so the descriptor must already be initialized.
    ALWAYS_INLINE_CC size_t GetAddressOffset(HeapAddress address)
    {
        DCHECK_CC(GetRegionBaseFast() <= address);
        return (address - GetRegionBaseFast());
    }
359
    // Role a unit plays inside a region. The head unit's role defines the
    // region kind; trailing units of a multi-unit region are SUBORDINATE_UNIT.
    enum class UnitRole : uint8_t {
        // for the head unit
        FREE_UNITS = 0,
        SMALL_SIZED_UNITS,
        LARGE_SIZED_UNITS,

        SUBORDINATE_UNIT,
    };

    // region is and must be one of following types during its whole lifecycle.
    // one-to-one mapping to region-lists.

    enum class RegionType : uint8_t {
        FREE_REGION,
        GARBAGE_REGION,

        // ************************boundary of dead region and alive region**************************
        // Everything from THREAD_LOCAL_REGION onward counts as alive
        // (see IsAliveRegionType / ALIVE_REGION_FIRST below).

        THREAD_LOCAL_REGION,
        RECENT_FULL_REGION,
        FROM_REGION,
        EXEMPTED_FROM_REGION,
        LONE_FROM_REGION,
        TO_REGION,
        OLD_REGION,
        THREAD_LOCAL_OLD_REGION,

        // pinned object will not be forwarded by concurrent copying gc.
        FULL_PINNED_REGION,
        RECENT_PINNED_REGION,
        FIXED_PINNED_REGION,
        FULL_FIXED_PINNED_REGION,

        // region for raw-pointer objects which are exposed to runtime thus can not be moved by any gc.
        // raw-pointer region becomes pinned region when none of its member objects are used as raw pointer.
        RAW_POINTER_REGION,

        // allocation context is able and responsible to determine whether it is safe to be collected.
        TL_RAW_POINTER_REGION,

        RECENT_LARGE_REGION,
        LARGE_REGION,

        READ_ONLY_REGION,
        APPSPAWN_REGION,

        END_OF_REGION_TYPE,

        // Alias marking the first alive type; ordering above is load-bearing.
        ALIVE_REGION_FIRST = THREAD_LOCAL_REGION,
    };
410
IsAliveRegionType(RegionType type)411 static bool IsAliveRegionType(RegionType type)
412 {
413 return static_cast<uint8_t>(type) >= static_cast<uint8_t>(RegionType::ALIVE_REGION_FIRST);
414 }
415
IsInRecentSpace(RegionType type)416 static bool IsInRecentSpace(RegionType type)
417 {
418 return type == RegionType::THREAD_LOCAL_REGION || type == RegionType::RECENT_FULL_REGION;
419 }
420
IsInYoungSpaceForWB(RegionType type)421 static bool IsInYoungSpaceForWB(RegionType type)
422 {
423 return type == RegionType::THREAD_LOCAL_REGION || type == RegionType::RECENT_FULL_REGION ||
424 type == RegionType::FROM_REGION;
425 }
426
IsInYoungSpace(RegionType type)427 static bool IsInYoungSpace(RegionType type)
428 {
429 return type == RegionType::THREAD_LOCAL_REGION || type == RegionType::RECENT_FULL_REGION ||
430 type == RegionType::FROM_REGION || type == RegionType::EXEMPTED_FROM_REGION;
431 }
432
IsInFromSpace(RegionType type)433 static bool IsInFromSpace(RegionType type)
434 {
435 return type == RegionType::FROM_REGION || type == RegionType::EXEMPTED_FROM_REGION;
436 }
437
IsInToSpace(RegionType type)438 static bool IsInToSpace(RegionType type)
439 {
440 return type == RegionType::TO_REGION;
441 }
442
IsInOldSpace(RegionType type)443 static bool IsInOldSpace(RegionType type)
444 {
445 return type == RegionType::OLD_REGION;
446 }
447
    // One-time global setup: records the unit count, the start of the unit
    // metadata table, and the heap base used by the address arithmetic below.
    static void Initialize(size_t nUnit, uintptr_t regionInfoAddr, uintptr_t heapAddress)
    {
        UnitInfo::totalUnitCount = nUnit;
        UnitInfo::unitInfoStart = regionInfoAddr;
        UnitInfo::heapStartAddress = heapAddress;
    }

    // Descriptor of the region whose primary unit has index idx.
    // Must not be called on a subordinate (non-head) unit.
    static RegionDesc* GetRegionDesc(uint32_t idx)
    {
        UnitInfo* unit = RegionDesc::UnitInfo::GetUnitInfo(idx);
        DCHECK_CC((reinterpret_cast<uintptr_t>(unit) % 8) == 0); // 8: Align with 8
        DCHECK_CC(static_cast<UnitRole>(unit->GetMetadata().unitRole) != UnitRole::SUBORDINATE_UNIT);
        return reinterpret_cast<RegionDesc*>(unit);
    }

    // Descriptor of the region containing heap address allocAddr.
    // Unit metadata lives BELOW heapStartAddress, one UnitInfo per unit, growing
    // downward: unit k's metadata is at heapStartAddress - (k + 1) * sizeof(RegionDesc).
    static RegionDesc* GetRegionDescAt(uintptr_t allocAddr)
    {
        ASSERT_LOGF(Heap::IsHeapAddress(allocAddr), "Cannot get region info of a non-heap object");
        UnitInfo* unit = reinterpret_cast<UnitInfo*>(UnitInfo::heapStartAddress -
                                                     (((allocAddr - UnitInfo::heapStartAddress) / UNIT_SIZE) + 1) *
                                                     sizeof(RegionDesc));
        DCHECK_CC((reinterpret_cast<uintptr_t>(unit) % 8) == 0); // 8: Align with 8
        DCHECK_CC(static_cast<UnitRole>(unit->GetMetadata().unitRole) != UnitRole::SUBORDINATE_UNIT);
        return reinterpret_cast<RegionDesc*>(unit);
    }

    // This could only used for surely alive region, such as from interpreter,
    // because ONLY alive region have `InlinedRegionMetaData`
    static RegionDesc* GetAliveRegionDescAt(uintptr_t allocAddr)
    {
        // only alive region have `InlinedRegionMetaData`.
        DCHECK_CC(IsAliveRegionType(GetRegionDescAt(allocAddr)->GetRegionType()));
        InlinedRegionMetaData *metaData = InlinedRegionMetaData::GetInlinedRegionMetaData(allocAddr);
        UnitInfo *unit = reinterpret_cast<UnitInfo*>(metaData->regionDesc_);
        // Fast path must agree with the metadata-table computation.
        DCHECK_CC(reinterpret_cast<RegionDesc *>(unit) == GetRegionDescAt(allocAddr));
        DCHECK_CC((reinterpret_cast<uintptr_t>(unit) % 8) == 0); // 8: Align with 8
        DCHECK_CC(static_cast<UnitRole>(unit->GetMetadata().unitRole) != UnitRole::SUBORDINATE_UNIT);
        return reinterpret_cast<RegionDesc*>(unit);
    }

    // Re-initializes nUnit units starting at unitIdx as a free region.
    static void InitFreeRegion(size_t unitIdx, size_t nUnit)
    {
        RegionDesc* region = reinterpret_cast<RegionDesc*>(RegionDesc::UnitInfo::GetUnitInfo(unitIdx));
        region->InitRegionDesc(nUnit, UnitRole::FREE_UNITS);
    }

    // Resets an existing region in place to a new unit count and role.
    static RegionDesc* ResetRegion(size_t unitIdx, size_t nUnit, RegionDesc::UnitRole uclass)
    {
        RegionDesc* region = reinterpret_cast<RegionDesc*>(RegionDesc::UnitInfo::GetUnitInfo(unitIdx));
        region->ResetRegion(nUnit, uclass);
        return region;
    }

    // Fully initializes a region spanning nUnit units starting at unitIdx.
    static RegionDesc* InitRegion(size_t unitIdx, size_t nUnit, RegionDesc::UnitRole uclass)
    {
        RegionDesc* region = reinterpret_cast<RegionDesc*>(RegionDesc::UnitInfo::GetUnitInfo(unitIdx));
        region->InitRegion(nUnit, uclass);
        return region;
    }

    // As InitRegion, but addressed by a heap address instead of a unit index.
    static RegionDesc* InitRegionAt(uintptr_t addr, size_t nUnit, RegionDesc::UnitRole uclass)
    {
        size_t idx = RegionDesc::UnitInfo::GetUnitIdxAt(addr);
        return InitRegion(idx, nUnit, uclass);
    }

    // Heap address of the first byte of unit unitIdx.
    static HeapAddress GetUnitAddress(size_t unitIdx) { return UnitInfo::GetUnitAddress(unitIdx); }

    // Zero-fills cnt units starting at idx (memory stays committed).
    static void ClearUnits(size_t idx, size_t cnt)
    {
        uintptr_t unitAddress = RegionDesc::GetUnitAddress(idx);
        size_t size = cnt * RegionDesc::UNIT_SIZE;
        DLOG(REGION, "clear dirty units[%zu+%zu, %zu) @[%#zx+%zu, %#zx)", idx, cnt, idx + cnt, unitAddress, size,
             RegionDesc::GetUnitAddress(idx + cnt));
        MemorySet(unitAddress, size, 0, size);
    }
524
    // Returns the physical pages of cnt units starting at idx to the OS while
    // keeping the virtual range reserved, with per-platform mechanisms:
    // Windows decommits, macOS zero-fills then madvises, others madvise only.
    static void ReleaseUnits(size_t idx, size_t cnt)
    {
        void* unitAddress = reinterpret_cast<void*>(RegionDesc::GetUnitAddress(idx));
        size_t size = cnt * RegionDesc::UNIT_SIZE;
        DLOG(REGION, "release physical memory for units [%zu+%zu, %zu) @[%p+%zu, 0x%zx)", idx, cnt, idx + cnt,
             unitAddress, size, RegionDesc::GetUnitAddress(idx + cnt));
#if defined(_WIN64)
        LOGE_IF(UNLIKELY_CC(!VirtualFree(unitAddress, size, MEM_DECOMMIT))) <<
            "VirtualFree failed in ReturnPage, errno: " << GetLastError();

#elif defined(__APPLE__)
        // macOS: MADV_DONTNEED does not guarantee zeroed pages on re-touch,
        // so the range is explicitly cleared first.
        MemorySet(reinterpret_cast<uintptr_t>(unitAddress), size, 0, size);
        (void)madvise(unitAddress, size, MADV_DONTNEED);
#else
        (void)madvise(unitAddress, size, MADV_DONTNEED);
#endif
#ifdef COMMON_ASAN_SUPPORT
        Sanitizer::OnHeapMadvise(unitAddress, size);
#endif
#ifdef USE_HWASAN
        // Poison the released range so any stray access is caught by HWASan.
        ASAN_POISON_MEMORY_REGION(unitAddress, size);
        const uintptr_t pSize = size;
        LOG_COMMON(DEBUG) << std::hex << "set [" << unitAddress << ", " <<
            (reinterpret_cast<uintptr_t>(unitAddress) + pSize) << ") poisoned\n";
#endif
    }
551
    // Usable size: [regionStart, regionEnd). regionStart may sit above the
    // region base (e.g. when inline metadata occupies the front of the region).
    size_t GetRegionSize() const
    {
        DCHECK_CC(GetRegionEnd() > GetRegionStart());
        return GetRegionEnd() - GetRegionStart();
    }

    // Full size of the underlying memory: [regionBase, regionEnd).
    size_t GetRegionBaseSize() const
    {
        DCHECK_CC(GetRegionEnd() > GetRegionBase());
        return GetRegionEnd() - GetRegionBase();
    }

    // Number of UNIT_SIZE units backing this region.
    size_t GetUnitCount() const { return GetRegionBaseSize() / UNIT_SIZE; }

    // Bytes still available for bump-pointer allocation (small regions only).
    size_t GetAvailableSize() const
    {
        ASSERT_LOGF(IsSmallRegion(), "wrong region type");
        return GetRegionEnd() - GetRegionAllocPtr();
    }

    // Bytes already handed out by the bump pointer.
    size_t GetRegionAllocatedSize() const { return GetRegionAllocPtr() - GetRegionStart(); }

#if defined(GCINFO_DEBUG) && GCINFO_DEBUG
    void DumpRegionDesc(LogType type) const;
    const char* GetTypeName() const;
#endif

    // Object/remember-set iteration; bodies are defined out of line.
    void VisitAllObjectsWithFixedSize(size_t cellCount, const std::function<void(BaseObject*)>&& func);
    void VisitAllObjects(const std::function<void(BaseObject*)>&& func);
    void VisitAllObjectsBeforeCopy(const std::function<void(BaseObject*)>&& func);
    bool VisitLiveObjectsUntilFalse(const std::function<bool(BaseObject*)>&& func);

    void VisitRememberSetBeforeMarking(const std::function<void(BaseObject*)>& func);
    void VisitRememberSetBeforeCopy(const std::function<void(BaseObject*)>& func);
    void VisitRememberSet(const std::function<void(BaseObject*)>& func);
587
588 // reset so that this region can be reused for allocation
    // reset so that this region can be reused for allocation
    // Destroys the remember set and downgrades every backing unit to a free unit.
    void InitFreeUnits()
    {
        if (metadata.regionRSet != nullptr) {
            RegionRSet::DestroyRegionRSet(metadata.regionRSet);
            metadata.regionRSet = nullptr;
        }
        size_t nUnit = GetUnitCount();
        // Unit metadata grows downward: this descriptor occupies the HIGHEST
        // UnitInfo slot of its units, so the run starts (nUnit - 1) slots below.
        UnitInfo* unit = reinterpret_cast<UnitInfo*>(this) - (nUnit - 1);
        for (size_t i = 0; i < nUnit; ++i) {
            unit[i].ToFreeRegion();
        }
    }

    // Drops all liveness data (bitmaps and live byte count) after a gc cycle.
    void ClearLiveInfo()
    {
        DCHECK_CC(metadata.liveInfo_.relatedRegion_ == this);
        metadata.liveInfo_.ClearLiveInfo();
        DLOG(REGION, "region %p(base=%#zx)@%#zx+%zu clear liveinfo %p type %u",
             this, GetRegionBase(), GetRegionStart(), GetRegionSize(), &metadata.liveInfo_, GetRegionType());
        metadata.liveByteCount = 0;
    }
610
611 // These interfaces are used to make sure the writing operations of value in C++ Bit Field will be atomic.
    // Stores the unit role in the low 5 bits of the unit bit-field.
    void SetUnitRole(UnitRole role)
    {
        metadata.unitBits.AtomicSetValue(0, BITS_5, static_cast<uint8_t>(role));
    }

    // Stores the region type (5 bits) and, for alive regions, mirrors it into
    // the inlined metadata so GetAliveRegionType can read it without the table.
    void SetRegionType(RegionType type)
    {
        metadata.regionBits.AtomicSetValue(RegionBitOffset::BIT_OFFSET_REGION_TYPE,
                                           BITS_5, static_cast<uint8_t>(type));
        if (IsAliveRegionType(type)) {
            InlinedRegionMetaData::GetInlinedRegionMetaData(this)->SetRegionType(type);
        }
    }

    // Region-level liveness flags used by large (single-object) regions.
    void SetMarkedRegionFlag(uint8_t flag)
    {
        metadata.regionBits.AtomicSetValue(RegionBitOffset::BIT_OFFSET_MARKED_REGION, 1, flag);
    }
    void SetEnqueuedRegionFlag(uint8_t flag)
    {
        metadata.regionBits.AtomicSetValue(RegionBitOffset::BIT_OFFSET_ENQUEUED_REGION, 1, flag);
    }
    void SetResurrectedRegionFlag(uint8_t flag)
    {
        metadata.regionBits.AtomicSetValue(RegionBitOffset::BIT_OFFSET_RESURRECTED_REGION, 1, flag);
    }

    // Cell count for fixed-size (pinned) regions; value must fit in 7 bits.
    void SetRegionCellCount(uint8_t cellCount)
    {
        // 7: region cell count is 7 bits.
        metadata.regionBits.AtomicSetValue(RegionBitOffset::BIT_OFFSET_REGION_CELLCOUNT, 7, cellCount);
    }

    uint16_t GetRegionCellCount()
    {
        // 7: region cell count is 7 bits.
        return metadata.regionBits.AtomicGetValue(RegionBitOffset::BIT_OFFSET_REGION_CELLCOUNT, 7);
    }

    // Flag for JitFort regions awaiting code installation.
    void SetJitFortAwaitInstallFlag(uint8_t flag)
    {
        metadata.regionBits.AtomicSetValue(RegionBitOffset::BIT_OFFSET_IS_JITFORT_AWAIT_INSTALL, 1, flag);
    }

    bool IsJitFortAwaitInstallFlag()
    {
        return metadata.regionBits.AtomicGetValue(RegionBitOffset::BIT_OFFSET_IS_JITFORT_AWAIT_INSTALL, 1);
    }

    RegionType GetRegionType() const
    {
        return static_cast<RegionType>(metadata.regionBits.AtomicGetValue(RegionBitOffset::BIT_OFFSET_REGION_TYPE,
                                                                          BITS_5));
    }

    // Fast type lookup via inlined metadata; valid only for alive regions.
    static RegionType GetAliveRegionType(uintptr_t allocAddr)
    {
        // only alive region have `InlinedRegionMetaData`.
        DCHECK_CC(IsAliveRegionType(GetRegionDescAt(allocAddr)->GetRegionType()));
        InlinedRegionMetaData *metaData = InlinedRegionMetaData::GetInlinedRegionMetaData(allocAddr);
        return metaData->GetRegionType();
    }
673
    // Role of this region's head unit.
    UnitRole GetUnitRole() const { return static_cast<UnitRole>(metadata.unitRole); }

    // Index of this region's primary unit in the unit metadata table.
    size_t GetUnitIdx() const { return RegionDesc::UnitInfo::GetUnitIdx(reinterpret_cast<const UnitInfo*>(this)); }

    // First byte of the region's memory, computed from the unit index.
    HeapAddress GetRegionBase() const { return RegionDesc::GetUnitAddress(GetUnitIdx()); }

    // This could only used to a `RegionDesc` which has been initialized
    // (returns the cached base instead of recomputing it).
    HeapAddress GetRegionBaseFast() const
    {
        ASSERT(metadata.regionBase == GetRegionBase());
        return metadata.regionBase;
    }

    HeapAddress GetRegionStart() const { return metadata.regionStart; }

    HeapAddress GetRegionEnd() const { return metadata.regionEnd; }

    void SetRegionAllocPtr(HeapAddress addr) { metadata.allocPtr = addr; }

    HeapAddress GetRegionAllocPtr() const { return metadata.allocPtr; }

    // Objects at or above these lines are "new" since marking / forwarding
    // started; max() means the line has not been set this cycle.
    HeapAddress GetMarkingLine() const { return metadata.markingLine; }
    HeapAddress GetCopyLine() const { return metadata.forwardLine; }

    // Latches the current alloc pointer as the marking line (first call only).
    void SetMarkingLine()
    {
        if (metadata.markingLine == std::numeric_limits<uintptr_t>::max()) {
            uintptr_t line = GetRegionAllocPtr();
            metadata.markingLine = line;
            DLOG(REGION, "set region %p(base=%#zx)@%#zx+%zu marking-line %#zx type %u",
                 this, GetRegionBase(), GetRegionStart(), GetRegionSize(), GetMarkingLine(), GetRegionType());
        }
    }

    // Latches the current alloc pointer as the copy (forward) line (first call only).
    void SetCopyLine()
    {
        if (metadata.forwardLine == std::numeric_limits<uintptr_t>::max()) {
            uintptr_t line = GetRegionAllocPtr();
            metadata.forwardLine = line;
            DLOG(REGION, "set region %p(base=%#zx)@%#zx+%zu copy-line %#zx type %u",
                 this, GetRegionBase(), GetRegionStart(), GetRegionSize(), GetCopyLine(), GetRegionType());
        }
    }

    // Re-arms both lines (back to the "not set" sentinel) for the next cycle.
    void ClearMarkingCopyLine()
    {
        metadata.markingLine = std::numeric_limits<uintptr_t>::max();
        metadata.forwardLine = std::numeric_limits<uintptr_t>::max();
    }

    // Detaches the pinned-object free list head.
    void ClearFreeSlot()
    {
        metadata.freeSlot = nullptr;
    }
728
IsNewObjectSinceMarking(const BaseObject * obj)729 bool IsNewObjectSinceMarking(const BaseObject* obj)
730 {
731 return GetMarkingLine() <= reinterpret_cast<uintptr_t>(obj);
732 }
733
IsNewObjectSinceForward(const BaseObject * obj)734 bool IsNewObjectSinceForward(const BaseObject* obj)
735 {
736 return GetCopyLine() <= reinterpret_cast<uintptr_t>(obj);
737 }
738
IsNewRegionSinceForward()739 bool IsNewRegionSinceForward() const
740 {
741 return GetCopyLine() == GetRegionStart();
742 }
743
IsInRecentSpace()744 bool IsInRecentSpace() const
745 {
746 RegionType type = GetRegionType();
747 return RegionDesc::IsInRecentSpace(type);
748 }
749
IsInYoungSpace()750 bool IsInYoungSpace() const
751 {
752 RegionType type = GetRegionType();
753 return RegionDesc::IsInYoungSpace(type);
754 }
755
IsInFromSpace()756 bool IsInFromSpace() const
757 {
758 RegionType type = GetRegionType();
759 return RegionDesc::IsInFromSpace(type);
760 }
761
IsInToSpace()762 bool IsInToSpace() const
763 {
764 RegionType type = GetRegionType();
765 return RegionDesc::IsInToSpace(type);
766 }
767
IsInOldSpace()768 bool IsInOldSpace() const
769 {
770 RegionType type = GetRegionType();
771 return RegionDesc::IsInOldSpace(type);
772 }
773
    // Atomically increments the count of objects exposed as raw pointers and
    // returns the previous value. Aborts on negative count or overflow.
    int32_t IncRawPointerObjectCount()
    {
        int32_t oldCount = __atomic_fetch_add(&metadata.rawPointerObjectCount, 1, __ATOMIC_SEQ_CST);
        LOGF_CHECK(oldCount >= 0) << "region " << this << " has wrong raw pointer count " << oldCount;
        LOGF_CHECK(oldCount < MAX_RAW_POINTER_COUNT) << "inc raw-pointer-count overflow";
        return oldCount;
    }

    // Atomically decrements the raw-pointer count; returns the previous value.
    // Aborts on underflow (a releaseRawData without a matching acquire).
    int32_t DecRawPointerObjectCount()
    {
        int32_t oldCount = __atomic_fetch_sub(&metadata.rawPointerObjectCount, 1, __ATOMIC_SEQ_CST);
        LOGF_CHECK(oldCount > 0) << "dec raw-pointer-count underflow, please check whether releaseRawData is overused.";
        return oldCount;
    }

    int32_t GetRawPointerObjectCount() const
    {
        return __atomic_load_n(&metadata.rawPointerObjectCount, __ATOMIC_SEQ_CST);
    }

    // CAS on the raw-pointer count; true iff the swap succeeded.
    bool CompareAndSwapRawPointerObjectCount(int32_t expectVal, int32_t newVal)
    {
        return __atomic_compare_exchange_n(&metadata.rawPointerObjectCount, &expectVal, newVal, false, __ATOMIC_SEQ_CST,
                                           __ATOMIC_ACQUIRE);
    }
799
Alloc(size_t size)800 uintptr_t Alloc(size_t size)
801 {
802 DCHECK_CC(size > 0);
803 size_t limit = GetRegionEnd();
804 if (metadata.allocPtr + size <= limit) {
805 uintptr_t addr = metadata.allocPtr;
806 metadata.allocPtr += size;
807 return addr;
808 } else {
809 return 0;
810 }
811 }
812
813 // for regions shared by multithreads
    // for regions shared by multithreads
    // Atomic bump-pointer allocation: fetch-add claims [addr, addr + size);
    // returns addr on success, 0 when the region is exhausted.
    uintptr_t AtomicAlloc(size_t size)
    {
        uintptr_t addr = __atomic_fetch_add(&metadata.allocPtr, size, __ATOMIC_ACQ_REL);
        // should not check allocPtr, because it might be shared
        if ((addr < GetRegionEnd()) && (size <= GetRegionEnd() - addr)) {
            return addr;
        }
        // Failed: try to roll the pointer back so it does not creep past end.
        // NOTE(review): this store can race with a concurrent fetch-add and lose
        // that thread's claim — presumably benign because both allocations fail
        // past the end; confirm with the allocator's retry logic.
        if (addr <= GetRegionEnd()) {
            __atomic_store_n(&metadata.allocPtr, addr, __ATOMIC_SEQ_CST);
        }
        return 0;
    }
826
827 // copyable during concurrent copying gc.
IsSmallRegion()828 bool IsSmallRegion() const { return static_cast<UnitRole>(metadata.unitRole) == UnitRole::SMALL_SIZED_UNITS; }
829
IsLargeRegion()830 ALWAYS_INLINE bool IsLargeRegion() const
831 {
832 return static_cast<UnitRole>(metadata.unitRole) == UnitRole::LARGE_SIZED_UNITS;
833 }
834
IsFixedRegion()835 bool IsFixedRegion() const
836 {
837 return (GetRegionType() == RegionType::FIXED_PINNED_REGION) ||
838 (GetRegionType() == RegionType::FULL_FIXED_PINNED_REGION);
839 }
840
IsThreadLocalRegion()841 bool IsThreadLocalRegion() const
842 {
843 return GetRegionType() == RegionType::THREAD_LOCAL_REGION ||
844 GetRegionType() == RegionType::THREAD_LOCAL_OLD_REGION;
845 }
846
IsPinnedRegion()847 bool IsPinnedRegion() const
848 {
849 return (GetRegionType() == RegionType::FULL_PINNED_REGION) ||
850 (GetRegionType() == RegionType::RECENT_PINNED_REGION);
851 }
852
IsReadOnlyRegion()853 bool IsReadOnlyRegion() const
854 {
855 return GetRegionType() == RegionType::READ_ONLY_REGION;
856 }
857
GetPrevRegion()858 RegionDesc* GetPrevRegion() const
859 {
860 if (UNLIKELY_CC(metadata.prevRegionIdx == NULLPTR_IDX)) {
861 return nullptr;
862 }
863 return reinterpret_cast<RegionDesc*>(UnitInfo::GetUnitInfo(metadata.prevRegionIdx));
864 }
865
// Return a dead pinned object of `cellCount` cells to this region's free
// list so its storage can be reused by later pinned allocations.
// Returns false if the object is already on the free list.
bool CollectPinnedGarbage(BaseObject* obj, size_t cellCount)
{
    std::lock_guard<std::mutex> lg(metadata.regionMutex);
    if (IsFreePinnedObject(obj)) {
        return false;
    }
    // Slot covers cellCount payload cells plus one header cell, 8 bytes each.
    size_t size = (cellCount + 1) * sizeof(uint64_t);
    // Reuse the object's own storage as the free-list node (LIFO push).
    ObjectSlot* head = reinterpret_cast<ObjectSlot*>(obj);
    head->SetNext(metadata.freeSlot, size);
    metadata.freeSlot = head;
    return true;
}
878
GetFreeSlot()879 HeapAddress GetFreeSlot()
880 {
881 if (metadata.freeSlot == nullptr) {
882 return 0;
883 }
884 ObjectSlot* res = metadata.freeSlot;
885 metadata.freeSlot = reinterpret_cast<ObjectSlot*>(res->next_);
886 res->next_ = 0;
887 res->isFree_ = 0;
888 return reinterpret_cast<HeapAddress>(res);
889 }
890
// Allocate a pinned object slot from this region's free list, falling back
// to the free lists of the regions linked after it.
// Returns 0 when no region in the chain has a free slot.
HeapAddress AllocPinnedFromFreeList()
{
    // Serializes against CollectPinnedGarbage() on this region.
    // NOTE(review): only THIS region's mutex is held, yet the loop below pops
    // from successor regions' free lists too -- confirm all mutators of those
    // lists synchronize on the list head's mutex.
    std::lock_guard<std::mutex> lg(metadata.regionMutex);
    HeapAddress addr = GetFreeSlot();
    if (addr == 0) {
        RegionDesc* region = GetNextRegion();
        do {
            if (region == nullptr) {
                break;
            }
            addr = region->GetFreeSlot();
            region = region->GetNextRegion();
        } while (addr == 0);
    }
    return addr;
}
907
IsFreePinnedObject(BaseObject * object)908 bool IsFreePinnedObject(BaseObject* object)
909 {
910 ObjectSlot* slot = reinterpret_cast<ObjectSlot*>(object);
911 return slot->isFree_;
912 }
913
SetPrevRegion(const RegionDesc * r)914 void SetPrevRegion(const RegionDesc* r)
915 {
916 if (UNLIKELY_CC(r == nullptr)) {
917 metadata.prevRegionIdx = NULLPTR_IDX;
918 return;
919 }
920 size_t prevIdx = r->GetUnitIdx();
921 ASSERT_LOGF(prevIdx < NULLPTR_IDX, "exceeds the maxinum limit for region info");
922 metadata.prevRegionIdx = static_cast<uint32_t>(prevIdx);
923 }
924
GetNextRegion()925 RegionDesc* GetNextRegion() const
926 {
927 if (UNLIKELY_CC(metadata.nextRegionIdx == NULLPTR_IDX)) {
928 return nullptr;
929 }
930 DCHECK_CC(metadata.nextRegionIdx < UnitInfo::totalUnitCount);
931 return reinterpret_cast<RegionDesc*>(UnitInfo::GetUnitInfo(metadata.nextRegionIdx));
932 }
933
SetNextRegion(const RegionDesc * r)934 void SetNextRegion(const RegionDesc* r)
935 {
936 if (UNLIKELY_CC(r == nullptr)) {
937 metadata.nextRegionIdx = NULLPTR_IDX;
938 return;
939 }
940 size_t nextIdx = r->GetUnitIdx();
941 ASSERT_LOGF(nextIdx < NULLPTR_IDX, "exceeds the maxinum limit for region info");
942 metadata.nextRegionIdx = static_cast<uint32_t>(nextIdx);
943 }
944
IsFromRegion()945 bool IsFromRegion() const { return GetRegionType() == RegionType::FROM_REGION; }
946
IsAppSpawnRegion()947 bool IsAppSpawnRegion() const { return GetRegionType() == RegionType::APPSPAWN_REGION; }
948
IsUnmovableFromRegion()949 bool IsUnmovableFromRegion() const
950 {
951 return GetRegionType() == RegionType::EXEMPTED_FROM_REGION ||
952 GetRegionType() == RegionType::RAW_POINTER_REGION;
953 }
954
IsToRegion()955 bool IsToRegion() const { return GetRegionType() == RegionType::TO_REGION; }
IsOldRegion()956 bool IsOldRegion() const { return GetRegionType() == RegionType::OLD_REGION; }
957
IsGarbageRegion()958 bool IsGarbageRegion() const { return GetRegionType() == RegionType::GARBAGE_REGION; }
IsFreeRegion()959 bool IsFreeRegion() const { return static_cast<UnitRole>(metadata.unitRole) == UnitRole::FREE_UNITS; }
960
IsValidRegion()961 bool IsValidRegion() const
962 {
963 return static_cast<UnitRole>(metadata.unitRole) == UnitRole::SMALL_SIZED_UNITS ||
964 static_cast<UnitRole>(metadata.unitRole) == UnitRole::LARGE_SIZED_UNITS;
965 }
966
GetLiveByteCount()967 uint32_t GetLiveByteCount() const { return metadata.liveByteCount; }
968
// Reset the live-byte tally to zero.
// NOTE(review): plain store, while AddLiveByteCount() uses an atomic RMW --
// presumably only called when no concurrent counting runs; confirm.
void ResetLiveByteCount() { metadata.liveByteCount = 0; }
970
AddLiveByteCount(uint32_t count)971 void AddLiveByteCount(uint32_t count)
972 {
973 (void)__atomic_fetch_add(&metadata.liveByteCount, count, __ATOMIC_ACQ_REL);
974 }
975
RemoveFromList()976 void RemoveFromList()
977 {
978 RegionDesc* prev = GetPrevRegion();
979 RegionDesc* next = GetNextRegion();
980 if (prev != nullptr) {
981 prev->SetNextRegion(next);
982 }
983 if (next != nullptr) {
984 next->SetPrevRegion(prev);
985 }
986 this->SetNextRegion(nullptr);
987 this->SetPrevRegion(nullptr);
988 }
989
// Byte offset of `allocPtr` inside UnitMetadata.
static constexpr size_t GetAllocPtrOffset()
{
    return offsetof(UnitMetadata, allocPtr);
}
994
// Byte offset of `regionEnd` inside UnitMetadata.
static constexpr size_t GetRegionEndOffset()
{
    return offsetof(UnitMetadata, regionEnd);
}
999
private:
    // Visit every object laid out before address `end`; defined out of line.
    void VisitAllObjectsBefore(const std::function<void(BaseObject*)>&& func, uintptr_t end);

    static constexpr int32_t MAX_RAW_POINTER_COUNT = std::numeric_limits<int32_t>::max();
    // Bit-field widths used by the metadata words below.
    static constexpr int32_t BITS_4 = 4;
    static constexpr int32_t BITS_5 = 5;

    // Bit positions inside UnitMetadata::regionBits (regionType occupies bits 0-4).
    enum RegionBitOffset : uint8_t {
        BIT_OFFSET_REGION_TYPE = 0,
        // use mark-bitmap pointer instead
        BIT_OFFSET_MARKED_REGION = BITS_5,
        BIT_OFFSET_ENQUEUED_REGION = 6,
        BIT_OFFSET_RESURRECTED_REGION = 7,
        BIT_OFFSET_FIXED_REGION = 8,
        // cellCount occupies bits 9-15 (7 bits), so the next flag starts at 16.
        BIT_OFFSET_REGION_CELLCOUNT = 9,
        BIT_OFFSET_IS_JITFORT_AWAIT_INSTALL = 16,
    };
1017
1018 struct ObjectSlot {
1019 HeapAddress next_ : 48;
1020 HeapAddress isFree_ : 1;
1021 HeapAddress padding : 15;
1022
SetNextObjectSlot1023 void SetNext(ObjectSlot* slot, size_t size)
1024 {
1025 next_ = reinterpret_cast<HeapAddress>(slot);
1026 isFree_ = 1;
1027 size_t extraSize = size - sizeof(ObjectSlot);
1028 if (extraSize > 0) {
1029 uintptr_t start = reinterpret_cast<uintptr_t>(this) + sizeof(ObjectSlot);
1030 LOGE_IF((memset_s(reinterpret_cast<void*>(start), extraSize, 0, extraSize) != EOK)) << "memset_s fail";
1031 }
1032 }
1033 };
1034
1035 class RegionLiveDesc {
1036 public:
1037 static constexpr HeapAddress TEMPORARY_PTR = 0x1234;
1038
Init(RegionDesc * region)1039 void Init(RegionDesc *region)
1040 {
1041 relatedRegion_ = region;
1042 ClearLiveInfo();
1043 }
1044
Fini()1045 void Fini()
1046 {
1047 relatedRegion_ = nullptr;
1048 ClearLiveInfo();
1049 }
1050
ClearLiveInfo()1051 void ClearLiveInfo()
1052 {
1053 markBitmap_ = nullptr;
1054 resurrectBitmap_ = nullptr;
1055 enqueueBitmap_ = nullptr;
1056 }
1057 private:
1058 RegionDesc *relatedRegion_ {nullptr};
1059 RegionBitmap *markBitmap_ {nullptr};
1060 RegionBitmap *resurrectBitmap_ {nullptr};
1061 RegionBitmap *enqueueBitmap_ {nullptr};
1062
1063 friend class RegionDesc;
1064 };
1065
1066 struct UnitMetadata {
1067 struct { // basic data for RegionDesc
1068 // for fast allocation, always at the start.
1069 uintptr_t allocPtr;
1070 uintptr_t regionEnd;
1071
1072 // watermark set when gc phase transitions to pre-marking.
1073 uintptr_t markingLine;
1074 uintptr_t forwardLine;
1075 ObjectSlot* freeSlot;
1076 // `regionStart` is the header of the data, and `regionBase` is the header of the total region
1077 /**
1078 * | *********************************Region*******************************|
1079 * | InlinedRegionMetaData | *****************Region data******************|
1080 * ^ ^
1081 * | |
1082 * regionBase regionStart
1083 */
1084 uintptr_t regionStart;
1085 uintptr_t regionBase;
1086
1087 uint32_t nextRegionIdx;
1088 uint32_t prevRegionIdx; // support fast deletion for region list.
1089
1090 uint32_t liveByteCount;
1091 int32_t rawPointerObjectCount;
1092 };
1093
1094 RegionLiveDesc liveInfo_ {};
1095
1096 RegionRSet* regionRSet = nullptr;;
1097
1098 // the writing operation in C++ Bit-Field feature is not atomic, the we wants to
1099 // change the value, we must use specific interface implenmented by BitFields.
1100 union {
1101 struct {
1102 uint8_t unitRole : BITS_5;
1103 };
1104 BitFields<uint8_t> unitBits;
1105 };
1106
1107 // the writing operation in C++ Bit-Field feature is not atomic, the we wants to
1108 // change the value, we must use specific interface implenmented by BitFields.
1109 union {
1110 struct {
1111 RegionType regionType : BITS_5;
1112
1113 // true if this unit belongs to a ghost region, which is an unreal region for keeping reclaimed
1114 // from-region. ghost region is set up to memorize a from-region before from-space is forwarded. this
1115 // flag is cleared when ghost-from-space is cleared. Note this flag is essentially important for
1116 // FindToVersion().
1117 uint8_t isMarked : 1;
1118 uint8_t isEnqueued : 1;
1119 uint8_t isResurrected : 1;
1120 uint8_t isFixed : 1;
1121 uint8_t cellCount : 7;
1122 // Only valid in huge region. To mark the JitFort code await for install.
1123 // An awaiting JitFort does not hold valid data on and no parent reference, but considered as alive.
1124 uint8_t isJitFortAwaitInstall : 1;
1125 };
1126 BitFields<uint32_t> regionBits;
1127 };
1128
1129 std::mutex regionMutex;
1130 };
1131
    // A UnitInfo overlays raw memory in a descending array that ends just
    // below heapStartAddress; it is never constructed or destroyed directly.
    class UnitInfo {
    public:
        // propagated from RegionManager
        static uintptr_t heapStartAddress; // the address of the first region space to allocate objects
        static size_t totalUnitCount;
        static uintptr_t unitInfoStart; // the address of the first UnitInfo

        constexpr static uint32_t INVALID_IDX = std::numeric_limits<uint32_t>::max();
        // Map a heap address to the index of the unit containing it.
        // Fatal on addresses outside the managed heap range.
        static size_t GetUnitIdxAt(uintptr_t allocAddr)
        {
            if (heapStartAddress <= allocAddr && allocAddr < (heapStartAddress + totalUnitCount * UNIT_SIZE)) {
                return (allocAddr - heapStartAddress) / UNIT_SIZE;
            }

            LOG_COMMON(FATAL) << "Unresolved fatal";
            UNREACHABLE_CC();
        }

        // UnitInfo record of the unit containing `allocAddr`.
        static UnitInfo* GetUnitInfoAt(uintptr_t allocAddr)
        {
            return GetUnitInfo(GetUnitIdxAt(allocAddr));
        }

        // the start address for allocation
        static HeapAddress GetUnitAddress(size_t idx)
        {
            CHECK_CC(idx < totalUnitCount);
            return heapStartAddress + idx * UNIT_SIZE;
        }

        // Record for unit `idx`: the records are laid out downward from
        // heapStartAddress, so unit idx sits (idx + 1) records below it.
        static UnitInfo* GetUnitInfo(size_t idx)
        {
            CHECK_CC(idx < totalUnitCount);
            return reinterpret_cast<UnitInfo*>(heapStartAddress - (idx + 1) * sizeof(UnitInfo));
        }

        // Inverse of GetUnitInfo(): recover the unit index from a record address.
        static size_t GetUnitIdx(const UnitInfo* unit)
        {
            uintptr_t ptr = reinterpret_cast<uintptr_t>(unit);
            DCHECK_CC(unitInfoStart <= ptr && ptr < heapStartAddress);
            return ((heapStartAddress - ptr) / sizeof(UnitInfo)) - 1;
        }

        UnitInfo() = delete;
        UnitInfo(const UnitInfo&) = delete;
        UnitInfo& operator=(const UnitInfo&) = delete;
        ~UnitInfo() = delete;

        // These interfaces are used to make sure the writing operations of value in C++ Bit Field will be atomic.
        void SetUnitRole(UnitRole role) { metadata_.unitBits.AtomicSetValue(0, BITS_5, static_cast<uint8_t>(role)); }

        void SetMarkedRegionFlag(uint8_t flag)
        {
            metadata_.regionBits.AtomicSetValue(RegionBitOffset::BIT_OFFSET_MARKED_REGION, 1, flag);
        }

        void SetEnqueuedRegionFlag(uint8_t flag)
        {
            metadata_.regionBits.AtomicSetValue(RegionBitOffset::BIT_OFFSET_ENQUEUED_REGION, 1, flag);
        }

        void SetResurrectedRegionFlag(uint8_t flag)
        {
            metadata_.regionBits.AtomicSetValue(RegionBitOffset::BIT_OFFSET_RESURRECTED_REGION, 1, flag);
        }

        // Convert this single unit back to a free region.
        void ToFreeRegion() { InitFreeRegion(GetUnitIdx(this), 1); }

        void ClearUnit() { ClearUnits(GetUnitIdx(this), 1); }

        void ReleaseUnit() { ReleaseUnits(GetUnitIdx(this), 1); }

        UnitMetadata& GetMetadata() { return metadata_; }

        UnitRole GetUnitRole() const { return static_cast<UnitRole>(metadata_.unitRole); }

    private:
        UnitMetadata metadata_;

        friend class RegionDesc;
    };
1213
1214 public:
    // Inline a copy of some region data at the beginning of the region's memory,
    // to support fast paths in barriers and similar code.
    // NOTE the data consistency between the data in this header and that in `RegionDesc`.
    // This may ONLY be used on a region that is ALIVE.
    // Fast-path copy of region metadata placed at the region base (see the
    // diagram on UnitMetadata): barriers can reach it by masking an object
    // address instead of dereferencing the RegionDesc.
    class InlinedRegionMetaData {
    public:
        // Header of `region`, recovered from the region's data start address.
        static InlinedRegionMetaData *GetInlinedRegionMetaData(RegionDesc *region)
        {
            InlinedRegionMetaData *data = GetInlinedRegionMetaData(region->GetRegionStart());
            DCHECK_CC(data->regionDesc_ == region);
            return data;
        }
        // Header of the region containing `allocAddr`: mask the address down
        // to the region-unit boundary.
        static InlinedRegionMetaData *GetInlinedRegionMetaData(uintptr_t allocAddr)
        {
            return reinterpret_cast<InlinedRegionMetaData *>(allocAddr & ~DEFAULT_REGION_UNIT_MASK);
        }

        explicit InlinedRegionMetaData(RegionDesc *regionDesc)
            : regionDesc_(regionDesc), regionRSet_(regionDesc->GetRSet()), regionType_(regionDesc->GetRegionType())
        {
            // Since this is a backup copy of `RegionDesc`, create rset at first to guarantee data consistency
            DCHECK_CC(regionRSet_ != nullptr);
            // Not insert to regionList and reset regionType yet
            DCHECK_CC(regionType_ == RegionType::FREE_REGION);
            DCHECK_CC(regionType_ == regionDesc_->GetRegionType());
        }
        ~InlinedRegionMetaData() = default;

        // Mirror a region-type change; must match the RegionDesc's own type.
        void SetRegionType(RegionType type)
        {
            DCHECK_CC(RegionDesc::IsAliveRegionType(type));
            DCHECK_CC(type == regionDesc_->GetRegionType());
            regionType_ = type;
        }

        RegionDesc *GetRegionDesc() const
        {
            return regionDesc_;
        }

        RegionRSet *GetRegionRSet() const
        {
            return regionRSet_;
        }

        RegionType GetRegionType() const
        {
            DCHECK_CC(RegionDesc::IsAliveRegionType(regionType_));
            return regionType_;
        }

        // Space-membership predicates, all delegating to the RegionDesc
        // classification of the cached region type.
        bool IsInRecentSpace() const
        {
            RegionType type = GetRegionType();
            return RegionDesc::IsInRecentSpace(type);
        }

        bool IsInYoungSpace() const
        {
            RegionType type = GetRegionType();
            return RegionDesc::IsInYoungSpace(type);
        }

        bool IsInFromSpace() const
        {
            RegionType type = GetRegionType();
            return RegionDesc::IsInFromSpace(type);
        }

        bool IsInToSpace() const
        {
            RegionType type = GetRegionType();
            return RegionDesc::IsInToSpace(type);
        }

        bool IsInOldSpace() const
        {
            RegionType type = GetRegionType();
            return RegionDesc::IsInOldSpace(type);
        }

        bool IsFromRegion() const
        {
            RegionType type = GetRegionType();
            return type == RegionType::FROM_REGION;
        }

        // Young-space check as used by the write barrier.
        bool IsInYoungSpaceForWB() const
        {
            RegionType type = GetRegionType();
            return RegionDesc::IsInYoungSpaceForWB(type);
        }

        inline HeapAddress GetRegionStart() const;

        // The header itself sits at the region base.
        HeapAddress GetRegionBase() const
        {
            uintptr_t base = reinterpret_cast<uintptr_t>(this);
            ASSERT(base == regionDesc_->GetRegionBaseFast());
            return static_cast<HeapAddress>(base);
        }

        // Byte offset of `address` from the region base.
        size_t GetAddressOffset(HeapAddress address) const
        {
            DCHECK_CC(GetRegionBase() <= address);
            return (address - GetRegionBase());
        }

        // Mark the remembered-set card covering `obj`; returns the card
        // table's result for that offset.
        bool MarkRSetCardTable(BaseObject *obj)
        {
            size_t offset = GetAddressOffset(static_cast<HeapAddress>(reinterpret_cast<uintptr_t>(obj)));
            return GetRegionRSet()->MarkCardTable(offset);
        }
    private:
        RegionDesc *regionDesc_ {nullptr};
        RegionRSet *regionRSet_ {nullptr};
        RegionType regionType_ {};
        // fixme: inline more

        friend class RegionDesc;
    };
    // Must stay equal to the alignment of BaseObject.
    static constexpr size_t UNIT_BEGIN_ALIGN = 8;
    // Default common region unit header size (inlined metadata, rounded up to
    // the object alignment so the data area stays aligned).
    static constexpr size_t UNIT_HEADER_SIZE = AlignUp<size_t>(sizeof(InlinedRegionMetaData), UNIT_BEGIN_ALIGN);
    // Default common region unit available (data) size.
    static constexpr size_t UNIT_AVAILABLE_SIZE = UNIT_SIZE - UNIT_HEADER_SIZE;
1341
1342 private:
1343
    // Fill in the descriptor fields for a region of `nUnit` units with role
    // `uClass`: empty alloc range starting at the data area, unlinked from any
    // list, typed FREE_REGION, all GC flags cleared.
    void InitRegionDesc(size_t nUnit, UnitRole uClass)
    {
        DCHECK_CC(uClass != UnitRole::SUBORDINATE_UNIT);
        size_t base = GetRegionBase();
        metadata.regionBase = base;
        // Data area begins right after the inlined metadata header.
        metadata.regionStart = base + RegionDesc::UNIT_HEADER_SIZE;
        ASSERT(metadata.regionStart % UNIT_BEGIN_ALIGN == 0);
        metadata.allocPtr = GetRegionStart();
        metadata.regionEnd = base + nUnit * RegionDesc::UNIT_SIZE;
        DCHECK_CC(GetRegionStart() < GetRegionEnd());
        metadata.prevRegionIdx = NULLPTR_IDX;
        metadata.nextRegionIdx = NULLPTR_IDX;
        metadata.liveByteCount = 0;
        metadata.freeSlot = nullptr;
        SetRegionType(RegionType::FREE_REGION);
        SetUnitRole(uClass);
        ClearMarkingCopyLine();
        SetMarkedRegionFlag(0);
        SetEnqueuedRegionFlag(0);
        SetResurrectedRegionFlag(0);
        __atomic_store_n(&metadata.rawPointerObjectCount, 0, __ATOMIC_SEQ_CST);
#ifdef USE_HWASAN
        // A (re)initialized region's memory is valid to touch again.
        ASAN_UNPOISON_MEMORY_REGION(reinterpret_cast<const volatile void *>(metadata.regionBase),
            nUnit * RegionDesc::UNIT_SIZE);
        uintptr_t pAddr = metadata.regionBase;
        uintptr_t pSize = nUnit * RegionDesc::UNIT_SIZE;
        LOG_COMMON(DEBUG) << std::hex << "set [" << pAddr <<
            std::hex << ", " << (pAddr + pSize) << ") unpoisoned\n";
#endif
    }
1374
    // Reuse an existing region: clear its remembered set, then re-initialize
    // the descriptor and the inlined metadata header for `nUnit` units.
    void ResetRegion(size_t nUnit, UnitRole uClass)
    {
        DCHECK_CC(metadata.regionRSet != nullptr);
        ClearRSet();
        InitRegionDesc(nUnit, uClass);
        InitMetaData(nUnit, uClass);
        // Publish the fully initialized metadata before the region is used.
        std::atomic_thread_fence(std::memory_order_seq_cst);
    }
1383
    // First-time initialization of a region: set up the descriptor, create the
    // remembered set, then build the inlined metadata header.
    void InitRegion(size_t nUnit, UnitRole uClass)
    {
        DCHECK_CC(uClass != UnitRole::FREE_UNITS); //fixme: remove `UnitRole::SUBORDINATE_UNIT`
        DCHECK_CC(uClass != UnitRole::SUBORDINATE_UNIT); //fixme: remove `UnitRole::SUBORDINATE_UNIT`
        InitRegionDesc(nUnit, uClass);
        DCHECK_CC(metadata.regionRSet == nullptr);
        metadata.regionRSet = RegionRSet::CreateRegionRSet(GetRegionBaseSize());
        InitMetaData(nUnit, uClass);
        // Publish the fully initialized metadata before the region is used.
        std::atomic_thread_fence(std::memory_order_seq_cst);
    }
1394
    // Initialize the live info, placement-new the inlined metadata header at
    // the region base, and reset the live info of all subordinate units.
    void InitMetaData(size_t nUnit, UnitRole uClass)
    {
        metadata.liveInfo_.Init(this);
        HeapAddress header = GetRegionBase();
        void *ptr = reinterpret_cast<void *>(static_cast<uintptr_t>(header));
        // Fast-path copy of the region metadata lives at the region base.
        new (ptr) InlinedRegionMetaData(this);

        // initialize region's subordinate units.
        // UnitInfo records are laid out downward, so the subordinate units of
        // a multi-unit region sit directly below `this`.
        UnitInfo* unit = reinterpret_cast<UnitInfo*>(this) - (nUnit - 1);
        for (size_t i = 0; i < nUnit - 1; i++) {
            // Only large regions span more than one unit.
            DCHECK_CC(uClass == UnitRole::LARGE_SIZED_UNITS);
            unit[i].metadata_.liveInfo_.Fini();
        }
    }
1409
    // Index value used as the null link in the region list.
    static constexpr uint32_t NULLPTR_IDX = UnitInfo::INVALID_IDX;
    UnitMetadata metadata;
public:
    friend constexpr size_t GetMetaDataInRegionOffset();
    // Offsets of the inlined-header fields, for fast-path/barrier code.
    static constexpr size_t REGION_RSET_IN_INLINED_METADATA_OFFSET = MEMBER_OFFSET(InlinedRegionMetaData, regionRSet_);
    static constexpr size_t REGION_TYPE_IN_INLINED_METADATA_OFFSET = MEMBER_OFFSET(InlinedRegionMetaData, regionType_);
};
1417
GetRegionStart()1418 HeapAddress RegionDesc::InlinedRegionMetaData::GetRegionStart() const
1419 {
1420 HeapAddress addr = static_cast<HeapAddress>(reinterpret_cast<uintptr_t>(this) + RegionDesc::UNIT_HEADER_SIZE);
1421 DCHECK_CC(addr == regionDesc_->GetRegionStart());
1422 return addr;
1423 }
1424 } // namespace common
1425
1426 #endif // COMMON_COMPONENTS_HEAP_ALLOCATOR_REGION_INFO_H
1427