/*
 * Copyright (c) 2022-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
15
16 #ifndef ECMASCRIPT_MEM_REGION_H
17 #define ECMASCRIPT_MEM_REGION_H
18
19 #include <type_traits>
20
21 #include "common_components/base/asan_interface.h"
22 #include "ecmascript/base/aligned_struct.h"
23 #include "ecmascript/js_tagged_value.h"
24 #include "ecmascript/mem/free_object_list.h"
25 #include "ecmascript/mem/gc_bitset.h"
26 #include "ecmascript/mem/remembered_set.h"
27 #include "ecmascript/mem/mem_common.h"
28 #include "ecmascript/platform/map.h"
29
30 #include "ecmascript/platform/mutex.h"
31
32 #include "securec.h"
33
34 namespace panda {
35 namespace ecmascript {
36 class JSThread;
37
// Identifies the heap space a Region belongs to. The value lives in the low
// byte of the region's packed flags (see Region::PackedPtr::spaceFlag_).
enum RegionSpaceFlag {
    UNINITIALIZED = 0,
    // We should avoid using the lower 3 bits (bits 0 to 2).
    // If ZAP_MEM is enabled, the value of the lower 3 bits conflicts with the INVALID_VALUE.

    // Bits 3 to 7 are reserved to denote the space where the region is located.
    IN_YOUNG_SPACE = 0x08,
    IN_SNAPSHOT_SPACE = 0x09,
    IN_HUGE_OBJECT_SPACE = 0x0A,
    IN_OLD_SPACE = 0x0B,
    IN_NON_MOVABLE_SPACE = 0x0C,
    IN_MACHINE_CODE_SPACE = 0x0D,
    IN_READ_ONLY_SPACE = 0x0E,
    IN_APPSPAWN_SPACE = 0x0F,
    IN_HUGE_MACHINE_CODE_SPACE = 0x10,
    IN_SHARED_NON_MOVABLE = 0x11,
    IN_SHARED_OLD_SPACE = 0x12,
    IN_SHARED_APPSPAWN_SPACE = 0x13,
    IN_SHARED_HUGE_OBJECT_SPACE = 0x14,
    IN_SHARED_READ_ONLY_SPACE = 0x15,

    VALID_SPACE_MASK = 0xFF,

    // Aliases naming contiguous ranges of the space values above; used for
    // fast range checks such as Region::InGeneralOldSpace()/InSharedHeap().
    GENERAL_OLD_BEGIN = IN_SNAPSHOT_SPACE,
    GENERAL_OLD_END = IN_HUGE_MACHINE_CODE_SPACE,
    SHARED_SPACE_BEGIN = IN_SHARED_NON_MOVABLE,
    SHARED_SPACE_END = IN_SHARED_READ_ONLY_SPACE,
    SHARED_SWEEPABLE_SPACE_BEGIN = IN_SHARED_NON_MOVABLE,
    SHARED_SWEEPABLE_SPACE_END = IN_SHARED_HUGE_OBJECT_SPACE,

    HEAP_SPACE_BEGIN = IN_YOUNG_SPACE,
    HEAP_SPACE_END = IN_SHARED_READ_ONLY_SPACE
};
71
// Per-region GC state bits; each flag occupies its own dedicated bit so flags
// can be combined, tested and cleared independently.
enum RegionGCFlags {
    // We should avoid using the lower 3 bits (bits 0 to 2).
    // If ZAP_MEM is enabled, the value of the lower 3 bits conflicts with the INVALID_VALUE.

    // Below flags are used for GC, and each flag has a dedicated bit starting from the 3rd bit.
    NEVER_EVACUATE = 1 << 3,
    HAS_AGE_MARK = 1 << 4,
    BELOW_AGE_MARK = 1 << 5,
    IN_COLLECT_SET = 1 << 6,
    IN_NEW_TO_NEW_SET = 1 << 7,
    // Bits 8 to 10 (the lower 3 bits of the next byte) are also excluded for the sake of
    // INVALID_VALUE in ZAP_MEM.
    HAS_BEEN_SWEPT = 1 << 11,
    NEED_RELOCATE = 1 << 12,
    // ONLY used for heap verification.
    IN_INACTIVE_SEMI_SPACE = 1 << 13,
    IN_NEW_TO_OLD_SET = 1 << 14,
    IN_SHARED_COLLECT_SET = 1 << 15,
};
91
// Currently only used for regions in LinearSpace, to check whether a region
// was allocated during concurrent marking.
enum class RegionTypeFlag : uint8_t {
    DEFAULT = 0,
    // We should avoid using the lower 3 bits (bits 0 to 2).
    // If ZAP_MEM is enabled, the value of the lower 3 bits conflicts with the INVALID_VALUE.

    // Region was allocated before concurrent marking, but some new objects may
    // still be allocated in it during concurrent marking.
    HALF_FRESH = 0x08,
    // Region was allocated during concurrent marking.
    FRESH = 0x09,
};
104
// Kinds of remembered set a region maintains.
enum RSetType {
    OLD_TO_NEW,      // records old-space slots referencing young-space objects
    LOCAL_TO_SHARE,  // records local-heap slots referencing shared-heap objects
};
109
// Bit masks recording whether a region's remembered sets have been swapped out
// (e.g. handed to a sweeper). A swapped/collected bitset must not be read.
enum class RSetSwapFlag : uint8_t {
    // Neither LocalToShare nor OldToNew is swapped: both bitsets are available.
    NO_SWAPPED = 0,

    // LocalToShare is swapped: its bitset is unavailable.
    LOCAL_TO_SHARE_SWAPPED_MASK = 0b001,

    // LocalToShare is collected: its bitset is unavailable.
    LOCAL_TO_SHARE_COLLECTED_MASK = 0b010,

    // OldToNew is swapped: its bitset is unavailable.
    OLD_TO_NEW_SWAPPED_MASK = 0b100,
};
123
ToSpaceTypeName(uint8_t space)124 static inline std::string ToSpaceTypeName(uint8_t space)
125 {
126 switch (space) {
127 case RegionSpaceFlag::IN_YOUNG_SPACE:
128 return "young space";
129 case RegionSpaceFlag::IN_SNAPSHOT_SPACE:
130 return "snapshot space";
131 case RegionSpaceFlag::IN_HUGE_OBJECT_SPACE:
132 return "huge object space";
133 case RegionSpaceFlag::IN_OLD_SPACE:
134 return "old space";
135 case RegionSpaceFlag::IN_NON_MOVABLE_SPACE:
136 return "non movable space";
137 case RegionSpaceFlag::IN_MACHINE_CODE_SPACE:
138 return "machine code space";
139 case RegionSpaceFlag::IN_READ_ONLY_SPACE:
140 return "read only space";
141 case RegionSpaceFlag::IN_APPSPAWN_SPACE:
142 return "appspawn space";
143 case RegionSpaceFlag::IN_HUGE_MACHINE_CODE_SPACE:
144 return "huge machine code space";
145 case RegionSpaceFlag::IN_SHARED_NON_MOVABLE:
146 return "shared non movable space";
147 case RegionSpaceFlag::IN_SHARED_OLD_SPACE:
148 return "shared old space";
149 case RegionSpaceFlag::IN_SHARED_READ_ONLY_SPACE:
150 return "shared read only space";
151 case RegionSpaceFlag::IN_SHARED_HUGE_OBJECT_SPACE:
152 return "shared huge object space";
153 case RegionSpaceFlag::IN_SHARED_APPSPAWN_SPACE:
154 return "shared appspawn space";
155 default:
156 return "invalid space";
157 }
158 }
159
160 // |---------------------------------------------------------------------------------------|
161 // | Region (256 kb) |
162 // |---------------------------------|--------------------------------|--------------------|
163 // | Head (sizeof(Region)) | Mark bitset (4kb) | Data |
164 // |---------------------------------|--------------------------------|--------------------|
165
166 class Region {
167 public:
Region(NativeAreaAllocator * allocator,uintptr_t allocateBase,uintptr_t begin,uintptr_t end,RegionSpaceFlag spaceType,RegionTypeFlag typeFlag)168 Region(NativeAreaAllocator *allocator, uintptr_t allocateBase, uintptr_t begin, uintptr_t end,
169 RegionSpaceFlag spaceType, RegionTypeFlag typeFlag)
170 : packedData_(begin, end, spaceType, typeFlag),
171 nativeAreaAllocator_(allocator),
172 allocateBase_(allocateBase),
173 end_(end),
174 highWaterMark_(end),
175 aliveObject_(0),
176 wasted_(0),
177 snapshotData_(0) {}
178
179 // JitFort space is divided into regions (JitForRegion) to enable
180 // reusing free_object_list and free_object_set operations for
181 // JitFort space, and GC marking actually happens in corresponding
182 // MachineCode objects where JitFort space is allocated to. So no
183 // gc mark bits needed in JitFortRegions.
Region(NativeAreaAllocator * allocator,uintptr_t allocateBase,uintptr_t end,RegionSpaceFlag spaceType)184 Region(NativeAreaAllocator *allocator, uintptr_t allocateBase, uintptr_t end,
185 RegionSpaceFlag spaceType)
186 : packedData_(allocateBase, spaceType), // no markGCBitset_ for JitFort
187 nativeAreaAllocator_(allocator),
188 allocateBase_(allocateBase),
189 end_(end),
190 highWaterMark_(end),
191 aliveObject_(0),
192 wasted_(0),
193 snapshotData_(0) {}
194
195 ~Region() = default;
196
197 NO_COPY_SEMANTIC(Region);
198 NO_MOVE_SEMANTIC(Region);
199
200 enum RegionSpaceKind { InYoung, InGeneralOld, Other };
201
202 template <RegionSpaceKind kind>
203 class Updater final {
204 public:
Updater(uintptr_t updateAddress,Region & region)205 Updater(uintptr_t updateAddress, Region& region)
206 : bitsetUpdater_(updateAddress),
207 region_(region)
208 {
209 }
210
211 NO_COPY_SEMANTIC(Updater);
212
~Updater()213 ARK_INLINE ~Updater()
214 {
215 Flush();
216 }
217
UpdateLocalToShare()218 ARK_INLINE void UpdateLocalToShare()
219 {
220 bitsetUpdater_.Update(LocalToShareIdx);
221 }
222
223 template <RegionSpaceKind T = kind, std::enable_if_t<T == InGeneralOld, int> = 0>
UpdateOldToNew()224 ARK_INLINE void UpdateOldToNew()
225 {
226 bitsetUpdater_.Update(OldToNewIdx);
227 }
228
Next()229 ARK_INLINE void Next()
230 {
231 if (bitsetUpdater_.Next()) {
232 Flush();
233 }
234 }
235
236 private:
237 ARK_INLINE void Consume(size_t idx, uintptr_t updateAddress, uint32_t mask);
238
239 ARK_INLINE void Flush();
240
CalculateBitSetNum()241 static constexpr size_t CalculateBitSetNum()
242 {
243 constexpr size_t InYoungBitSetNum = 2;
244 constexpr size_t InGeneralOldBitSetNum = 2;
245 constexpr size_t OtherBitSetNum = 1;
246 switch (kind) {
247 case InYoung:
248 return InYoungBitSetNum;
249 case InGeneralOld:
250 return InGeneralOldBitSetNum;
251 case Other:
252 return OtherBitSetNum;
253 }
254 return 0;
255 }
256
257 static constexpr size_t BitSetNum = CalculateBitSetNum();
258 static constexpr size_t LocalToShareIdx = 0;
259 static constexpr size_t OldToNewIdx = 1;
260 GCBitSetUpdater<BitSetNum> bitsetUpdater_;
261 Region& region_;
262 };
263
Initialize()264 void Initialize()
265 {
266 lock_ = new Mutex();
267 if (InSparseSpace()) {
268 InitializeFreeObjectSets();
269 }
270 }
271
LinkNext(Region * next)272 void LinkNext(Region *next)
273 {
274 next_ = next;
275 }
276
GetNext()277 Region *GetNext() const
278 {
279 return next_;
280 }
281
LinkPrev(Region * prev)282 void LinkPrev(Region *prev)
283 {
284 prev_ = prev;
285 }
286
GetPrev()287 Region *GetPrev() const
288 {
289 return prev_;
290 }
291
GetBegin()292 uintptr_t GetBegin() const
293 {
294 return packedData_.begin_;
295 }
296
GetEnd()297 uintptr_t GetEnd() const
298 {
299 return end_;
300 }
301
GetHighWaterMark()302 uintptr_t GetHighWaterMark() const
303 {
304 return highWaterMark_;
305 }
306
GetCapacity()307 size_t GetCapacity() const
308 {
309 return end_ - allocateBase_;
310 }
311
GetSize()312 size_t GetSize() const
313 {
314 return end_ - packedData_.begin_;
315 }
316
IsGCFlagSet(RegionGCFlags flag)317 bool IsGCFlagSet(RegionGCFlags flag) const
318 {
319 return (packedData_.flags_.gcFlags_ & flag) == flag;
320 }
321
SetGCFlag(RegionGCFlags flag)322 void SetGCFlag(RegionGCFlags flag)
323 {
324 packedData_.flags_.gcFlags_ |= flag;
325 }
326
ClearGCFlag(RegionGCFlags flag)327 void ClearGCFlag(RegionGCFlags flag)
328 {
329 // NOLINTNEXTLINE(hicpp-signed-bitwise)
330 packedData_.flags_.gcFlags_ &= ~flag;
331 }
332
GetSpaceTypeName()333 std::string GetSpaceTypeName()
334 {
335 return ToSpaceTypeName(packedData_.flags_.spaceFlag_);
336 }
337
GetSpaceType()338 uint8_t GetSpaceType() const
339 {
340 return packedData_.flags_.spaceFlag_;
341 }
342
343 // Mark bitset
344 GCBitset *GetMarkGCBitset() const;
345 bool AtomicMark(void *address);
346 // Objects in fresh region should only mark in JS Thread.
347 bool NonAtomicMark(void *address);
348 void ClearMark(void *address);
349 bool Test(void *addr) const;
350 bool Test(uintptr_t addr) const;
351 // ONLY used for heap verification.
352 bool TestOldToNew(uintptr_t addr);
353 bool TestLocalToShare(uintptr_t addr);
354 template <typename Visitor>
355 void IterateAllMarkedBits(Visitor &&visitor) const;
356 void ClearMarkGCBitset();
357 // local to share remembered set
358 bool HasLocalToShareRememberedSet() const;
359 RememberedSet *CollectLocalToShareRSet();
360 void InsertLocalToShareRSet(uintptr_t addr);
361 template<RegionSpaceKind kind>
362 Updater<kind> GetBatchRSetUpdater(uintptr_t addr);
363 void AtomicInsertLocalToShareRSet(uintptr_t addr);
364 void ClearLocalToShareRSetInRange(uintptr_t start, uintptr_t end);
365 void AtomicClearLocalToShareRSetInRange(uintptr_t start, uintptr_t end);
366 void AtomicClearSweepingLocalToShareRSetInRange(uintptr_t start, uintptr_t end);
367 template <typename Visitor>
368 void IterateAllLocalToShareBits(Visitor visitor);
369 void DeleteLocalToShareRSet();
370 void DeleteSweepingLocalToShareRSet();
371 // Cross region remembered set
372 void InsertCrossRegionRSet(uintptr_t addr);
373 void AtomicInsertCrossRegionRSet(uintptr_t addr);
374 template <typename Visitor>
375 void IterateAllCrossRegionBits(Visitor visitor) const;
376 void ClearCrossRegionRSet();
377 void ClearCrossRegionRSetInRange(uintptr_t start, uintptr_t end);
378 void AtomicClearCrossRegionRSetInRange(uintptr_t start, uintptr_t end);
379 void DeleteCrossRegionRSet();
380 // Old to new remembered set
381 void InsertOldToNewRSet(uintptr_t addr);
382 void ClearOldToNewRSet(uintptr_t addr);
383
384 template <typename Visitor>
385 void IterateAllOldToNewBits(Visitor visitor);
386 void ClearOldToNewRSet();
387 void ClearOldToNewRSetInRange(uintptr_t start, uintptr_t end);
388 void DeleteOldToNewRSet();
389
390 void AtomicClearSweepingOldToNewRSetInRange(uintptr_t start, uintptr_t end);
391 void ClearSweepingOldToNewRSetInRange(uintptr_t start, uintptr_t end);
392 void DeleteSweepingOldToNewRSet();
393 template <typename Visitor>
394 void AtomicIterateAllSweepingRSetBits(Visitor visitor);
395 template <typename Visitor>
396 void IterateAllSweepingRSetBits(Visitor visitor);
397
ObjectAddressToRange(BaseObject * obj)398 static Region *ObjectAddressToRange(BaseObject *obj)
399 {
400 return reinterpret_cast<Region *>(ToUintPtr(obj) & ~DEFAULT_REGION_MASK);
401 }
402
ObjectAddressToRange(TaggedObject * obj)403 static Region *ObjectAddressToRange(TaggedObject *obj)
404 {
405 return reinterpret_cast<Region *>(ToUintPtr(obj) & ~DEFAULT_REGION_MASK);
406 }
407
ObjectAddressToRange(uintptr_t objAddress)408 static Region *ObjectAddressToRange(uintptr_t objAddress)
409 {
410 return reinterpret_cast<Region *>(objAddress & ~DEFAULT_REGION_MASK);
411 }
412
GetRegionAvailableSize()413 static size_t GetRegionAvailableSize()
414 {
415 size_t regionHeaderSize = AlignUp(sizeof(Region), static_cast<size_t>(MemAlignment::MEM_ALIGN_REGION));
416 size_t bitsetSize = GCBitset::SizeOfGCBitset(DEFAULT_REGION_SIZE - regionHeaderSize);
417 return DEFAULT_REGION_SIZE - regionHeaderSize - bitsetSize;
418 }
419
ClearMembers()420 void ClearMembers()
421 {
422 if (lock_ != nullptr) {
423 delete lock_;
424 lock_ = nullptr;
425 }
426 }
427
Invalidate()428 void Invalidate()
429 {
430 ASAN_UNPOISON_MEMORY_REGION(reinterpret_cast<void *>(GetBegin()), GetSize());
431 packedData_.flags_.spaceFlag_ = RegionSpaceFlag::UNINITIALIZED;
432 }
433
ResetRegionFlag(RegionSpaceFlag spaceFlag,RegionGCFlags gcFlag)434 void ResetRegionFlag(RegionSpaceFlag spaceFlag, RegionGCFlags gcFlag)
435 {
436 packedData_.flags_.spaceFlag_ = spaceFlag;
437 packedData_.flags_.gcFlags_ = gcFlag;
438 }
439
440 uint8_t GetRegionSpaceFlag();
441 void SetRSetSwapFlag(RSetSwapFlag mask);
442 void ClearRSetSwapFlag(RSetSwapFlag mask);
443
SetRegionSpaceFlag(RegionSpaceFlag flag)444 void SetRegionSpaceFlag(RegionSpaceFlag flag)
445 {
446 packedData_.flags_.spaceFlag_ = flag;
447 }
448
InYoungSpace()449 bool InYoungSpace() const
450 {
451 return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_YOUNG_SPACE;
452 }
453
InOldSpace()454 bool InOldSpace() const
455 {
456 return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_OLD_SPACE;
457 }
458
InYoungOrOldSpace()459 bool InYoungOrOldSpace() const
460 {
461 return InYoungSpace() || InOldSpace();
462 }
463
InGeneralOldSpace()464 bool InGeneralOldSpace() const
465 {
466 ASSERT(packedData_.flags_.spaceFlag_ != 0);
467 auto flag = packedData_.flags_.spaceFlag_;
468 return flag >= RegionSpaceFlag::GENERAL_OLD_BEGIN && flag <= RegionSpaceFlag::GENERAL_OLD_END;
469 }
470
InHugeObjectSpace()471 bool InHugeObjectSpace() const
472 {
473 return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_HUGE_OBJECT_SPACE;
474 }
475
InMachineCodeSpace()476 bool InMachineCodeSpace() const
477 {
478 return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_MACHINE_CODE_SPACE;
479 }
480
InHugeMachineCodeSpace()481 bool InHugeMachineCodeSpace() const
482 {
483 return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_HUGE_MACHINE_CODE_SPACE;
484 }
485
InNonMovableSpace()486 bool InNonMovableSpace() const
487 {
488 return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_NON_MOVABLE_SPACE;
489 }
490
InSnapshotSpace()491 bool InSnapshotSpace() const
492 {
493 return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_SNAPSHOT_SPACE;
494 }
495
InReadOnlySpace()496 bool InReadOnlySpace() const
497 {
498 return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_READ_ONLY_SPACE;
499 }
500
InSharedOldSpace()501 bool InSharedOldSpace() const
502 {
503 return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_SHARED_OLD_SPACE;
504 }
505
InSharedNonMovableSpace()506 bool InSharedNonMovableSpace() const
507 {
508 return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_SHARED_NON_MOVABLE;
509 }
510
InSharedHugeObjectSpace()511 bool InSharedHugeObjectSpace() const
512 {
513 return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_SHARED_HUGE_OBJECT_SPACE;
514 }
515
InSharedReadOnlySpace()516 bool InSharedReadOnlySpace() const
517 {
518 return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_SHARED_READ_ONLY_SPACE;
519 }
520
InSharedAppSpawnSpace()521 bool InSharedAppSpawnSpace() const
522 {
523 return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_SHARED_APPSPAWN_SPACE;
524 }
525
InAppSpawnSpace()526 bool InAppSpawnSpace() const
527 {
528 return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_APPSPAWN_SPACE;
529 }
530
531 // Not including shared read only space.
InSharedSweepableSpace()532 bool InSharedSweepableSpace() const
533 {
534 auto flag = packedData_.flags_.spaceFlag_;
535 return flag >= RegionSpaceFlag::SHARED_SWEEPABLE_SPACE_BEGIN &&
536 flag <= RegionSpaceFlag::SHARED_SWEEPABLE_SPACE_END;
537 }
538
InSharedHeap()539 bool InSharedHeap() const
540 {
541 ASSERT(!g_isEnableCMCGC);
542 auto flag = packedData_.flags_.spaceFlag_;
543 return flag >= RegionSpaceFlag::SHARED_SPACE_BEGIN && flag <= RegionSpaceFlag::SHARED_SPACE_END;
544 }
545
InSparseSpace()546 bool InSparseSpace() const
547 {
548 auto flag = packedData_.flags_.spaceFlag_;
549 switch (flag) {
550 case RegionSpaceFlag::IN_OLD_SPACE:
551 case RegionSpaceFlag::IN_NON_MOVABLE_SPACE:
552 case RegionSpaceFlag::IN_MACHINE_CODE_SPACE:
553 case RegionSpaceFlag::IN_APPSPAWN_SPACE:
554 case RegionSpaceFlag::IN_SHARED_NON_MOVABLE:
555 case RegionSpaceFlag::IN_SHARED_OLD_SPACE:
556 return true;
557 default:
558 return false;
559 }
560 }
561
InHeapSpace()562 bool InHeapSpace() const
563 {
564 uint8_t space = packedData_.flags_.spaceFlag_;
565 return space >= RegionSpaceFlag::HEAP_SPACE_BEGIN && space <= RegionSpaceFlag::HEAP_SPACE_END;
566 }
567
InCollectSet()568 bool InCollectSet() const
569 {
570 return IsGCFlagSet(RegionGCFlags::IN_COLLECT_SET);
571 }
572
InSCollectSet()573 bool InSCollectSet() const
574 {
575 return IsGCFlagSet(RegionGCFlags::IN_SHARED_COLLECT_SET);
576 }
577
InYoungSpaceOrCSet()578 bool InYoungSpaceOrCSet() const
579 {
580 return InYoungSpace() || InCollectSet();
581 }
582
InNewToNewSet()583 bool InNewToNewSet() const
584 {
585 return IsGCFlagSet(RegionGCFlags::IN_NEW_TO_NEW_SET);
586 }
587
InNewToOldSet()588 bool InNewToOldSet() const
589 {
590 return IsGCFlagSet(RegionGCFlags::IN_NEW_TO_OLD_SET);
591 }
592
HasAgeMark()593 bool HasAgeMark() const
594 {
595 return IsGCFlagSet(RegionGCFlags::HAS_AGE_MARK);
596 }
597
BelowAgeMark()598 bool BelowAgeMark() const
599 {
600 return IsGCFlagSet(RegionGCFlags::BELOW_AGE_MARK);
601 }
602
NeedRelocate()603 bool NeedRelocate() const
604 {
605 return IsGCFlagSet(RegionGCFlags::NEED_RELOCATE);
606 }
607
608 // ONLY used for heap verification.
InInactiveSemiSpace()609 bool InInactiveSemiSpace() const
610 {
611 return IsGCFlagSet(RegionGCFlags::IN_INACTIVE_SEMI_SPACE);
612 }
613
614 // ONLY used for heap verification.
InActiveSemiSpace()615 bool InActiveSemiSpace() const
616 {
617 return InYoungSpace() && !InInactiveSemiSpace();
618 }
619
GetRegionTypeFlag()620 RegionTypeFlag GetRegionTypeFlag() const
621 {
622 return packedData_.typeFlag_;
623 }
624
SetRegionTypeFlag(RegionTypeFlag typeFlag)625 void SetRegionTypeFlag(RegionTypeFlag typeFlag)
626 {
627 packedData_.typeFlag_ = typeFlag;
628 }
629
ResetRegionTypeFlag()630 void ResetRegionTypeFlag()
631 {
632 SetRegionTypeFlag(RegionTypeFlag::DEFAULT);
633 }
634
IsFreshRegion()635 bool IsFreshRegion() const
636 {
637 return GetRegionTypeFlag() == RegionTypeFlag::FRESH;
638 }
639
IsHalfFreshRegion()640 bool IsHalfFreshRegion() const
641 {
642 return GetRegionTypeFlag() == RegionTypeFlag::HALF_FRESH;
643 }
644
645 // ONLY used for heap verification.
SetInactiveSemiSpace()646 void SetInactiveSemiSpace()
647 {
648 SetGCFlag(RegionGCFlags::IN_INACTIVE_SEMI_SPACE);
649 }
650
651 // ONLY used for heap verification.
ResetInactiveSemiSpace()652 void ResetInactiveSemiSpace()
653 {
654 ClearGCFlag(RegionGCFlags::IN_INACTIVE_SEMI_SPACE);
655 }
656
SetSwept()657 void SetSwept()
658 {
659 SetGCFlag(RegionGCFlags::HAS_BEEN_SWEPT);
660 }
661
ResetSwept()662 void ResetSwept()
663 {
664 ClearGCFlag(RegionGCFlags::HAS_BEEN_SWEPT);
665 }
666
InRange(uintptr_t address)667 bool InRange(uintptr_t address) const
668 {
669 return address >= packedData_.begin_ && address <= end_;
670 }
671
GetAllocateBase()672 uintptr_t GetAllocateBase() const
673 {
674 return allocateBase_;
675 }
676
677 size_t GetAllocatedBytes(uintptr_t top = 0)
678 {
679 ASSERT(top == 0 || InRange(top));
680 return (top == 0) ? (highWaterMark_ - packedData_.begin_) : (top - packedData_.begin_);
681 }
682
SetHighWaterMark(uintptr_t mark)683 void SetHighWaterMark(uintptr_t mark)
684 {
685 ASSERT(InRange(mark));
686 highWaterMark_ = mark;
687 }
688
SetReadOnlyAndMarked()689 void SetReadOnlyAndMarked()
690 {
691 packedData_.markGCBitset_->SetAllBits(packedData_.bitsetSize_);
692 PageProtect(reinterpret_cast<void *>(allocateBase_), GetCapacity(), PAGE_PROT_READ);
693 }
694
ClearReadOnly()695 void ClearReadOnly()
696 {
697 PageProtect(reinterpret_cast<void *>(allocateBase_), GetCapacity(), PAGE_PROT_READWRITE);
698 }
699
InitializeFreeObjectSets()700 void InitializeFreeObjectSets()
701 {
702 FreeObjectSet<FreeObject> **sets = new FreeObjectSet<FreeObject> *[FreeObjectList<FreeObject>::NumberOfSets()];
703 for (int i = 0; i < FreeObjectList<FreeObject>::NumberOfSets(); i++) {
704 sets[i] = new FreeObjectSet<FreeObject>(i);
705 }
706 freeObjectSets_ = Span<FreeObjectSet<FreeObject> *>(sets, FreeObjectList<FreeObject>::NumberOfSets());
707 }
708
DestroyFreeObjectSets()709 void DestroyFreeObjectSets()
710 {
711 for (int i = 0; i < FreeObjectList<FreeObject>::NumberOfSets(); i++) {
712 delete freeObjectSets_[i];
713 freeObjectSets_[i] = nullptr;
714 }
715 delete[] freeObjectSets_.data();
716 }
717
GetFreeObjectSet(SetType type)718 FreeObjectSet<FreeObject> *GetFreeObjectSet(SetType type)
719 {
720 // Thread safe
721 if (freeObjectSets_[type] == nullptr) {
722 freeObjectSets_[type] = new FreeObjectSet<FreeObject>(type);
723 }
724 return freeObjectSets_[type];
725 }
726
727 template<class Callback>
EnumerateFreeObjectSets(Callback cb)728 void EnumerateFreeObjectSets(Callback cb)
729 {
730 for (auto set : freeObjectSets_) {
731 cb(set);
732 }
733 }
734
735 template<class Callback>
REnumerateFreeObjectSets(Callback cb)736 void REnumerateFreeObjectSets(Callback cb)
737 {
738 auto last = freeObjectSets_.crbegin();
739 auto first = freeObjectSets_.crend();
740 for (; last != first; last++) {
741 if (!cb(*last)) {
742 break;
743 }
744 }
745 }
746
IncreaseAliveObject(size_t size)747 void IncreaseAliveObject(size_t size)
748 {
749 aliveObject_.fetch_add(size, std::memory_order_relaxed);
750 }
751
SetRegionAliveSize()752 void SetRegionAliveSize()
753 {
754 gcAliveSize_ = aliveObject_;
755 }
756
ResetAliveObject()757 void ResetAliveObject()
758 {
759 aliveObject_ = 0;
760 }
761
AliveObject()762 size_t AliveObject() const
763 {
764 return aliveObject_.load(std::memory_order_relaxed);
765 }
766
GetGCAliveSize()767 size_t GetGCAliveSize() const
768 {
769 return gcAliveSize_;
770 }
771
MostObjectAlive()772 bool MostObjectAlive() const
773 {
774 return aliveObject_ > MOST_OBJECT_ALIVE_THRESHOLD_PERCENT * GetSize();
775 }
776
BelowCompressThreasholdAlive()777 bool BelowCompressThreasholdAlive() const
778 {
779 return gcAliveSize_ < COMPRESS_THREASHOLD_PERCENT * GetSize();
780 }
781
ResetWasted()782 void ResetWasted()
783 {
784 wasted_ = 0;
785 }
786
IncreaseWasted(uint64_t size)787 void IncreaseWasted(uint64_t size)
788 {
789 wasted_ += size;
790 }
791
GetWastedSize()792 uint64_t GetWastedSize()
793 {
794 return wasted_;
795 }
796
GetSnapshotData()797 uint64_t GetSnapshotData()
798 {
799 return snapshotData_;
800 }
801
SetSnapshotData(uint64_t value)802 void SetSnapshotData(uint64_t value)
803 {
804 snapshotData_ = value;
805 }
806
SwapOldToNewRSetForCS()807 void SwapOldToNewRSetForCS()
808 {
809 sweepingOldToNewRSet_ = packedData_.oldToNewSet_;
810 packedData_.oldToNewSet_ = nullptr;
811 if (sweepingOldToNewRSet_ != nullptr) {
812 SetRSetSwapFlag(RSetSwapFlag::OLD_TO_NEW_SWAPPED_MASK);
813 }
814 }
815
SwapLocalToShareRSetForCS()816 void SwapLocalToShareRSetForCS()
817 {
818 sweepingLocalToShareRSet_ = packedData_.localToShareSet_;
819 packedData_.localToShareSet_ = nullptr;
820 if (sweepingLocalToShareRSet_ != nullptr) {
821 SetRSetSwapFlag(RSetSwapFlag::LOCAL_TO_SHARE_SWAPPED_MASK);
822 }
823 }
824
SetLocalHeap(uintptr_t localHeap)825 void SetLocalHeap(uintptr_t localHeap)
826 {
827 ASSERT(localHeap != (uintptr_t)nullptr);
828 localHeap_ = localHeap;
829 }
830
GetLocalHeap(void)831 uintptr_t GetLocalHeap(void)
832 {
833 return localHeap_;
834 }
835
836 // should call in js-thread
837 void MergeOldToNewRSetForCS();
838 void MergeLocalToShareRSetForCS();
839
840 // should call in daemon-thread, or in js-thread in RUNNING state
841 void MergeLocalToShareRSetForCM(RememberedSet *set);
842
843 struct alignas(JSTaggedValue::TaggedTypeSize()) PackedPtr : public base::AlignedPointer {
844 uint8_t spaceFlag_;
845 uint16_t gcFlags_;
846 };
847
848 struct PackedData : public base::AlignedStruct<JSTaggedValue::TaggedTypeSize(),
849 base::AlignedPointer,
850 base::AlignedPointer,
851 base::AlignedPointer,
852 base::AlignedPointer,
853 base::AlignedPointer,
854 base::AlignedPointer,
855 base::AlignedSize,
856 base::AlignedUint8> {
857 enum class Index : size_t {
858 FlagsIndex = 0,
859 TypeFlagIndex,
860 MarkGCBitSetIndex,
861 OldToNewSetIndex,
862 LocalToShareSetIndex,
863 BeginIndex,
864 BitSetSizeIndex,
865 RSetSwapFlagIndex,
866 NumOfMembers
867 };
868
869 static_assert(static_cast<size_t>(Index::NumOfMembers) == NumOfTypes);
870
PackedDataPackedData871 inline PackedData(uintptr_t begin, uintptr_t end, RegionSpaceFlag spaceType, RegionTypeFlag typeFlag)
872 {
873 flags_.spaceFlag_ = spaceType;
874 flags_.gcFlags_ = 0;
875 typeFlag_ = typeFlag;
876 bitsetSize_ = (spaceType == RegionSpaceFlag::IN_HUGE_OBJECT_SPACE ||
877 spaceType == RegionSpaceFlag::IN_HUGE_MACHINE_CODE_SPACE ||
878 spaceType == RegionSpaceFlag::IN_SHARED_HUGE_OBJECT_SPACE) ?
879 GCBitset::BYTE_PER_WORD : GCBitset::SizeOfGCBitset(end - begin);
880 markGCBitset_ = new (ToVoidPtr(begin)) GCBitset();
881 markGCBitset_->Clear(bitsetSize_);
882 begin_ = AlignUp(begin + bitsetSize_, static_cast<size_t>(MemAlignment::MEM_ALIGN_OBJECT));
883 // The object region marked with poison until it is allocated if is_asan is true
884 #ifdef ARK_ASAN_ON
885 ASAN_POISON_MEMORY_REGION(reinterpret_cast<void *>(begin_), (end - begin_));
886 #endif
887 }
888
PackedDataPackedData889 inline PackedData(uintptr_t begin, RegionSpaceFlag spaceType)
890 {
891 flags_.spaceFlag_ = spaceType;
892 flags_.gcFlags_ = 0;
893 typeFlag_ = RegionTypeFlag::DEFAULT;
894 // no markGCBitset
895 begin_ = begin;
896 markGCBitset_ = nullptr;
897 }
898
GetFlagsOffsetPackedData899 static size_t GetFlagsOffset(bool isArch32)
900 {
901 return GetOffset<static_cast<size_t>(Index::FlagsIndex)>(isArch32);
902 }
903
GetTypeFlagOffsetPackedData904 static size_t GetTypeFlagOffset(bool isArch32)
905 {
906 return GetOffset<static_cast<size_t>(Index::TypeFlagIndex)>(isArch32);
907 }
908
GetGCBitsetOffsetPackedData909 static size_t GetGCBitsetOffset(bool isArch32)
910 {
911 return GetOffset<static_cast<size_t>(Index::MarkGCBitSetIndex)>(isArch32);
912 }
913
GetOldToNewSetOffsetPackedData914 static size_t GetOldToNewSetOffset(bool isArch32)
915 {
916 return GetOffset<static_cast<size_t>(Index::OldToNewSetIndex)>(isArch32);
917 }
918
GetLocalToShareSetOffsetPackedData919 static size_t GetLocalToShareSetOffset(bool isArch32)
920 {
921 return GetOffset<static_cast<size_t>(Index::LocalToShareSetIndex)>(isArch32);
922 }
923
GetBeginOffsetPackedData924 static size_t GetBeginOffset(bool isArch32)
925 {
926 return GetOffset<static_cast<size_t>(Index::BeginIndex)>(isArch32);
927 }
928
GetRSetSwapFlagOffsetPackedData929 static size_t GetRSetSwapFlagOffset(bool isArch32)
930 {
931 return GetOffset<static_cast<size_t>(Index::RSetSwapFlagIndex)>(isArch32);
932 }
933
934 alignas(EAS) PackedPtr flags_;
935 // Use different UIntPtr from flags_ to prevent the potential data race.
936 // Be careful when storing to this value, currently this is only from JS_Thread during ConcurrentMarking,
937 // or from GC_Thread during GC ClearTask.
938 alignas(EAS) RegionTypeFlag typeFlag_;
939 alignas(EAS) GCBitset *markGCBitset_ {nullptr};
940 alignas(EAS) RememberedSet *oldToNewSet_ {nullptr};
941 alignas(EAS) RememberedSet *localToShareSet_ {nullptr};
942 alignas(EAS) uintptr_t begin_ {0};
943 alignas(EAS) size_t bitsetSize_ {0};
944 // RSetSwapFlag_ represents if the oldToNewSet_ and localToShareSet_ are swapped, when they are swapped,
945 // the data in it are untrusted.
946 alignas(EAS) uint8_t RSetSwapFlag_ {0};
947 };
948 STATIC_ASSERT_EQ_ARCH(sizeof(PackedData), PackedData::SizeArch32, PackedData::SizeArch64);
949
950 static constexpr double MOST_OBJECT_ALIVE_THRESHOLD_PERCENT = 0.8;
951 static constexpr double AVERAGE_REGION_EVACUATE_SIZE = MOST_OBJECT_ALIVE_THRESHOLD_PERCENT *
952 DEFAULT_REGION_SIZE / 2; // 2 means half
953 private:
954 static constexpr double COMPRESS_THREASHOLD_PERCENT = 0.1;
955
956 RememberedSet *CreateRememberedSet();
957 RememberedSet *GetOrCreateCrossRegionRememberedSet();
958 RememberedSet *GetOrCreateOldToNewRememberedSet();
959 RememberedSet *GetOrCreateLocalToShareRememberedSet();
960
961 inline RememberedSet *CreateOldToNewRememberedSet();
962 inline RememberedSet *CreateLocalToShareRememberedSet();
963
964 PackedData packedData_;
965 NativeAreaAllocator *nativeAreaAllocator_;
966
967 uintptr_t allocateBase_;
968 uintptr_t end_;
969 uintptr_t highWaterMark_;
970 std::atomic_size_t aliveObject_ {0};
971 size_t gcAliveSize_ {0};
972 Region *next_ {nullptr};
973 Region *prev_ {nullptr};
974
975 RememberedSet *crossRegionSet_ {nullptr};
976 RememberedSet *sweepingOldToNewRSet_ {nullptr};
977 RememberedSet *sweepingLocalToShareRSet_ {nullptr};
978 Span<FreeObjectSet<FreeObject> *> freeObjectSets_;
979 Mutex *lock_ {nullptr};
980 uint64_t wasted_;
981 // snapshotdata_ is used to encode the region for snapshot. Its upper 32 bits are used to store the size of
982 // the huge object, and the lower 32 bits are used to store the region index
983 uint64_t snapshotData_;
984 uintptr_t localHeap_ {0};
985
986 friend class Snapshot;
987 friend class SnapshotProcessor;
988 friend class RuntimeStubs;
989 };
990 } // namespace ecmascript
991 } // namespace panda
992 #endif // ECMASCRIPT_MEM_REGION_H
993