1 /*
2 * Copyright (c) 2022-2024 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #ifndef ECMASCRIPT_MEM_REGION_H
17 #define ECMASCRIPT_MEM_REGION_H
18
19 #include <type_traits>
20
21 #include "ecmascript/base/aligned_struct.h"
22 #include "ecmascript/base/asan_interface.h"
23 #include "ecmascript/js_tagged_value.h"
24 #include "ecmascript/mem/free_object_list.h"
25 #include "ecmascript/mem/gc_bitset.h"
26 #include "ecmascript/mem/remembered_set.h"
27 #include "ecmascript/mem/mem_common.h"
28 #include "ecmascript/platform/map.h"
29
30 #include "ecmascript/platform/mutex.h"
31
32 #include "securec.h"
33
34 namespace panda {
35 namespace ecmascript {
36 class JSThread;
37
// Identifies which heap space a Region belongs to. The value is stored in the
// low byte of Region::PackedPtr (spaceFlag_), so every enumerator must fit in
// VALID_SPACE_MASK.
enum RegionSpaceFlag {
    UNINITIALIZED = 0,
    // We should avoid using the lower 3 bits (bits 0 to 2).
    // If ZAP_MEM is enabled, the value of the lower 3 bits conflicts with the INVALID_VALUE.

    // Bits 3 to 7 are reserved to denote the space where the region is located.
    IN_YOUNG_SPACE = 0x08,
    IN_SNAPSHOT_SPACE = 0x09,
    IN_HUGE_OBJECT_SPACE = 0x0A,
    IN_OLD_SPACE = 0x0B,
    IN_NON_MOVABLE_SPACE = 0x0C,
    IN_MACHINE_CODE_SPACE = 0x0D,
    IN_READ_ONLY_SPACE = 0X0E,
    IN_APPSPAWN_SPACE = 0x0F,
    IN_HUGE_MACHINE_CODE_SPACE = 0x10,
    IN_SHARED_NON_MOVABLE = 0x11,
    IN_SHARED_OLD_SPACE = 0x12,
    IN_SHARED_APPSPAWN_SPACE = 0X13,
    IN_SHARED_HUGE_OBJECT_SPACE = 0x14,
    IN_SHARED_READ_ONLY_SPACE = 0x15,

    VALID_SPACE_MASK = 0xFF,

    // Inclusive [BEGIN, END] ranges enabling single-compare category tests
    // (see Region::InGeneralOldSpace / InSharedHeap / InSharedSweepableSpace /
    // InHeapSpace). They rely on the enumerators above being densely ordered.
    GENERAL_OLD_BEGIN = IN_SNAPSHOT_SPACE,
    GENERAL_OLD_END = IN_HUGE_MACHINE_CODE_SPACE,
    SHARED_SPACE_BEGIN = IN_SHARED_NON_MOVABLE,
    SHARED_SPACE_END = IN_SHARED_READ_ONLY_SPACE,
    SHARED_SWEEPABLE_SPACE_BEGIN = IN_SHARED_NON_MOVABLE,
    SHARED_SWEEPABLE_SPACE_END = IN_SHARED_HUGE_OBJECT_SPACE,

    HEAP_SPACE_BEGIN = IN_YOUNG_SPACE,
    HEAP_SPACE_END = IN_SHARED_READ_ONLY_SPACE
};
71
// Per-region GC state flags. These are bit masks combined bitwise into the
// 16-bit PackedPtr::gcFlags_ field (see Region::SetGCFlag / ClearGCFlag /
// IsGCFlagSet), so each enumerator occupies a distinct bit.
enum RegionGCFlags {
    // We should avoid using the lower 3 bits (bits 0 to 2).
    // If ZAP_MEM is enabled, the value of the lower 3 bits conflicts with the INVALID_VALUE.

    // Below flags are used for GC, and each flag has a dedicated bit starting from the 3rd bit.
    NEVER_EVACUATE = 1 << 3,
    HAS_AGE_MARK = 1 << 4,
    BELOW_AGE_MARK = 1 << 5,
    IN_COLLECT_SET = 1 << 6,
    IN_NEW_TO_NEW_SET = 1 << 7,
    // Bits 8 to 10 (the lower 3 bits for the next byte) are also excluded for the sake of
    // INVALID_VALUE in ZAP_MEM.
    HAS_BEEN_SWEPT = 1 << 11,
    NEED_RELOCATE = 1 << 12,
    // ONLY used for heap verification.
    IN_INACTIVE_SEMI_SPACE = 1 << 13,
    IN_NEW_TO_OLD_SET = 1 << 14,
    IN_SHARED_COLLECT_SET = 1 << 15,
};
91
92 // Currently only use for region in LinearSpace, to check if the region is allocated during concurrent marking.
// Currently only use for region in LinearSpace, to check if the region is allocated during concurrent marking.
// Stored in PackedData::typeFlag_ (see Region::GetRegionTypeFlag / SetRegionTypeFlag).
enum class RegionTypeFlag : uint8_t {
    DEFAULT = 0,
    // We should avoid using the lower 3 bits (bits 0 to 2).
    // If ZAP_MEM is enabled, the value of the lower 3 bits conflicts with the INVALID_VALUE.

    // Region is allocated before concurrent marking, but some new object may be allocated here
    // during concurrent marking.
    HALF_FRESH = 0x08,
    // Region is allocated during concurrent marking.
    FRESH = 0x09,
};
104
// Distinguishes the two remembered-set families a region can own
// (see the InsertOldToNewRSet / InsertLocalToShareRSet method groups in Region).
enum RSetType {
    OLD_TO_NEW,
    LOCAL_TO_SHARE,
};
109
// Bit masks recording whether a region's remembered sets have been swapped out
// for sweeping (see Region::SwapOldToNewRSetForCS / SwapLocalToShareRSetForCS).
// Although declared as an enum class, the values are combined bitwise into
// PackedData::RSetSwapFlag_ via SetRSetSwapFlag / ClearRSetSwapFlag.
enum class RSetSwapFlag : uint8_t {
    // Both LocalToShare and oldToNew are not swapped. It means the bitset in it is available
    NO_SWAPPED = 0,

    // LocalToShare are swapped. It means the bitset in LocalToShare is unavailable
    LOCAL_TO_SHARE_SWAPPED_MASK = 0b001,

    // LocalToShare are collected. It means the bitset in LocalToShare is unavailable
    LOCAL_TO_SHARE_COLLECTED_MASK = 0b010,

    // oldToNew are swapped. It means the bitset in oldToNew is unavailable
    OLD_TO_NEW_SWAPPED_MASK = 0b100,
};
123
ToSpaceTypeName(uint8_t space)124 static inline std::string ToSpaceTypeName(uint8_t space)
125 {
126 switch (space) {
127 case RegionSpaceFlag::IN_YOUNG_SPACE:
128 return "young space";
129 case RegionSpaceFlag::IN_SNAPSHOT_SPACE:
130 return "snapshot space";
131 case RegionSpaceFlag::IN_HUGE_OBJECT_SPACE:
132 return "huge object space";
133 case RegionSpaceFlag::IN_OLD_SPACE:
134 return "old space";
135 case RegionSpaceFlag::IN_NON_MOVABLE_SPACE:
136 return "non movable space";
137 case RegionSpaceFlag::IN_MACHINE_CODE_SPACE:
138 return "machine code space";
139 case RegionSpaceFlag::IN_READ_ONLY_SPACE:
140 return "read only space";
141 case RegionSpaceFlag::IN_APPSPAWN_SPACE:
142 return "appspawn space";
143 case RegionSpaceFlag::IN_HUGE_MACHINE_CODE_SPACE:
144 return "huge machine code space";
145 case RegionSpaceFlag::IN_SHARED_NON_MOVABLE:
146 return "shared non movable space";
147 case RegionSpaceFlag::IN_SHARED_OLD_SPACE:
148 return "shared old space";
149 case RegionSpaceFlag::IN_SHARED_READ_ONLY_SPACE:
150 return "shared read only space";
151 case RegionSpaceFlag::IN_SHARED_HUGE_OBJECT_SPACE:
152 return "shared huge object space";
153 case RegionSpaceFlag::IN_SHARED_APPSPAWN_SPACE:
154 return "shared appspawn space";
155 default:
156 return "invalid space";
157 }
158 }
159
160 // |---------------------------------------------------------------------------------------|
161 // | Region (256 kb) |
162 // |---------------------------------|--------------------------------|--------------------|
163 // | Head (sizeof(Region)) | Mark bitset (4kb) | Data |
164 // |---------------------------------|--------------------------------|--------------------|
165
// A Region is one fixed-size chunk of heap memory (see the layout diagram
// above): a Region header, followed by the mark bitset, followed by object
// data starting at packedData_.begin_. PackedData member offsets are exposed
// via GetOffset accessors, so the field layout must not be reordered.
class Region {
public:
    // Regular-region constructor. [begin, end) is the usable payload range;
    // PackedData's constructor places the mark bitset at `begin` and bumps
    // begin_ past it. allocateBase is the start of the raw mapping.
    Region(NativeAreaAllocator *allocator, uintptr_t allocateBase, uintptr_t begin, uintptr_t end,
           RegionSpaceFlag spaceType, RegionTypeFlag typeFlag)
        : packedData_(begin, end, spaceType, typeFlag),
          nativeAreaAllocator_(allocator),
          allocateBase_(allocateBase),
          end_(end),
          highWaterMark_(end),
          aliveObject_(0),
          wasted_(0),
          snapshotData_(0) {}

    // JitFort space is divided into regions (JitForRegion) to enable
    // reusing free_object_list and free_object_set operations for
    // JitFort space, and GC marking actually happens in corresponding
    // MachineCode objects where JitFort space is allocated to. So no
    // gc mark bits needed in JitFortRegions.
    Region(NativeAreaAllocator *allocator, uintptr_t allocateBase, uintptr_t end,
           RegionSpaceFlag spaceType)
        : packedData_(allocateBase, spaceType), // no markGCBitset_ for JitFort
          nativeAreaAllocator_(allocator),
          allocateBase_(allocateBase),
          end_(end),
          highWaterMark_(end),
          aliveObject_(0),
          wasted_(0),
          snapshotData_(0) {}

    ~Region() = default;

    NO_COPY_SEMANTIC(Region);
    NO_MOVE_SEMANTIC(Region);

    // Coarse space category used to pick how many remembered-set bitsets a
    // batch Updater maintains (see CalculateBitSetNum below).
    enum RegionSpaceKind { InYoung, InGeneralOld, Other };

    // RAII helper for batched remembered-set updates: accumulates bits via a
    // GCBitSetUpdater and flushes them into the region's remembered sets on
    // destruction (or whenever the underlying updater crosses a word boundary).
    template <RegionSpaceKind kind>
    class Updater final {
    public:
        Updater(uintptr_t updateAddress, Region& region)
            : bitsetUpdater_(updateAddress),
              region_(region)
        {
        }

        NO_COPY_SEMANTIC(Updater);

        // Flush any pending bits when the updater goes out of scope.
        ARK_INLINE ~Updater()
        {
            Flush();
        }

        ARK_INLINE void UpdateLocalToShare()
        {
            bitsetUpdater_.Update(LocalToShareIdx);
        }

        // Only regions in general-old spaces track old-to-new references,
        // hence the enable_if restriction to kind == InGeneralOld.
        template <RegionSpaceKind T = kind, std::enable_if_t<T == InGeneralOld, int> = 0>
        ARK_INLINE void UpdateOldToNew()
        {
            bitsetUpdater_.Update(OldToNewIdx);
        }

        // Advance to the next slot; flush when the updater reports that the
        // accumulated word is complete.
        ARK_INLINE void Next()
        {
            if (bitsetUpdater_.Next()) {
                Flush();
            }
        }

    private:
        ARK_INLINE void Consume(size_t idx, uintptr_t updateAddress, uint32_t mask);

        ARK_INLINE void Flush();

        // Number of bitsets tracked per space kind: young and general-old
        // regions track both local-to-share and old-to-new; others only one.
        static constexpr size_t CalculateBitSetNum()
        {
            constexpr size_t InYoungBitSetNum = 2;
            constexpr size_t InGeneralOldBitSetNum = 2;
            constexpr size_t OtherBitSetNum = 1;
            switch (kind) {
                case InYoung:
                    return InYoungBitSetNum;
                case InGeneralOld:
                    return InGeneralOldBitSetNum;
                case Other:
                    return OtherBitSetNum;
            }
            return 0;
        }

        static constexpr size_t BitSetNum = CalculateBitSetNum();
        static constexpr size_t LocalToShareIdx = 0;
        static constexpr size_t OldToNewIdx = 1;
        GCBitSetUpdater<BitSetNum> bitsetUpdater_;
        Region& region_;
    };

    // Second-phase setup: allocates the region lock and, for sparse (free-list
    // managed) spaces, the per-size free object sets.
    void Initialize()
    {
        lock_ = new Mutex();
        if (InSparseSpace()) {
            InitializeFreeObjectSets();
        }
    }

    // Intrusive doubly-linked-list plumbing used by the owning space.
    void LinkNext(Region *next)
    {
        next_ = next;
    }

    Region *GetNext() const
    {
        return next_;
    }

    void LinkPrev(Region *prev)
    {
        prev_ = prev;
    }

    Region *GetPrev() const
    {
        return prev_;
    }

    // Start of the object payload (after header + mark bitset).
    uintptr_t GetBegin() const
    {
        return packedData_.begin_;
    }

    uintptr_t GetEnd() const
    {
        return end_;
    }

    // Highest address ever allocated in this region.
    uintptr_t GetHighWaterMark() const
    {
        return highWaterMark_;
    }

    // Full mapped size, including the Region header and bitset.
    size_t GetCapacity() const
    {
        return end_ - allocateBase_;
    }

    // Payload size only (excludes header and bitset).
    size_t GetSize() const
    {
        return end_ - packedData_.begin_;
    }

    // True iff every bit of `flag` is set (flag may combine several bits).
    bool IsGCFlagSet(RegionGCFlags flag) const
    {
        return (packedData_.flags_.gcFlags_ & flag) == flag;
    }

    void SetGCFlag(RegionGCFlags flag)
    {
        packedData_.flags_.gcFlags_ |= flag;
    }

    void ClearGCFlag(RegionGCFlags flag)
    {
        // NOLINTNEXTLINE(hicpp-signed-bitwise)
        packedData_.flags_.gcFlags_ &= ~flag;
    }

    std::string GetSpaceTypeName()
    {
        return ToSpaceTypeName(packedData_.flags_.spaceFlag_);
    }

    uint8_t GetSpaceType() const
    {
        return packedData_.flags_.spaceFlag_;
    }

    // Mark bitset
    GCBitset *GetMarkGCBitset() const;
    bool AtomicMark(void *address);
    // Objects in fresh region should only mark in JS Thread.
    bool NonAtomicMark(void *address);
    void ClearMark(void *address);
    bool Test(void *addr) const;
    bool Test(uintptr_t addr) const;
    // ONLY used for heap verification.
    bool TestOldToNew(uintptr_t addr);
    bool TestLocalToShare(uintptr_t addr);
    template <typename Visitor>
    void IterateAllMarkedBits(Visitor &&visitor) const;
    void ClearMarkGCBitset();
    // local to share remembered set
    bool HasLocalToShareRememberedSet() const;
    RememberedSet *CollectLocalToShareRSet();
    void InsertLocalToShareRSet(uintptr_t addr);
    template<RegionSpaceKind kind>
    Updater<kind> GetBatchRSetUpdater(uintptr_t addr);
    void AtomicInsertLocalToShareRSet(uintptr_t addr);
    void ClearLocalToShareRSetInRange(uintptr_t start, uintptr_t end);
    void AtomicClearLocalToShareRSetInRange(uintptr_t start, uintptr_t end);
    void AtomicClearSweepingLocalToShareRSetInRange(uintptr_t start, uintptr_t end);
    template <typename Visitor>
    void IterateAllLocalToShareBits(Visitor visitor);
    void DeleteLocalToShareRSet();
    void DeleteSweepingLocalToShareRSet();
    // Cross region remembered set
    void InsertCrossRegionRSet(uintptr_t addr);
    void AtomicInsertCrossRegionRSet(uintptr_t addr);
    template <typename Visitor>
    void IterateAllCrossRegionBits(Visitor visitor) const;
    void ClearCrossRegionRSet();
    void ClearCrossRegionRSetInRange(uintptr_t start, uintptr_t end);
    void AtomicClearCrossRegionRSetInRange(uintptr_t start, uintptr_t end);
    void DeleteCrossRegionRSet();
    // Old to new remembered set
    void InsertOldToNewRSet(uintptr_t addr);
    void ClearOldToNewRSet(uintptr_t addr);

    template <typename Visitor>
    void IterateAllOldToNewBits(Visitor visitor);
    void ClearOldToNewRSet();
    void ClearOldToNewRSetInRange(uintptr_t start, uintptr_t end);
    void DeleteOldToNewRSet();

    void AtomicClearSweepingOldToNewRSetInRange(uintptr_t start, uintptr_t end);
    void ClearSweepingOldToNewRSetInRange(uintptr_t start, uintptr_t end);
    void DeleteSweepingOldToNewRSet();
    template <typename Visitor>
    void AtomicIterateAllSweepingRSetBits(Visitor visitor);
    template <typename Visitor>
    void IterateAllSweepingRSetBits(Visitor visitor);

    // Regions are DEFAULT_REGION_SIZE-aligned, so the owning Region header is
    // found by masking off the low bits of any interior object address.
    static Region *ObjectAddressToRange(TaggedObject *obj)
    {
        return reinterpret_cast<Region *>(ToUintPtr(obj) & ~DEFAULT_REGION_MASK);
    }

    static Region *ObjectAddressToRange(uintptr_t objAddress)
    {
        return reinterpret_cast<Region *>(objAddress & ~DEFAULT_REGION_MASK);
    }

    // Payload bytes available in a default-sized region once the aligned
    // header and the mark bitset are subtracted.
    static size_t GetRegionAvailableSize()
    {
        size_t regionHeaderSize = AlignUp(sizeof(Region), static_cast<size_t>(MemAlignment::MEM_ALIGN_REGION));
        size_t bitsetSize = GCBitset::SizeOfGCBitset(DEFAULT_REGION_SIZE - regionHeaderSize);
        return DEFAULT_REGION_SIZE - regionHeaderSize - bitsetSize;
    }

    // Releases the heap-allocated lock; safe to call when lock_ was never set.
    void ClearMembers()
    {
        if (lock_ != nullptr) {
            delete lock_;
            lock_ = nullptr;
        }
    }

    // Marks the region as no longer holding live objects: unpoisons the
    // payload for ASan and resets the space flag to UNINITIALIZED.
    void Invalidate()
    {
        ASAN_UNPOISON_MEMORY_REGION(reinterpret_cast<void *>(GetBegin()), GetSize());
        packedData_.flags_.spaceFlag_ = RegionSpaceFlag::UNINITIALIZED;
    }

    void ResetRegionFlag(RegionSpaceFlag spaceFlag, RegionGCFlags gcFlag)
    {
        packedData_.flags_.spaceFlag_ = spaceFlag;
        packedData_.flags_.gcFlags_ = gcFlag;
    }

    uint8_t GetRegionSpaceFlag();
    void SetRSetSwapFlag(RSetSwapFlag mask);
    void ClearRSetSwapFlag(RSetSwapFlag mask);

    void SetRegionSpaceFlag(RegionSpaceFlag flag)
    {
        packedData_.flags_.spaceFlag_ = flag;
    }

    // --- Space membership predicates: each compares spaceFlag_ against one
    // RegionSpaceFlag value, or a [BEGIN, END] range for category checks. ---

    bool InYoungSpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_YOUNG_SPACE;
    }

    bool InOldSpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_OLD_SPACE;
    }

    bool InYoungOrOldSpace() const
    {
        return InYoungSpace() || InOldSpace();
    }

    bool InGeneralOldSpace() const
    {
        ASSERT(packedData_.flags_.spaceFlag_ != 0);
        auto flag = packedData_.flags_.spaceFlag_;
        return flag >= RegionSpaceFlag::GENERAL_OLD_BEGIN && flag <= RegionSpaceFlag::GENERAL_OLD_END;
    }

    bool InHugeObjectSpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_HUGE_OBJECT_SPACE;
    }

    bool InMachineCodeSpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_MACHINE_CODE_SPACE;
    }

    bool InHugeMachineCodeSpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_HUGE_MACHINE_CODE_SPACE;
    }

    bool InNonMovableSpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_NON_MOVABLE_SPACE;
    }

    bool InSnapshotSpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_SNAPSHOT_SPACE;
    }

    bool InReadOnlySpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_READ_ONLY_SPACE;
    }

    bool InSharedOldSpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_SHARED_OLD_SPACE;
    }

    bool InSharedNonMovableSpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_SHARED_NON_MOVABLE;
    }

    bool InSharedHugeObjectSpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_SHARED_HUGE_OBJECT_SPACE;
    }

    bool InSharedReadOnlySpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_SHARED_READ_ONLY_SPACE;
    }

    bool InSharedAppSpawnSpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_SHARED_APPSPAWN_SPACE;
    }

    bool InAppSpawnSpace() const
    {
        return packedData_.flags_.spaceFlag_ == RegionSpaceFlag::IN_APPSPAWN_SPACE;
    }

    // Not including shared read only space.
    bool InSharedSweepableSpace() const
    {
        auto flag = packedData_.flags_.spaceFlag_;
        return flag >= RegionSpaceFlag::SHARED_SWEEPABLE_SPACE_BEGIN &&
            flag <= RegionSpaceFlag::SHARED_SWEEPABLE_SPACE_END;
    }

    bool InSharedHeap() const
    {
        auto flag = packedData_.flags_.spaceFlag_;
        return flag >= RegionSpaceFlag::SHARED_SPACE_BEGIN && flag <= RegionSpaceFlag::SHARED_SPACE_END;
    }

    // Sparse spaces are the ones managed with free object sets
    // (see Initialize / InitializeFreeObjectSets).
    bool InSparseSpace() const
    {
        auto flag = packedData_.flags_.spaceFlag_;
        switch (flag) {
            case RegionSpaceFlag::IN_OLD_SPACE:
            case RegionSpaceFlag::IN_NON_MOVABLE_SPACE:
            case RegionSpaceFlag::IN_MACHINE_CODE_SPACE:
            case RegionSpaceFlag::IN_APPSPAWN_SPACE:
            case RegionSpaceFlag::IN_SHARED_NON_MOVABLE:
            case RegionSpaceFlag::IN_SHARED_OLD_SPACE:
                return true;
            default:
                return false;
        }
    }

    bool InHeapSpace() const
    {
        uint8_t space = packedData_.flags_.spaceFlag_;
        return space >= RegionSpaceFlag::HEAP_SPACE_BEGIN && space <= RegionSpaceFlag::HEAP_SPACE_END;
    }

    // --- GC flag predicates (each tests one RegionGCFlags bit). ---

    bool InCollectSet() const
    {
        return IsGCFlagSet(RegionGCFlags::IN_COLLECT_SET);
    }

    bool InSCollectSet() const
    {
        return IsGCFlagSet(RegionGCFlags::IN_SHARED_COLLECT_SET);
    }

    bool InYoungSpaceOrCSet() const
    {
        return InYoungSpace() || InCollectSet();
    }

    bool InNewToNewSet() const
    {
        return IsGCFlagSet(RegionGCFlags::IN_NEW_TO_NEW_SET);
    }

    bool InNewToOldSet() const
    {
        return IsGCFlagSet(RegionGCFlags::IN_NEW_TO_OLD_SET);
    }

    bool HasAgeMark() const
    {
        return IsGCFlagSet(RegionGCFlags::HAS_AGE_MARK);
    }

    bool BelowAgeMark() const
    {
        return IsGCFlagSet(RegionGCFlags::BELOW_AGE_MARK);
    }

    bool NeedRelocate() const
    {
        return IsGCFlagSet(RegionGCFlags::NEED_RELOCATE);
    }

    // ONLY used for heap verification.
    bool InInactiveSemiSpace() const
    {
        return IsGCFlagSet(RegionGCFlags::IN_INACTIVE_SEMI_SPACE);
    }

    // ONLY used for heap verification.
    bool InActiveSemiSpace() const
    {
        return InYoungSpace() && !InInactiveSemiSpace();
    }

    RegionTypeFlag GetRegionTypeFlag() const
    {
        return packedData_.typeFlag_;
    }

    void SetRegionTypeFlag(RegionTypeFlag typeFlag)
    {
        packedData_.typeFlag_ = typeFlag;
    }

    void ResetRegionTypeFlag()
    {
        SetRegionTypeFlag(RegionTypeFlag::DEFAULT);
    }

    // Region allocated entirely during concurrent marking.
    bool IsFreshRegion() const
    {
        return GetRegionTypeFlag() == RegionTypeFlag::FRESH;
    }

    // Region allocated before concurrent marking but extended during it.
    bool IsHalfFreshRegion() const
    {
        return GetRegionTypeFlag() == RegionTypeFlag::HALF_FRESH;
    }

    // ONLY used for heap verification.
    void SetInactiveSemiSpace()
    {
        SetGCFlag(RegionGCFlags::IN_INACTIVE_SEMI_SPACE);
    }

    // ONLY used for heap verification.
    void ResetInactiveSemiSpace()
    {
        ClearGCFlag(RegionGCFlags::IN_INACTIVE_SEMI_SPACE);
    }

    void SetSwept()
    {
        SetGCFlag(RegionGCFlags::HAS_BEEN_SWEPT);
    }

    void ResetSwept()
    {
        ClearGCFlag(RegionGCFlags::HAS_BEEN_SWEPT);
    }

    // NOTE: the upper bound is inclusive (address == end_ passes); callers
    // such as SetHighWaterMark rely on end_ being accepted.
    bool InRange(uintptr_t address) const
    {
        return address >= packedData_.begin_ && address <= end_;
    }

    uintptr_t GetAllocateBase() const
    {
        return allocateBase_;
    }

    // Bytes allocated up to `top` (or up to the high-water mark when top == 0).
    size_t GetAllocatedBytes(uintptr_t top = 0)
    {
        ASSERT(top == 0 || InRange(top));
        return (top == 0) ? (highWaterMark_ - packedData_.begin_) : (top - packedData_.begin_);
    }

    void SetHighWaterMark(uintptr_t mark)
    {
        ASSERT(InRange(mark));
        highWaterMark_ = mark;
    }

    // Marks every object live in the bitset, then write-protects the whole
    // mapping — used when freezing a region as read-only.
    void SetReadOnlyAndMarked()
    {
        packedData_.markGCBitset_->SetAllBits(packedData_.bitsetSize_);
        PageProtect(reinterpret_cast<void *>(allocateBase_), GetCapacity(), PAGE_PROT_READ);
    }

    void ClearReadOnly()
    {
        PageProtect(reinterpret_cast<void *>(allocateBase_), GetCapacity(), PAGE_PROT_READWRITE);
    }

    // Allocates one FreeObjectSet per size class; owned by this region and
    // released in DestroyFreeObjectSets.
    void InitializeFreeObjectSets()
    {
        FreeObjectSet<FreeObject> **sets = new FreeObjectSet<FreeObject> *[FreeObjectList<FreeObject>::NumberOfSets()];
        for (int i = 0; i < FreeObjectList<FreeObject>::NumberOfSets(); i++) {
            sets[i] = new FreeObjectSet<FreeObject>(i);
        }
        freeObjectSets_ = Span<FreeObjectSet<FreeObject> *>(sets, FreeObjectList<FreeObject>::NumberOfSets());
    }

    void DestroyFreeObjectSets()
    {
        for (int i = 0; i < FreeObjectList<FreeObject>::NumberOfSets(); i++) {
            delete freeObjectSets_[i];
            freeObjectSets_[i] = nullptr;
        }
        delete[] freeObjectSets_.data();
    }

    // Lazily (re)creates the set for `type` if it was destroyed.
    // NOTE(review): the comment below claims thread safety, but the
    // check-then-create is not synchronized here — presumably guarded by the
    // caller; confirm before relying on concurrent access.
    FreeObjectSet<FreeObject> *GetFreeObjectSet(SetType type)
    {
        // Thread safe
        if (freeObjectSets_[type] == nullptr) {
            freeObjectSets_[type] = new FreeObjectSet<FreeObject>(type);
        }
        return freeObjectSets_[type];
    }

    template<class Callback>
    void EnumerateFreeObjectSets(Callback cb)
    {
        for (auto set : freeObjectSets_) {
            cb(set);
        }
    }

    // Reverse enumeration; stops early when the callback returns false.
    template<class Callback>
    void REnumerateFreeObjectSets(Callback cb)
    {
        auto last = freeObjectSets_.crbegin();
        auto first = freeObjectSets_.crend();
        for (; last != first; last++) {
            if (!cb(*last)) {
                break;
            }
        }
    }

    // Relaxed atomic: only the counter itself must be race-free, no ordering
    // with other memory is implied.
    void IncreaseAliveObject(size_t size)
    {
        aliveObject_.fetch_add(size, std::memory_order_relaxed);
    }

    // Snapshots the live-byte counter into gcAliveSize_ for later queries.
    void SetRegionAliveSize()
    {
        gcAliveSize_ = aliveObject_;
    }

    void ResetAliveObject()
    {
        aliveObject_ = 0;
    }

    size_t AliveObject() const
    {
        return aliveObject_.load(std::memory_order_relaxed);
    }

    size_t GetGCAliveSize() const
    {
        return gcAliveSize_;
    }

    // True when more than MOST_OBJECT_ALIVE_THRESHOLD_PERCENT of the payload
    // is live — such regions are poor evacuation candidates.
    bool MostObjectAlive() const
    {
        return aliveObject_ > MOST_OBJECT_ALIVE_THRESHOLD_PERCENT * GetSize();
    }

    bool BelowCompressThreasholdAlive() const
    {
        return gcAliveSize_ < COMPRESS_THREASHOLD_PERCENT * GetSize();
    }

    void ResetWasted()
    {
        wasted_ = 0;
    }

    void IncreaseWasted(uint64_t size)
    {
        wasted_ += size;
    }

    uint64_t GetWastedSize()
    {
        return wasted_;
    }

    uint64_t GetSnapshotData()
    {
        return snapshotData_;
    }

    void SetSnapshotData(uint64_t value)
    {
        snapshotData_ = value;
    }

    // Moves the old-to-new set aside for concurrent sweeping and records the
    // swap so readers know the in-place bitset is stale (see RSetSwapFlag).
    void SwapOldToNewRSetForCS()
    {
        sweepingOldToNewRSet_ = packedData_.oldToNewSet_;
        packedData_.oldToNewSet_ = nullptr;
        if (sweepingOldToNewRSet_ != nullptr) {
            SetRSetSwapFlag(RSetSwapFlag::OLD_TO_NEW_SWAPPED_MASK);
        }
    }

    // Same swap protocol as above, for the local-to-share set.
    void SwapLocalToShareRSetForCS()
    {
        sweepingLocalToShareRSet_ = packedData_.localToShareSet_;
        packedData_.localToShareSet_ = nullptr;
        if (sweepingLocalToShareRSet_ != nullptr) {
            SetRSetSwapFlag(RSetSwapFlag::LOCAL_TO_SHARE_SWAPPED_MASK);
        }
    }

    // Records the owning local heap as an opaque pointer-sized value.
    void SetLocalHeap(uintptr_t localHeap)
    {
        ASSERT(localHeap != (uintptr_t)nullptr);
        localHeap_ = localHeap;
    }

    uintptr_t GetLocalHeap(void)
    {
        return localHeap_;
    }

    // should call in js-thread
    void MergeOldToNewRSetForCS();
    void MergeLocalToShareRSetForCS();

    // should call in daemon-thread, or in js-thread in RUNNING state
    void MergeLocalToShareRSetForCM(RememberedSet *set);

    // Pointer-sized slot packing the space flag and GC flags together so they
    // can be read/written as one word (also accessed from generated code via
    // GetFlagsOffset).
    struct alignas(JSTaggedValue::TaggedTypeSize()) PackedPtr : public base::AlignedPointer {
        uint8_t spaceFlag_;
        uint16_t gcFlags_;
    };

    // Fixed-layout header data; field order must match Index below because
    // compiled/stub code reads members via the GetOffset accessors.
    struct PackedData : public base::AlignedStruct<JSTaggedValue::TaggedTypeSize(),
                                                   base::AlignedPointer,
                                                   base::AlignedPointer,
                                                   base::AlignedPointer,
                                                   base::AlignedPointer,
                                                   base::AlignedPointer,
                                                   base::AlignedPointer,
                                                   base::AlignedSize,
                                                   base::AlignedUint8> {
        enum class Index : size_t {
            FlagsIndex = 0,
            TypeFlagIndex,
            MarkGCBitSetIndex,
            OldToNewSetIndex,
            LocalToShareSetIndex,
            BeginIndex,
            BitSetSizeIndex,
            RSetSwapFlagIndex,
            NumOfMembers
        };

        static_assert(static_cast<size_t>(Index::NumOfMembers) == NumOfTypes);

        // Regular-region header: constructs the mark bitset in place at
        // `begin` (placement new) and advances begin_ past it. Huge-object
        // spaces only need a minimal one-word bitset.
        inline PackedData(uintptr_t begin, uintptr_t end, RegionSpaceFlag spaceType, RegionTypeFlag typeFlag)
        {
            flags_.spaceFlag_ = spaceType;
            flags_.gcFlags_ = 0;
            typeFlag_ = typeFlag;
            bitsetSize_ = (spaceType == RegionSpaceFlag::IN_HUGE_OBJECT_SPACE ||
                spaceType == RegionSpaceFlag::IN_HUGE_MACHINE_CODE_SPACE ||
                spaceType == RegionSpaceFlag::IN_SHARED_HUGE_OBJECT_SPACE) ?
                GCBitset::BYTE_PER_WORD : GCBitset::SizeOfGCBitset(end - begin);
            markGCBitset_ = new (ToVoidPtr(begin)) GCBitset();
            markGCBitset_->Clear(bitsetSize_);
            begin_ = AlignUp(begin + bitsetSize_, static_cast<size_t>(MemAlignment::MEM_ALIGN_OBJECT));
            // The object region marked with poison until it is allocated if is_asan is true
#ifdef ARK_ASAN_ON
            ASAN_POISON_MEMORY_REGION(reinterpret_cast<void *>(begin_), (end - begin_));
#endif
        }

        // JitFort header: no mark bitset (see the JitFort constructor above).
        inline PackedData(uintptr_t begin, RegionSpaceFlag spaceType)
        {
            flags_.spaceFlag_ = spaceType;
            flags_.gcFlags_ = 0;
            typeFlag_ = RegionTypeFlag::DEFAULT;
            // no markGCBitset
            begin_ = begin;
            markGCBitset_ = nullptr;
        }

        // Offset accessors for generated code; isArch32 selects the 32-bit
        // layout.
        static size_t GetFlagsOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::FlagsIndex)>(isArch32);
        }

        static size_t GetTypeFlagOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::TypeFlagIndex)>(isArch32);
        }

        static size_t GetGCBitsetOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::MarkGCBitSetIndex)>(isArch32);
        }

        static size_t GetOldToNewSetOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::OldToNewSetIndex)>(isArch32);
        }

        static size_t GetLocalToShareSetOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::LocalToShareSetIndex)>(isArch32);
        }

        static size_t GetBeginOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::BeginIndex)>(isArch32);
        }

        static size_t GetRSetSwapFlagOffset(bool isArch32)
        {
            return GetOffset<static_cast<size_t>(Index::RSetSwapFlagIndex)>(isArch32);
        }

        alignas(EAS) PackedPtr flags_;
        // Use different UIntPtr from flags_ to prevent the potential data race.
        // Be careful when storing to this value, currently this is only from JS_Thread during ConcurrentMarking,
        // or from GC_Thread during GC ClearTask.
        alignas(EAS) RegionTypeFlag typeFlag_;
        alignas(EAS) GCBitset *markGCBitset_ {nullptr};
        alignas(EAS) RememberedSet *oldToNewSet_ {nullptr};
        alignas(EAS) RememberedSet *localToShareSet_ {nullptr};
        alignas(EAS) uintptr_t begin_ {0};
        alignas(EAS) size_t bitsetSize_ {0};
        // RSetSwapFlag_ represents if the oldToNewSet_ and localToShareSet_ are swapped, when they are swapped,
        // the data in it are untrusted.
        alignas(EAS) uint8_t RSetSwapFlag_ {0};
    };
    STATIC_ASSERT_EQ_ARCH(sizeof(PackedData), PackedData::SizeArch32, PackedData::SizeArch64);

    static constexpr double MOST_OBJECT_ALIVE_THRESHOLD_PERCENT = 0.8;
    static constexpr double AVERAGE_REGION_EVACUATE_SIZE = MOST_OBJECT_ALIVE_THRESHOLD_PERCENT *
                                                           DEFAULT_REGION_SIZE / 2; // 2 means half
private:
    static constexpr double COMPRESS_THREASHOLD_PERCENT = 0.1;

    RememberedSet *CreateRememberedSet();
    RememberedSet *GetOrCreateCrossRegionRememberedSet();
    RememberedSet *GetOrCreateOldToNewRememberedSet();
    RememberedSet *GetOrCreateLocalToShareRememberedSet();

    inline RememberedSet *CreateOldToNewRememberedSet();
    inline RememberedSet *CreateLocalToShareRememberedSet();

    // Fixed-offset header data — must stay the first member (generated code
    // addresses it relative to the region base).
    PackedData packedData_;
    NativeAreaAllocator *nativeAreaAllocator_;

    uintptr_t allocateBase_;    // start of the raw mapping
    uintptr_t end_;             // one past the last payload byte
    uintptr_t highWaterMark_;   // highest address handed out so far
    std::atomic_size_t aliveObject_ {0};    // live bytes, updated concurrently
    size_t gcAliveSize_ {0};                // aliveObject_ snapshot (SetRegionAliveSize)
    Region *next_ {nullptr};
    Region *prev_ {nullptr};

    RememberedSet *crossRegionSet_ {nullptr};
    RememberedSet *sweepingOldToNewRSet_ {nullptr};      // swapped-out set (SwapOldToNewRSetForCS)
    RememberedSet *sweepingLocalToShareRSet_ {nullptr};  // swapped-out set (SwapLocalToShareRSetForCS)
    Span<FreeObjectSet<FreeObject> *> freeObjectSets_;   // per-size free lists (sparse spaces only)
    Mutex *lock_ {nullptr};
    uint64_t wasted_;
    // snapshotdata_ is used to encode the region for snapshot. Its upper 32 bits are used to store the size of
    // the huge object, and the lower 32 bits are used to store the region index
    uint64_t snapshotData_;
    uintptr_t localHeap_ {0};

    friend class Snapshot;
    friend class SnapshotProcessor;
    friend class RuntimeStubs;
};
984 } // namespace ecmascript
985 } // namespace panda
986 #endif // ECMASCRIPT_MEM_REGION_H
987