/*
 * Copyright (c) 2021 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ECMASCRIPT_MEM_HEAP_H
#define ECMASCRIPT_MEM_HEAP_H

#include "ecmascript/base/config.h"
#include "ecmascript/frames.h"
#include "ecmascript/js_thread.h"
#include "ecmascript/mem/linear_space.h"
#include "ecmascript/mem/mark_stack.h"
#include "ecmascript/mem/sparse_space.h"
#include "ecmascript/mem/work_manager.h"
#include "ecmascript/taskpool/taskpool.h"

namespace panda::ecmascript {
class ConcurrentMarker;
class ConcurrentSweeper;
class EcmaVM;
class FullGC;
class HeapRegionAllocator;
class HeapTracker;
class Marker;
class MemController;
class NativeAreaAllocator;
class ParallelEvacuator;
class PartialGC;
class STWYoungGC;
class JSNativePointer;

enum class MarkType : uint8_t {
    MARK_YOUNG,
    MARK_FULL
};

enum class MemGrowingType : uint8_t {
    HIGH_THROUGHPUT,
    CONSERVATIVE,
    PRESSURE
};

enum class IdleHeapSizePtr : uint8_t {
    IDLE_HEAP_SIZE_1 = 0,
    IDLE_HEAP_SIZE_2,
    IDLE_HEAP_SIZE_3
};

struct IdleData {
    int64_t idleHeapObjectSize1 {0};
    int64_t idleHeapObjectSize2 {0};
    int64_t idleHeapObjectSize3 {0};
    IdleHeapSizePtr curPtr_ {IdleHeapSizePtr::IDLE_HEAP_SIZE_1};

    static constexpr int64_t REST_HEAP_GROWTH_LIMIT = 200_KB;
    bool CheckIsRest()
    {
        if (abs(idleHeapObjectSize1 - idleHeapObjectSize2) < REST_HEAP_GROWTH_LIMIT &&
            abs(idleHeapObjectSize2 - idleHeapObjectSize3) < REST_HEAP_GROWTH_LIMIT) {
            return true;
        }
        return false;
    }

    void SetNextValue(int64_t idleHeapObjectSize)
    {
        switch (curPtr_) {
            case IdleHeapSizePtr::IDLE_HEAP_SIZE_1:
                idleHeapObjectSize1 = idleHeapObjectSize;
                curPtr_ = IdleHeapSizePtr::IDLE_HEAP_SIZE_2;
                break;
            case IdleHeapSizePtr::IDLE_HEAP_SIZE_2:
                idleHeapObjectSize2 = idleHeapObjectSize;
                curPtr_ = IdleHeapSizePtr::IDLE_HEAP_SIZE_3;
                break;
            case IdleHeapSizePtr::IDLE_HEAP_SIZE_3:
                idleHeapObjectSize3 = idleHeapObjectSize;
                curPtr_ = IdleHeapSizePtr::IDLE_HEAP_SIZE_1;
                break;
            default:
                LOG_ECMA(FATAL) << "this branch is unreachable";
                UNREACHABLE();
        }
    }
};
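
// Usage sketch (illustrative only, not part of this header's API): the idle-GC heuristic
// records the heap object size on three consecutive idle notifications and treats the heap
// as "at rest" once consecutive samples differ by less than REST_HEAP_GROWTH_LIMIT (200 KB).
// The names "sample1" .. "sample3" below are placeholders:
//
//   IdleData data;
//   data.SetNextValue(sample1);  // slots are filled round-robin: 1 -> 2 -> 3 -> 1 ...
//   data.SetNextValue(sample2);
//   data.SetNextValue(sample3);
//   bool settled = data.CheckIsRest();  // true when heap growth has flattened out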

enum class HeapMode {
    NORMAL,
    SPAWN,
    SHARE,
};

class Heap {
public:
    explicit Heap(EcmaVM *ecmaVm);
    ~Heap() = default;
    NO_COPY_SEMANTIC(Heap);
    NO_MOVE_SEMANTIC(Heap);
    void Initialize();
    void Destroy();
    void Prepare();
    void Resume(TriggerGCType gcType);
    void ResumeForAppSpawn();
    void CompactHeapBeforeFork();
    void DisableParallelGC();
    void EnableParallelGC();
    // fixme: Rename NewSpace to YoungSpace.
    // This is the active young generation space where new objects are allocated
    // or copied into (from the other semi space) during semi space GC.
    SemiSpace *GetNewSpace() const
    {
        return activeSemiSpace_;
    }

    /*
     * Return the original active space where the objects are to be evacuated during semi space GC.
     * This should be invoked only in the evacuation phase of semi space GC.
     * fixme: Get rid of this interface or make it safe considering the above implicit limitation / requirement.
     */
    SemiSpace *GetFromSpaceDuringEvacuation() const
    {
        return inactiveSemiSpace_;
    }

    OldSpace *GetOldSpace() const
    {
        return oldSpace_;
    }

    NonMovableSpace *GetNonMovableSpace() const
    {
        return nonMovableSpace_;
    }

    HugeObjectSpace *GetHugeObjectSpace() const
    {
        return hugeObjectSpace_;
    }

    MachineCodeSpace *GetMachineCodeSpace() const
    {
        return machineCodeSpace_;
    }

    SnapshotSpace *GetSnapshotSpace() const
    {
        return snapshotSpace_;
    }

    ReadOnlySpace *GetReadOnlySpace() const
    {
        return readOnlySpace_;
    }

    AppSpawnSpace *GetAppSpawnSpace() const
    {
        return appSpawnSpace_;
    }

    SparseSpace *GetSpaceWithType(MemSpaceType type) const
    {
        switch (type) {
            case MemSpaceType::OLD_SPACE:
                return oldSpace_;
                break;
            case MemSpaceType::NON_MOVABLE:
                return nonMovableSpace_;
                break;
            case MemSpaceType::MACHINE_CODE_SPACE:
                return machineCodeSpace_;
                break;
            default:
                UNREACHABLE();
                break;
        }
    }

    STWYoungGC *GetSTWYoungGC() const
    {
        return stwYoungGC_;
    }

    PartialGC *GetPartialGC() const
    {
        return partialGC_;
    }

    FullGC *GetFullGC() const
    {
        return fullGC_;
    }

    ConcurrentSweeper *GetSweeper() const
    {
        return sweeper_;
    }

    ParallelEvacuator *GetEvacuator() const
    {
        return evacuator_;
    }

    ConcurrentMarker *GetConcurrentMarker() const
    {
        return concurrentMarker_;
    }

    Marker *GetNonMovableMarker() const
    {
        return nonMovableMarker_;
    }

    Marker *GetSemiGCMarker() const
    {
        return semiGCMarker_;
    }

    Marker *GetCompressGCMarker() const
    {
        return compressGCMarker_;
    }

    EcmaVM *GetEcmaVM() const
    {
        return ecmaVm_;
    }

    JSThread *GetJSThread() const
    {
        return thread_;
    }

    WorkManager *GetWorkManager() const
    {
        return workManager_;
    }

    MemController *GetMemController() const
    {
        return memController_;
    }

    /*
     * For object allocations.
     */

    // Young
    inline TaggedObject *AllocateYoungOrHugeObject(JSHClass *hclass);
    inline TaggedObject *AllocateYoungOrHugeObject(JSHClass *hclass, size_t size);
    inline TaggedObject *AllocateReadOnlyOrHugeObject(JSHClass *hclass);
    inline TaggedObject *AllocateReadOnlyOrHugeObject(JSHClass *hclass, size_t size);
    inline TaggedObject *AllocateYoungOrHugeObject(size_t size);
    inline uintptr_t AllocateYoungSync(size_t size);
    inline TaggedObject *TryAllocateYoungGeneration(JSHClass *hclass, size_t size);
    // Old
    inline TaggedObject *AllocateOldOrHugeObject(JSHClass *hclass);
    inline TaggedObject *AllocateOldOrHugeObject(JSHClass *hclass, size_t size);
    // Non-movable
    inline TaggedObject *AllocateNonMovableOrHugeObject(JSHClass *hclass);
    inline TaggedObject *AllocateNonMovableOrHugeObject(JSHClass *hclass, size_t size);
    inline TaggedObject *AllocateClassClass(JSHClass *hclass, size_t size);
    // Huge
    inline TaggedObject *AllocateHugeObject(JSHClass *hclass, size_t size);
    inline TaggedObject *AllocateHugeObject(size_t size);
    // Machine code
    inline TaggedObject *AllocateMachineCodeObject(JSHClass *hclass, size_t size);
    // Snapshot
    inline uintptr_t AllocateSnapshotSpace(size_t size);
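
    // Allocation usage sketch (illustrative only; "heap", "hclass" and "size" are placeholders
    // for a valid Heap*, JSHClass* and the requested object size in bytes). As the
    // "*OrHugeObject" names suggest, oversized requests fall back to the huge object space:
    //
    //   TaggedObject *young = heap->AllocateYoungOrHugeObject(hclass, size);
    //   TaggedObject *old = heap->AllocateOldOrHugeObject(hclass, size);
    //   TaggedObject *pinned = heap->AllocateNonMovableOrHugeObject(hclass, size);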

    NativeAreaAllocator *GetNativeAreaAllocator() const
    {
        return nativeAreaAllocator_;
    }

    HeapRegionAllocator *GetHeapRegionAllocator() const
    {
        return heapRegionAllocator_;
    }

    /*
     * GC triggers.
     */

    void CollectGarbage(TriggerGCType gcType);

    void CheckAndTriggerOldGC(size_t size = 0);
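
    // Trigger usage sketch (illustrative; "gcType" and "upcomingAllocationSize" are placeholders,
    // and the concrete TriggerGCType values are defined elsewhere in ecmascript/mem):
    //
    //   heap->CheckAndTriggerOldGC(upcomingAllocationSize);  // heuristic check before a large allocation
    //   heap->CollectGarbage(gcType);                        // explicit collection of the chosen scope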

    /*
     * Parallel GC related configurations and utilities.
     */

    void PostParallelGCTask(ParallelGCTaskPhase taskPhase);

    bool IsParallelGCEnabled() const
    {
        return parallelGC_;
    }
    void ChangeGCParams(bool inBackground);
    void TriggerIdleCollection(int idleMicroSec);
    void NotifyMemoryPressure(bool inHighMemoryPressure);
    bool CheckCanDistributeTask();

    void WaitRunningTaskFinished();

    void TryTriggerConcurrentMarking();
    void AdjustBySurvivalRate(size_t originalNewSpaceSize);
    void TriggerConcurrentMarking();

    /*
     * Wait for existing concurrent marking tasks to be finished (if any).
     * Return true if there's ongoing concurrent marking.
     */
    bool CheckOngoingConcurrentMarking();
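
    // Concurrent marking usage sketch (illustrative; "heap" is a placeholder for a valid Heap*):
    //
    //   heap->TryTriggerConcurrentMarking();                   // heuristically start concurrent marking
    //   bool marking = heap->CheckOngoingConcurrentMarking();  // wait for in-flight marking tasks, if any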

    /*
     * Functions invoked during GC.
     */

    void SetMarkType(MarkType markType)
    {
        markType_ = markType;
    }

    bool IsFullMark() const
    {
        return markType_ == MarkType::MARK_FULL;
    }

    inline void SwapNewSpace();

    inline bool MoveYoungRegionSync(Region *region);
    inline void MergeToOldSpaceSync(LocalSpace *localSpace);

    template<class Callback>
    void EnumerateOldSpaceRegions(const Callback &cb, Region *region = nullptr) const;

    template<class Callback>
    void EnumerateNonNewSpaceRegions(const Callback &cb) const;

    template<class Callback>
    void EnumerateNonNewSpaceRegionsWithRecord(const Callback &cb) const;

    template<class Callback>
    void EnumerateNewSpaceRegions(const Callback &cb) const;

    template<class Callback>
    void EnumerateSnapshotSpaceRegions(const Callback &cb) const;

    template<class Callback>
    void EnumerateNonMovableRegions(const Callback &cb) const;

    template<class Callback>
    inline void EnumerateRegions(const Callback &cb) const;

    inline void ClearSlotsRange(Region *current, uintptr_t freeStart, uintptr_t freeEnd);

    void WaitAllTasksFinished();
    void WaitConcurrentMarkingFinished();

    MemGrowingType GetMemGrowingType() const
    {
        return memGrowingtype_;
    }

    void SetMemGrowingType(MemGrowingType memGrowingType)
    {
        memGrowingtype_ = memGrowingType;
    }

    inline size_t GetCommittedSize() const;

    inline size_t GetHeapObjectSize() const;

    inline int32_t GetHeapObjectCount() const;

    size_t GetPromotedSize() const
    {
        return promotedSize_;
    }

    size_t GetArrayBufferSize() const;

    uint32_t GetMaxMarkTaskCount() const
    {
        return maxMarkTaskCount_;
    }

    uint32_t GetMaxEvacuateTaskCount() const
    {
        return maxEvacuateTaskCount_;
    }

    /*
     * Heap tracking will be used by tools like heap profiler etc.
     */

    void StartHeapTracking(HeapTracker *tracker)
    {
        WaitAllTasksFinished();
        parallelGC_ = false;
        tracker_ = tracker;
    }

    void StopHeapTracking()
    {
        WaitAllTasksFinished();
        parallelGC_ = true;
        tracker_ = nullptr;
    }
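
    // Tracking usage sketch (illustrative): a tool such as the heap profiler installs a
    // tracker, during which parallel GC stays disabled, and removes it when done.
    // "tracker" below is a caller-owned HeapTracker:
    //
    //   heap->StartHeapTracking(&tracker);
    //   // ... allocation and move events are reported via OnAllocateEvent / OnMoveEvent ...
    //   heap->StopHeapTracking();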

    inline void OnAllocateEvent(TaggedObject* address, size_t size);
    inline void OnMoveEvent(uintptr_t address, TaggedObject* forwardAddress, size_t size);
    void AddToKeptObjects(JSHandle<JSTaggedValue> value) const;
    void ClearKeptObjects() const;
    /*
     * Functions used by heap verification.
     */

    template<class Callback>
    void IterateOverObjects(const Callback &cb) const;

    bool IsAlive(TaggedObject *object) const;
    bool ContainObject(TaggedObject *object) const;

    size_t VerifyHeapObjects() const;
    size_t VerifyOldToNewRSet() const;
    void StatisticHeapObject(TriggerGCType gcType) const;
    void PrintHeapInfo(TriggerGCType gcType) const;

    bool OldSpaceExceedCapacity(size_t size) const
    {
        size_t totalSize = oldSpace_->GetCommittedSize() + hugeObjectSpace_->GetCommittedSize() + size;
        return totalSize >= oldSpace_->GetMaximumCapacity() + oldSpace_->GetOutOfMemoryOvershootSize();
    }

    bool OldSpaceExceedLimit() const
    {
        size_t totalSize = oldSpace_->GetHeapObjectSize() + hugeObjectSpace_->GetHeapObjectSize();
        return totalSize >= oldSpace_->GetInitialCapacity();
    }

    void AdjustSpaceSizeForAppSpawn();
#if ECMASCRIPT_ENABLE_HEAP_VERIFY
    bool IsVerifying() const
    {
        return isVerifying_;
    }
#endif
    static bool ShouldMoveToRoSpace(JSHClass *hclass, TaggedObject *object)
    {
        return hclass->IsString() && !Region::ObjectAddressToRange(object)->InHugeObjectSpace();
    }

    bool IsFullMarkRequested() const
    {
        return fullMarkRequested_;
    }

    void SetFullMarkRequestedState(bool fullMarkRequested)
    {
        fullMarkRequested_ = fullMarkRequested;
    }

    void ShouldThrowOOMError(bool shouldThrow)
    {
        shouldThrowOOMError_ = shouldThrow;
    }

    void SetHeapMode(HeapMode mode)
    {
        mode_ = mode;
    }

    void ThrowOutOfMemoryError(size_t size, std::string functionName);

    void IncreaseNativeBindingSize(bool nonMovable, size_t size);
    void IncreaseNativeBindingSize(JSNativePointer *object);
    void ResetNativeBindingSize()
    {
        activeSemiSpace_->ResetNativeBindingSize();
        nonNewSpaceNativeBindingSize_ = 0;
    }

    size_t GetNativeBindingSize() const
    {
        return activeSemiSpace_->GetNativeBindingSize() + nonNewSpaceNativeBindingSize_;
    }

    size_t GetNonNewSpaceNativeBindingSize() const
    {
        return nonNewSpaceNativeBindingSize_;
    }
private:
    static constexpr int64_t WAIT_FOR_APP_START_UP = 200;
    static constexpr int IDLE_TIME_REMARK = 10;
    static constexpr int IDLE_TIME_LIMIT = 15;  // if idle time over 15ms we can do something
    static constexpr int MIN_OLD_GC_LIMIT = 10000;  // 10s
    static constexpr int REST_HEAP_GROWTH_LIMIT = 2_MB;
    void FatalOutOfMemoryError(size_t size, std::string functionName);
    void RecomputeLimits();
    void AdjustOldSpaceLimit();
    // record lastRegion for each space, which will be used in ReclaimRegions()
    void PrepareRecordRegionsForReclaim();
    TriggerGCType SelectGCType() const;
    void IncreaseTaskCount();
    void ReduceTaskCount();
    void WaitClearTaskFinished();
    void InvokeWeakNodeSecondPassCallback();
    inline void ReclaimRegions(TriggerGCType gcType);

    class ParallelGCTask : public Task {
    public:
        ParallelGCTask(int32_t id, Heap *heap, ParallelGCTaskPhase taskPhase)
            : Task(id), heap_(heap), taskPhase_(taskPhase) {};
        ~ParallelGCTask() override = default;
        bool Run(uint32_t threadIndex) override;

        NO_COPY_SEMANTIC(ParallelGCTask);
        NO_MOVE_SEMANTIC(ParallelGCTask);

    private:
        Heap *heap_ {nullptr};
        ParallelGCTaskPhase taskPhase_;
    };

    class AsyncClearTask : public Task {
    public:
        AsyncClearTask(int32_t id, Heap *heap, TriggerGCType type)
            : Task(id), heap_(heap), gcType_(type) {}
        ~AsyncClearTask() override = default;
        bool Run(uint32_t threadIndex) override;

        NO_COPY_SEMANTIC(AsyncClearTask);
        NO_MOVE_SEMANTIC(AsyncClearTask);
    private:
        Heap *heap_;
        TriggerGCType gcType_;
    };

    EcmaVM *ecmaVm_ {nullptr};
    JSThread *thread_ {nullptr};

    /*
     * Heap spaces.
     */

    /*
     * Young generation spaces where most new objects are allocated
     * (only one of the two semi spaces is active at a time in semi space GC).
     */
    SemiSpace *activeSemiSpace_ {nullptr};
    SemiSpace *inactiveSemiSpace_ {nullptr};

    // Old generation spaces where long-lived objects are allocated or promoted.
    OldSpace *oldSpace_ {nullptr};
    OldSpace *compressSpace_ {nullptr};
    ReadOnlySpace *readOnlySpace_ {nullptr};
    AppSpawnSpace *appSpawnSpace_ {nullptr};
    // Spaces used for special kinds of objects.
    NonMovableSpace *nonMovableSpace_ {nullptr};
    MachineCodeSpace *machineCodeSpace_ {nullptr};
    HugeObjectSpace *hugeObjectSpace_ {nullptr};
    SnapshotSpace *snapshotSpace_ {nullptr};

    /*
     * Garbage collectors collecting garbage in different scopes.
     */

    /*
     * Semi space GC, which collects garbage only in the young spaces.
     * It is optional for now because the partial GC also covers its functionality.
     */
    STWYoungGC *stwYoungGC_ {nullptr};

    /*
     * The most frequently used collector: a partial GC which collects garbage in the young spaces,
     * and in part of the old spaces when GC heuristics decide it is needed.
     */
    PartialGC *partialGC_ {nullptr};

    // Full collector which collects garbage in all valid heap spaces.
    FullGC *fullGC_ {nullptr};

    // Concurrent marker which coordinates the actions of GC markers and mutators.
    ConcurrentMarker *concurrentMarker_ {nullptr};

    // Concurrent sweeper which coordinates the actions of sweepers (in spaces excluding the young semi spaces) and mutators.
    ConcurrentSweeper *sweeper_ {nullptr};

    // Parallel evacuator which evacuates objects from one space to another.
    ParallelEvacuator *evacuator_ {nullptr};

    /*
     * Different kinds of markers used by different collectors.
     * Depending on the collector algorithm, some markers do simple marking
     * while others also need to handle object movement.
     */
    Marker *nonMovableMarker_ {nullptr};
    Marker *semiGCMarker_ {nullptr};
    Marker *compressGCMarker_ {nullptr};

    // Work manager managing the tasks mostly generated in the GC mark phase.
    WorkManager *workManager_ {nullptr};

    MarkType markType_ {MarkType::MARK_YOUNG};

    bool parallelGC_ {true};
    bool fullGCRequested_ {false};
    bool fullMarkRequested_ {false};
    bool oldSpaceLimitAdjusted_ {false};
    bool shouldThrowOOMError_ {false};
    bool runningSecondPassCallbacks_ {false};
    bool enableIdleGC_ {true};
    bool waitForStartUp_ {true};
    bool couldIdleGC_ {false};
    HeapMode mode_ { HeapMode::NORMAL };

    size_t globalSpaceAllocLimit_ {0};
    size_t promotedSize_ {0};
    size_t semiSpaceCopiedSize_ {0};
    size_t nonNewSpaceNativeBindingSize_ {0};
    size_t globalSpaceNativeLimit_ {0};
    size_t idleHeapObjectSize_ {0};
    size_t idleOldSpace_ {16_MB};
    size_t triggerRestIdleSize_ {0};
    MemGrowingType memGrowingtype_ {MemGrowingType::HIGH_THROUGHPUT};

    bool clearTaskFinished_ {true};
    os::memory::Mutex waitClearTaskFinishedMutex_;
    os::memory::ConditionVariable waitClearTaskFinishedCV_;
    int64_t idleTime_ {0};
    uint32_t runningTaskCount_ {0};
    // parallel marker task number.
    uint32_t maxMarkTaskCount_ {0};
    // parallel evacuator task number.
    uint32_t maxEvacuateTaskCount_ {0};
    os::memory::Mutex waitTaskFinishedMutex_;
    os::memory::ConditionVariable waitTaskFinishedCV_;

    /*
     * The memory controller providing memory statistics (by allocations and collections),
     * which is used for GC heuristics.
     */
    MemController *memController_ {nullptr};

    // Region allocators.
    NativeAreaAllocator *nativeAreaAllocator_ {nullptr};
    HeapRegionAllocator *heapRegionAllocator_ {nullptr};

    // The tracker tracking heap object allocation and movement events.
    HeapTracker *tracker_ {nullptr};

    IdleData *idleData_ {nullptr};

#if ECMASCRIPT_ENABLE_HEAP_VERIFY
    bool isVerifying_ {false};
#endif
};
}  // namespace panda::ecmascript

#endif  // ECMASCRIPT_MEM_HEAP_H