/*
 * Copyright (c) 2021 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ECMASCRIPT_MEM_HEAP_H
#define ECMASCRIPT_MEM_HEAP_H

#include "ecmascript/base/config.h"
#include "ecmascript/frames.h"
#include "ecmascript/js_thread.h"
#include "ecmascript/mem/linear_space.h"
#include "ecmascript/mem/mark_stack.h"
#include "ecmascript/mem/sparse_space.h"
#include "ecmascript/mem/work_manager.h"
#include "ecmascript/taskpool/taskpool.h"

namespace panda::ecmascript {
class ConcurrentMarker;
class ConcurrentSweeper;
class EcmaVM;
class FullGC;
class HeapRegionAllocator;
class HeapTracker;
#if !WIN_OR_MAC_OR_IOS_PLATFORM
class HeapProfilerInterface;
class HeapProfiler;
#endif
class IncrementalMarker;
class JSNativePointer;
class Marker;
class MemController;
class NativeAreaAllocator;
class ParallelEvacuator;
class PartialGC;
class STWYoungGC;

using IdleNotifyStatusCallback = std::function<void(bool)>;

enum class IdleTaskType : uint8_t {
    NO_TASK,
    YOUNG_GC,
    FINISH_MARKING,
    INCREMENTAL_MARK
};

enum class MarkType : uint8_t {
    MARK_YOUNG,
    MARK_FULL
};

enum class MemGrowingType : uint8_t {
    HIGH_THROUGHPUT,
    CONSERVATIVE,
    PRESSURE
};

enum class HeapMode {
    NORMAL,
    SPAWN,
    SHARE,
};

class Heap {
public:
    explicit Heap(EcmaVM *ecmaVm);
    ~Heap() = default;
    NO_COPY_SEMANTIC(Heap);
    NO_MOVE_SEMANTIC(Heap);
    void Initialize();
    void Destroy();
    void Prepare();
    void Resume(TriggerGCType gcType);
    void ResumeForAppSpawn();
    void CompactHeapBeforeFork();
    void DisableParallelGC();
    void EnableParallelGC();
    // fixme: Rename NewSpace to YoungSpace.
    // This is the active young generation space in which new objects are allocated,
    // or into which objects are copied (from the other semi space) during semi space GC.
    SemiSpace *GetNewSpace() const
    {
        return activeSemiSpace_;
    }

    /*
     * Return the original active space where the objects are to be evacuated during semi space GC.
     * This should be invoked only in the evacuation phase of semi space GC.
     * fixme: Get rid of this interface or make it safe considering the above implicit limitation / requirement.
     */
    SemiSpace *GetFromSpaceDuringEvacuation() const
    {
        return inactiveSemiSpace_;
    }

    OldSpace *GetOldSpace() const
    {
        return oldSpace_;
    }

    NonMovableSpace *GetNonMovableSpace() const
    {
        return nonMovableSpace_;
    }

    HugeObjectSpace *GetHugeObjectSpace() const
    {
        return hugeObjectSpace_;
    }

    MachineCodeSpace *GetMachineCodeSpace() const
    {
        return machineCodeSpace_;
    }

    SnapshotSpace *GetSnapshotSpace() const
    {
        return snapshotSpace_;
    }

    ReadOnlySpace *GetReadOnlySpace() const
    {
        return readOnlySpace_;
    }

    AppSpawnSpace *GetAppSpawnSpace() const
    {
        return appSpawnSpace_;
    }

    SparseSpace *GetSpaceWithType(MemSpaceType type) const
    {
        switch (type) {
            case MemSpaceType::OLD_SPACE:
                return oldSpace_;
            case MemSpaceType::NON_MOVABLE:
                return nonMovableSpace_;
            case MemSpaceType::MACHINE_CODE_SPACE:
                return machineCodeSpace_;
            default:
                LOG_ECMA(FATAL) << "this branch is unreachable";
                UNREACHABLE();
                break;
        }
    }
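    /*
     * Illustrative sketch only (not part of the original header): GetSpaceWithType()
     * accepts the sparse-space types handled in the switch above, e.g.
     *
     *     SparseSpace *oldSpace = heap->GetSpaceWithType(MemSpaceType::OLD_SPACE);
     *
     * Any other MemSpaceType value hits the FATAL/UNREACHABLE branch.
     */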

    STWYoungGC *GetSTWYoungGC() const
    {
        return stwYoungGC_;
    }

    PartialGC *GetPartialGC() const
    {
        return partialGC_;
    }

    FullGC *GetFullGC() const
    {
        return fullGC_;
    }

    ConcurrentSweeper *GetSweeper() const
    {
        return sweeper_;
    }

    ParallelEvacuator *GetEvacuator() const
    {
        return evacuator_;
    }

    ConcurrentMarker *GetConcurrentMarker() const
    {
        return concurrentMarker_;
    }

    IncrementalMarker *GetIncrementalMarker() const
    {
        return incrementalMarker_;
    }

    Marker *GetNonMovableMarker() const
    {
        return nonMovableMarker_;
    }

    Marker *GetSemiGCMarker() const
    {
        return semiGCMarker_;
    }

    Marker *GetCompressGCMarker() const
    {
        return compressGCMarker_;
    }

    EcmaVM *GetEcmaVM() const
    {
        return ecmaVm_;
    }

    JSThread *GetJSThread() const
    {
        return thread_;
    }

    WorkManager *GetWorkManager() const
    {
        return workManager_;
    }

    MemController *GetMemController() const
    {
        return memController_;
    }

    /*
     * For object allocations.
     */

    // Young
    inline TaggedObject *AllocateYoungOrHugeObject(JSHClass *hclass);
    inline TaggedObject *AllocateYoungOrHugeObject(JSHClass *hclass, size_t size);
    inline TaggedObject *AllocateReadOnlyOrHugeObject(JSHClass *hclass);
    inline TaggedObject *AllocateReadOnlyOrHugeObject(JSHClass *hclass, size_t size);
    inline TaggedObject *AllocateYoungOrHugeObject(size_t size);
    inline uintptr_t AllocateYoungSync(size_t size);
    inline TaggedObject *TryAllocateYoungGeneration(JSHClass *hclass, size_t size);
    // Old
    inline TaggedObject *AllocateOldOrHugeObject(JSHClass *hclass);
    inline TaggedObject *AllocateOldOrHugeObject(JSHClass *hclass, size_t size);
    // Non-movable
    inline TaggedObject *AllocateNonMovableOrHugeObject(JSHClass *hclass);
    inline TaggedObject *AllocateNonMovableOrHugeObject(JSHClass *hclass, size_t size);
    inline TaggedObject *AllocateClassClass(JSHClass *hclass, size_t size);
    // Huge
    inline TaggedObject *AllocateHugeObject(JSHClass *hclass, size_t size);
    inline TaggedObject *AllocateHugeObject(size_t size);
    // Machine code
    inline TaggedObject *AllocateMachineCodeObject(JSHClass *hclass, size_t size);
    // Snapshot
    inline uintptr_t AllocateSnapshotSpace(size_t size);
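    /*
     * Illustrative allocation sketch (not part of the original header; `hclass` and
     * `size` are assumed to come from the caller). The "...OrHugeObject" variants,
     * as their names suggest, fall back to the huge object space for oversized requests:
     *
     *     TaggedObject *youngObj = heap->AllocateYoungOrHugeObject(hclass, size);
     *     TaggedObject *oldObj = heap->AllocateOldOrHugeObject(hclass, size);
     */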

    NativeAreaAllocator *GetNativeAreaAllocator() const
    {
        return nativeAreaAllocator_;
    }

    HeapRegionAllocator *GetHeapRegionAllocator() const
    {
        return heapRegionAllocator_;
    }

    /*
     * GC triggers.
     */

    void CollectGarbage(TriggerGCType gcType, GCReason reason = GCReason::OTHER);

    void CheckAndTriggerOldGC(size_t size = 0);
    TriggerGCType SelectGCType() const;
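    /*
     * Illustrative trigger sketch (not part of the original header): a caller can let
     * SelectGCType() pick the collection type and then run it, e.g.
     *
     *     heap->CollectGarbage(heap->SelectGCType(), GCReason::OTHER);
     */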
    /*
     * Parallel GC related configurations and utilities.
     */

    void PostParallelGCTask(ParallelGCTaskPhase taskPhase);

    bool IsParallelGCEnabled() const
    {
        return parallelGC_;
    }
    void ChangeGCParams(bool inBackground);
    void TriggerIdleCollection(int idleMicroSec);
    void NotifyMemoryPressure(bool inHighMemoryPressure);
    bool CheckCanDistributeTask();

    void WaitRunningTaskFinished();

    void TryTriggerConcurrentMarking();
    void AdjustBySurvivalRate(size_t originalNewSpaceSize);
    void TriggerConcurrentMarking();

    void TryTriggerIdleCollection();
    void TryTriggerIncrementalMarking();
    void CalculateIdleDuration();

    /*
     * Wait for existing concurrent marking tasks to be finished (if any).
     * Return true if there's ongoing concurrent marking.
     */
    bool CheckOngoingConcurrentMarking();
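    /*
     * Illustrative sketch of the documented contract above (hypothetical caller):
     *
     *     if (heap->CheckOngoingConcurrentMarking()) {
     *         // concurrent marking was in progress and has now been waited on
     *     }
     */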

    /*
     * Functions invoked during GC.
     */

    void SetMarkType(MarkType markType)
    {
        markType_ = markType;
    }

    bool IsFullMark() const
    {
        return markType_ == MarkType::MARK_FULL;
    }

    inline void SwapNewSpace();
    inline void SwapOldSpace();

    inline bool MoveYoungRegionSync(Region *region);
    inline void MergeToOldSpaceSync(LocalSpace *localSpace);

    template<class Callback>
    void EnumerateOldSpaceRegions(const Callback &cb, Region *region = nullptr) const;

    template<class Callback>
    void EnumerateNonNewSpaceRegions(const Callback &cb) const;

    template<class Callback>
    void EnumerateNonNewSpaceRegionsWithRecord(const Callback &cb) const;

    template<class Callback>
    void EnumerateNewSpaceRegions(const Callback &cb) const;

    template<class Callback>
    void EnumerateSnapshotSpaceRegions(const Callback &cb) const;

    template<class Callback>
    void EnumerateNonMovableRegions(const Callback &cb) const;

    template<class Callback>
    inline void EnumerateRegions(const Callback &cb) const;
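    /*
     * Illustrative enumeration sketch (not part of the original header; assumes the
     * callbacks above are invoked once per region, as the names indicate):
     *
     *     heap->EnumerateRegions([](Region *region) {
     *         // inspect or mark each region here
     *     });
     */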

    inline void ClearSlotsRange(Region *current, uintptr_t freeStart, uintptr_t freeEnd);

    void WaitAllTasksFinished();
    void WaitConcurrentMarkingFinished();

    MemGrowingType GetMemGrowingType() const
    {
        return memGrowingtype_;
    }

    void SetMemGrowingType(MemGrowingType memGrowingType)
    {
        memGrowingtype_ = memGrowingType;
    }

    inline size_t GetCommittedSize() const;

    inline size_t GetHeapObjectSize() const;
    size_t GetLiveObjectSize() const;

    inline uint32_t GetHeapObjectCount() const;

    size_t GetPromotedSize() const
    {
        return promotedSize_;
    }

    size_t GetArrayBufferSize() const;

    uint32_t GetMaxMarkTaskCount() const
    {
        return maxMarkTaskCount_;
    }

    uint32_t GetMaxEvacuateTaskCount() const
    {
        return maxEvacuateTaskCount_;
    }

    /*
     * Receive a callback function to control idle time.
     */
    inline void InitializeIdleStatusControl(std::function<void(bool)> callback);

    void DisableNotifyIdle()
    {
        if (notifyIdleStatusCallback != nullptr) {
            notifyIdleStatusCallback(true);
        }
    }

    void EnableNotifyIdle()
    {
        if (enableIdleGC_ && notifyIdleStatusCallback != nullptr) {
            notifyIdleStatusCallback(false);
        }
    }
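    // As wired above, the idle-status callback receives `true` from DisableNotifyIdle()
    // and `false` from EnableNotifyIdle() (the latter only when enableIdleGC_ is set).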

    void SetIdleTask(IdleTaskType task)
    {
        idleTask_ = task;
    }

    void ClearIdleTask();

#if defined(ECMASCRIPT_SUPPORT_HEAPPROFILER)
    void StartHeapTracking()
    {
        WaitAllTasksFinished();
    }

    void StopHeapTracking()
    {
        WaitAllTasksFinished();
    }
#endif
    void OnAllocateEvent(TaggedObject* address, size_t size);
    void OnMoveEvent(uintptr_t address, TaggedObject* forwardAddress, size_t size);
    void AddToKeptObjects(JSHandle<JSTaggedValue> value) const;
    void ClearKeptObjects() const;

    // add allocationInspector to each space
    void AddAllocationInspectorToAllSpaces(AllocationInspector *inspector);

    // clear allocationInspector from each space
    void ClearAllocationInspectorFromAllSpaces();

    /*
     * Functions used by heap verification.
     */

    template<class Callback>
    void IterateOverObjects(const Callback &cb) const;

    bool IsAlive(TaggedObject *object) const;
    bool ContainObject(TaggedObject *object) const;

    size_t VerifyHeapObjects() const;
    size_t VerifyOldToNewRSet() const;
    void StatisticHeapObject(TriggerGCType gcType) const;
    void PrintHeapInfo(TriggerGCType gcType) const;

    bool OldSpaceExceedCapacity(size_t size) const
    {
        size_t totalSize = oldSpace_->GetCommittedSize() + hugeObjectSpace_->GetCommittedSize() + size;
        return totalSize >= oldSpace_->GetMaximumCapacity() + oldSpace_->GetOutOfMemoryOvershootSize();
    }

    bool OldSpaceExceedLimit() const
    {
        size_t totalSize = oldSpace_->GetHeapObjectSize() + hugeObjectSpace_->GetHeapObjectSize();
        return totalSize >= oldSpace_->GetInitialCapacity();
    }
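    // Note: the two checks above measure different things. OldSpaceExceedCapacity() compares
    // committed size (plus the out-of-memory overshoot allowance) against the maximum
    // capacity, while OldSpaceExceedLimit() compares live object size against the initial
    // capacity.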

    void AdjustSpaceSizeForAppSpawn();
#if ECMASCRIPT_ENABLE_HEAP_VERIFY
    bool IsVerifying() const
    {
        return isVerifying_;
    }
#endif
    static bool ShouldMoveToRoSpace(JSHClass *hclass, TaggedObject *object)
    {
        return hclass->IsString() && !Region::ObjectAddressToRange(object)->InHugeObjectSpace();
    }

    bool IsFullMarkRequested() const
    {
        return fullMarkRequested_;
    }

    void SetFullMarkRequestedState(bool fullMarkRequested)
    {
        fullMarkRequested_ = fullMarkRequested;
    }

    void ShouldThrowOOMError(bool shouldThrow)
    {
        shouldThrowOOMError_ = shouldThrow;
    }

    void SetHeapMode(HeapMode mode)
    {
        mode_ = mode;
    }

    void ThrowOutOfMemoryError(size_t size, std::string functionName);

    void IncreaseNativeBindingSize(bool nonMovable, size_t size);
    void IncreaseNativeBindingSize(JSNativePointer *object);
    void ResetNativeBindingSize()
    {
        activeSemiSpace_->ResetNativeBindingSize();
        nonNewSpaceNativeBindingSize_ = 0;
    }

    size_t GetNativeBindingSize() const
    {
        return activeSemiSpace_->GetNativeBindingSize() + nonNewSpaceNativeBindingSize_;
    }

    size_t GetGlobalNativeSize() const
    {
        return GetNativeBindingSize() + nativeAreaAllocator_->GetNativeMemoryUsage();
    }

    bool GlobalNativeSizeLargerThanLimit() const
    {
        return GetGlobalNativeSize() >= globalSpaceNativeLimit_;
    }
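    // Native memory accounting, as composed above: the per-space native binding sizes plus
    // the native area allocator's usage are compared against globalSpaceNativeLimit_ in
    // GlobalNativeSizeLargerThanLimit() (see also TryTriggerFullMarkByNativeSize() below).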

    size_t GetNonNewSpaceNativeBindingSize() const
    {
        return nonNewSpaceNativeBindingSize_;
    }

    void TryTriggerFullMarkByNativeSize();

    void NotifyHeapAliveSizeAfterGC(size_t size)
    {
        heapAliveSizeAfterGC_ = size;
    }

    size_t GetHeapAliveSizeAfterGC() const
    {
        return heapAliveSizeAfterGC_;
    }

    bool IsInBackground() const
    {
        return inBackground_;
    }
private:
    static constexpr int IDLE_TIME_LIMIT = 10;  // if idle time exceeds 10 ms, we can do something
    static constexpr int ALLOCATE_SIZE_LIMIT = 100_KB;
    static constexpr int IDLE_MAINTAIN_TIME = 500;
    static constexpr int BACKGROUND_GROW_LIMIT = 2_MB;
    void FatalOutOfMemoryError(size_t size, std::string functionName);
    void RecomputeLimits();
    void AdjustOldSpaceLimit();
    // record lastRegion for each space, which will be used in ReclaimRegions()
    void PrepareRecordRegionsForReclaim();
    void IncreaseTaskCount();
    void ReduceTaskCount();
    void WaitClearTaskFinished();
    void InvokeWeakNodeNativeFinalizeCallback();
    inline void ReclaimRegions(TriggerGCType gcType);

    class ParallelGCTask : public Task {
    public:
        ParallelGCTask(int32_t id, Heap *heap, ParallelGCTaskPhase taskPhase)
            : Task(id), heap_(heap), taskPhase_(taskPhase) {};
        ~ParallelGCTask() override = default;
        bool Run(uint32_t threadIndex) override;

        NO_COPY_SEMANTIC(ParallelGCTask);
        NO_MOVE_SEMANTIC(ParallelGCTask);

    private:
        Heap *heap_ {nullptr};
        ParallelGCTaskPhase taskPhase_;
    };

    class AsyncClearTask : public Task {
    public:
        AsyncClearTask(int32_t id, Heap *heap, TriggerGCType type)
            : Task(id), heap_(heap), gcType_(type) {}
        ~AsyncClearTask() override = default;
        bool Run(uint32_t threadIndex) override;

        NO_COPY_SEMANTIC(AsyncClearTask);
        NO_MOVE_SEMANTIC(AsyncClearTask);
    private:
        Heap *heap_;
        TriggerGCType gcType_;
    };

    EcmaVM *ecmaVm_ {nullptr};
    JSThread *thread_ {nullptr};

    /*
     * Heap spaces.
     */

    /*
     * Young generation spaces where most new objects are allocated.
     * (only one of the spaces is active at a time in semi space GC).
     */
    SemiSpace *activeSemiSpace_ {nullptr};
    SemiSpace *inactiveSemiSpace_ {nullptr};

    // Old generation spaces where some long-lived objects are allocated or promoted.
    OldSpace *oldSpace_ {nullptr};
    OldSpace *compressSpace_ {nullptr};
    ReadOnlySpace *readOnlySpace_ {nullptr};
    AppSpawnSpace *appSpawnSpace_ {nullptr};
    // Spaces used for special kinds of objects.
    NonMovableSpace *nonMovableSpace_ {nullptr};
    MachineCodeSpace *machineCodeSpace_ {nullptr};
    HugeObjectSpace *hugeObjectSpace_ {nullptr};
    SnapshotSpace *snapshotSpace_ {nullptr};

    /*
     * Garbage collectors collecting garbage in different scopes.
     */

    /*
     * Semi space GC which collects garbage only in young spaces.
     * This is, however, optional for now because the partial GC also covers its functionality.
     */
    STWYoungGC *stwYoungGC_ {nullptr};

    /*
     * The most frequently used partial GC, which collects garbage in young spaces and,
     * if needed, part of the old spaces, as determined by GC heuristics.
     */
    PartialGC *partialGC_ {nullptr};

    // Full collector which collects garbage in all valid heap spaces.
    FullGC *fullGC_ {nullptr};

    // Concurrent marker which coordinates actions of GC markers and mutators.
    ConcurrentMarker *concurrentMarker_ {nullptr};

    // Concurrent sweeper which coordinates actions of sweepers (in spaces excluding young semi spaces) and mutators.
    ConcurrentSweeper *sweeper_ {nullptr};

    // Parallel evacuator which evacuates objects from one space to another one.
    ParallelEvacuator *evacuator_ {nullptr};

    // Incremental marker which coordinates actions of GC markers in idle time.
    IncrementalMarker *incrementalMarker_ {nullptr};

    /*
     * Different kinds of markers used by different collectors.
     * Depending on the collector algorithm, some markers can do simple marking
     * while some others need to handle object movement.
     */
    Marker *nonMovableMarker_ {nullptr};
    Marker *semiGCMarker_ {nullptr};
    Marker *compressGCMarker_ {nullptr};

    // Work manager managing the tasks mostly generated in the GC mark phase.
    WorkManager *workManager_ {nullptr};

    MarkType markType_ {MarkType::MARK_YOUNG};

    bool parallelGC_ {true};
    bool fullGCRequested_ {false};
    bool fullMarkRequested_ {false};
    bool oldSpaceLimitAdjusted_ {false};
    bool shouldThrowOOMError_ {false};
    bool runningNativeFinalizeCallbacks_ {false};
    bool enableIdleGC_ {false};
    HeapMode mode_ { HeapMode::NORMAL };

    size_t globalSpaceAllocLimit_ {0};
    size_t promotedSize_ {0};
    size_t semiSpaceCopiedSize_ {0};
    size_t nonNewSpaceNativeBindingSize_{0};
    size_t globalSpaceNativeLimit_ {0};
    MemGrowingType memGrowingtype_ {MemGrowingType::HIGH_THROUGHPUT};

    bool clearTaskFinished_ {true};
    os::memory::Mutex waitClearTaskFinishedMutex_;
    os::memory::ConditionVariable waitClearTaskFinishedCV_;
    uint32_t runningTaskCount_ {0};
    // parallel marker task number.
    uint32_t maxMarkTaskCount_ {0};
    // parallel evacuator task number.
    uint32_t maxEvacuateTaskCount_ {0};
    os::memory::Mutex waitTaskFinishedMutex_;
    os::memory::ConditionVariable waitTaskFinishedCV_;

    /*
     * The memory controller providing memory statistics (by allocations and collections),
     * which is used for GC heuristics.
     */
    MemController *memController_ {nullptr};

    // Region allocators.
    NativeAreaAllocator *nativeAreaAllocator_ {nullptr};
    HeapRegionAllocator *heapRegionAllocator_ {nullptr};

    // Application status
    bool inBackground_ {false};

    IdleNotifyStatusCallback notifyIdleStatusCallback {nullptr};

    IdleTaskType idleTask_ {IdleTaskType::NO_TASK};
    float idlePredictDuration_ {0.0f};
    size_t heapAliveSizeAfterGC_ {0};
    double idleTaskFinishTime_ {0.0};
#if ECMASCRIPT_ENABLE_HEAP_VERIFY
    bool isVerifying_ {false};
#endif
};
}  // namespace panda::ecmascript

#endif  // ECMASCRIPT_MEM_HEAP_H