/*
 * Copyright (c) 2021 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ECMASCRIPT_MEM_HEAP_H
#define ECMASCRIPT_MEM_HEAP_H

#include "ecmascript/base/config.h"
#include "ecmascript/frames.h"
#include "ecmascript/js_thread.h"
#include "ecmascript/mem/linear_space.h"
#include "ecmascript/mem/mark_stack.h"
#include "ecmascript/mem/sparse_space.h"
#include "ecmascript/mem/work_manager.h"
#include "ecmascript/taskpool/taskpool.h"

namespace panda::ecmascript {
class ConcurrentMarker;
class ConcurrentSweeper;
class EcmaVM;
class FullGC;
class HeapRegionAllocator;
class HeapTracker;
#if !WIN_OR_MAC_OR_IOS_PLATFORM
class HeapProfilerInterface;
class HeapProfiler;
#endif
class IncrementalMarker;
class JSNativePointer;
class Marker;
class MemController;
class NativeAreaAllocator;
class ParallelEvacuator;
class PartialGC;
class STWYoungGC;

using IdleNotifyStatusCallback = std::function<void(bool)>;

enum class IdleTaskType : uint8_t {
    NO_TASK,
    YOUNG_GC,
    FINISH_MARKING,
    INCREMENTAL_MARK
};

enum class MarkType : uint8_t {
    MARK_YOUNG,
    MARK_FULL
};

enum class MemGrowingType : uint8_t {
    HIGH_THROUGHPUT,
    CONSERVATIVE,
    PRESSURE
};

enum class HeapMode {
    NORMAL,
    SPAWN,
    SHARE,
};

class Heap {
public:
    explicit Heap(EcmaVM *ecmaVm);
    ~Heap() = default;
    NO_COPY_SEMANTIC(Heap);
    NO_MOVE_SEMANTIC(Heap);
    void Initialize();
    void Destroy();
    void Prepare();
    void Resume(TriggerGCType gcType);
    void ResumeForAppSpawn();
    void CompactHeapBeforeFork();
    void DisableParallelGC();
    void EnableParallelGC();
    // fixme: Rename NewSpace to YoungSpace.
    // This is the active young generation space, where new objects are allocated
    // or copied into (from the other semi space) during semi space GC.
    SemiSpace *GetNewSpace() const
    {
        return activeSemiSpace_;
    }
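
    /*
     * Illustrative usage sketch (not part of this interface): the two semi spaces
     * trade roles during a semi space GC, so callers should not cache the returned
     * pointer across a collection but re-query it instead, e.g.
     *     SemiSpace *to = heap->GetNewSpace();                     // allocation target
     *     SemiSpace *from = heap->GetFromSpaceDuringEvacuation();  // evacuation source
     * How and when the roles are swapped is an implementation detail of SwapNewSpace().
     */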

    /*
     * Return the original active space where the objects are to be evacuated during semi space GC.
     * This should be invoked only in the evacuation phase of semi space GC.
     * fixme: Get rid of this interface or make it safe considering the above implicit limitation / requirement.
     */
    SemiSpace *GetFromSpaceDuringEvacuation() const
    {
        return inactiveSemiSpace_;
    }

    OldSpace *GetOldSpace() const
    {
        return oldSpace_;
    }

    NonMovableSpace *GetNonMovableSpace() const
    {
        return nonMovableSpace_;
    }

    HugeObjectSpace *GetHugeObjectSpace() const
    {
        return hugeObjectSpace_;
    }

    MachineCodeSpace *GetMachineCodeSpace() const
    {
        return machineCodeSpace_;
    }

    SnapshotSpace *GetSnapshotSpace() const
    {
        return snapshotSpace_;
    }

    ReadOnlySpace *GetReadOnlySpace() const
    {
        return readOnlySpace_;
    }

    AppSpawnSpace *GetAppSpawnSpace() const
    {
        return appSpawnSpace_;
    }

    SparseSpace *GetSpaceWithType(MemSpaceType type) const
    {
        switch (type) {
            case MemSpaceType::OLD_SPACE:
                return oldSpace_;
            case MemSpaceType::NON_MOVABLE:
                return nonMovableSpace_;
            case MemSpaceType::MACHINE_CODE_SPACE:
                return machineCodeSpace_;
            default:
                LOG_ECMA(FATAL) << "this branch is unreachable";
                UNREACHABLE();
                break;
        }
    }

    STWYoungGC *GetSTWYoungGC() const
    {
        return stwYoungGC_;
    }

    PartialGC *GetPartialGC() const
    {
        return partialGC_;
    }

    FullGC *GetFullGC() const
    {
        return fullGC_;
    }

    ConcurrentSweeper *GetSweeper() const
    {
        return sweeper_;
    }

    ParallelEvacuator *GetEvacuator() const
    {
        return evacuator_;
    }

    ConcurrentMarker *GetConcurrentMarker() const
    {
        return concurrentMarker_;
    }

    IncrementalMarker *GetIncrementalMarker() const
    {
        return incrementalMarker_;
    }

    Marker *GetNonMovableMarker() const
    {
        return nonMovableMarker_;
    }

    Marker *GetSemiGCMarker() const
    {
        return semiGCMarker_;
    }

    Marker *GetCompressGCMarker() const
    {
        return compressGCMarker_;
    }

    EcmaVM *GetEcmaVM() const
    {
        return ecmaVm_;
    }

    JSThread *GetJSThread() const
    {
        return thread_;
    }

    WorkManager *GetWorkManager() const
    {
        return workManager_;
    }

    MemController *GetMemController() const
    {
        return memController_;
    }
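
    /*
     * Illustrative usage sketch (assumption, not part of this interface): each
     * accessor above exposes one space object; a sweepable (sparse) space can also
     * be selected dynamically by its MemSpaceType, e.g.
     *     SparseSpace *space = heap->GetSpaceWithType(MemSpaceType::OLD_SPACE);
     * Passing a type other than the three handled in GetSpaceWithType() is fatal.
     */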

    /*
     * For object allocations.
     */

    // Young
    inline TaggedObject *AllocateYoungOrHugeObject(JSHClass *hclass);
    inline TaggedObject *AllocateYoungOrHugeObject(JSHClass *hclass, size_t size);
    inline TaggedObject *AllocateReadOnlyOrHugeObject(JSHClass *hclass);
    inline TaggedObject *AllocateReadOnlyOrHugeObject(JSHClass *hclass, size_t size);
    inline TaggedObject *AllocateYoungOrHugeObject(size_t size);
    inline uintptr_t AllocateYoungSync(size_t size);
    inline TaggedObject *TryAllocateYoungGeneration(JSHClass *hclass, size_t size);
    // Old
    inline TaggedObject *AllocateOldOrHugeObject(JSHClass *hclass);
    inline TaggedObject *AllocateOldOrHugeObject(JSHClass *hclass, size_t size);
    // Non-movable
    inline TaggedObject *AllocateNonMovableOrHugeObject(JSHClass *hclass);
    inline TaggedObject *AllocateNonMovableOrHugeObject(JSHClass *hclass, size_t size);
    inline TaggedObject *AllocateClassClass(JSHClass *hclass, size_t size);
    // Huge
    inline TaggedObject *AllocateHugeObject(JSHClass *hclass, size_t size);
    inline TaggedObject *AllocateHugeObject(size_t size);
    // Machine code
    inline TaggedObject *AllocateMachineCodeObject(JSHClass *hclass, size_t size);
    // Snapshot
    inline uintptr_t AllocateSnapshotSpace(size_t size);

    NativeAreaAllocator *GetNativeAreaAllocator() const
    {
        return nativeAreaAllocator_;
    }

    HeapRegionAllocator *GetHeapRegionAllocator() const
    {
        return heapRegionAllocator_;
    }

    /*
     * GC triggers.
     */

    void CollectGarbage(TriggerGCType gcType, GCReason reason = GCReason::OTHER);

    void CheckAndTriggerOldGC(size_t size = 0);
    TriggerGCType SelectGCType() const;

    /*
     * Parallel GC related configurations and utilities.
     */

    void PostParallelGCTask(ParallelGCTaskPhase taskPhase);

    bool IsParallelGCEnabled() const
    {
        return parallelGC_;
    }
    void ChangeGCParams(bool inBackground);
    void TriggerIdleCollection(int idleMicroSec);
    void NotifyMemoryPressure(bool inHighMemoryPressure);
    bool CheckCanDistributeTask();

    void WaitRunningTaskFinished();

    void TryTriggerConcurrentMarking();
    void AdjustBySurvivalRate(size_t originalNewSpaceSize);
    void TriggerConcurrentMarking();

    void TryTriggerIdleCollection();
    void TryTriggerIncrementalMarking();
    void CalculateIdleDuration();

    /*
     * Wait for existing concurrent marking tasks to be finished (if any).
     * Return true if there's ongoing concurrent marking.
     */
    bool CheckOngoingConcurrentMarking();
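
    /*
     * Illustrative usage sketch (assumption, not part of this interface): a typical
     * caller allocates through the Allocate* entry points above and lets the GC
     * triggers decide when to collect; an explicit collection can also be requested:
     *     TaggedObject *obj = heap->AllocateYoungOrHugeObject(hclass, size);
     *     heap->CheckAndTriggerOldGC();
     *     heap->CollectGarbage(gcType, GCReason::OTHER);  // gcType chosen e.g. via SelectGCType()
     * The concrete TriggerGCType values are defined outside this header.
     */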

    /*
     * Functions invoked during GC.
     */

    void SetMarkType(MarkType markType)
    {
        markType_ = markType;
    }

    bool IsFullMark() const
    {
        return markType_ == MarkType::MARK_FULL;
    }

    inline void SwapNewSpace();
    inline void SwapOldSpace();

    inline bool MoveYoungRegionSync(Region *region);
    inline void MergeToOldSpaceSync(LocalSpace *localSpace);

    template<class Callback>
    void EnumerateOldSpaceRegions(const Callback &cb, Region *region = nullptr) const;

    template<class Callback>
    void EnumerateNonNewSpaceRegions(const Callback &cb) const;

    template<class Callback>
    void EnumerateNonNewSpaceRegionsWithRecord(const Callback &cb) const;

    template<class Callback>
    void EnumerateNewSpaceRegions(const Callback &cb) const;

    template<class Callback>
    void EnumerateSnapshotSpaceRegions(const Callback &cb) const;

    template<class Callback>
    void EnumerateNonMovableRegions(const Callback &cb) const;

    template<class Callback>
    inline void EnumerateRegions(const Callback &cb) const;

    inline void ClearSlotsRange(Region *current, uintptr_t freeStart, uintptr_t freeEnd);

    void WaitAllTasksFinished();
    void WaitConcurrentMarkingFinished();

    MemGrowingType GetMemGrowingType() const
    {
        return memGrowingtype_;
    }

    void SetMemGrowingType(MemGrowingType memGrowingType)
    {
        memGrowingtype_ = memGrowingType;
    }

    inline size_t GetCommittedSize() const;

    inline size_t GetHeapObjectSize() const;

    inline uint32_t GetHeapObjectCount() const;

    size_t GetPromotedSize() const
    {
        return promotedSize_;
    }

    size_t GetArrayBufferSize() const;

    uint32_t GetMaxMarkTaskCount() const
    {
        return maxMarkTaskCount_;
    }

    uint32_t GetMaxEvacuateTaskCount() const
    {
        return maxEvacuateTaskCount_;
    }

    /*
     * Receive a callback function to control idle time.
     */
    inline void InitializeIdleStatusControl(std::function<void(bool)> callback);

    void DisableNotifyIdle()
    {
        if (notifyIdleStatusCallback != nullptr) {
            notifyIdleStatusCallback(true);
        }
    }

    void EnableNotifyIdle()
    {
        if (enableIdleGC_ && notifyIdleStatusCallback != nullptr) {
            notifyIdleStatusCallback(false);
        }
    }

    void SetIdleTask(IdleTaskType task)
    {
        idleTask_ = task;
    }

    void ClearIdleTask();

#if defined(ECMASCRIPT_SUPPORT_HEAPPROFILER)
    void StartHeapTracking()
    {
        WaitAllTasksFinished();
    }

    void StopHeapTracking()
    {
        WaitAllTasksFinished();
    }
#endif
    void OnAllocateEvent(TaggedObject* address, size_t size);
    void OnMoveEvent(uintptr_t address, TaggedObject* forwardAddress, size_t size);
    void AddToKeptObjects(JSHandle<JSTaggedValue> value) const;
    void ClearKeptObjects() const;

    // add allocationInspector to each space
    void AddAllocationInspectorToAllSpaces(AllocationInspector *inspector);

    // clear allocationInspector from each space
    void ClearAllocationInspectorFromAllSpaces();
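
    /*
     * Illustrative usage sketch (assumption, not part of this interface): the
     * Enumerate* helpers above accept any callable taking a Region*, e.g.
     *     heap->EnumerateRegions([](Region *region) {
     *         // inspect or record per-region state here
     *     });
     */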

    /*
     * Functions used by heap verification.
     */

    template<class Callback>
    void IterateOverObjects(const Callback &cb) const;

    bool IsAlive(TaggedObject *object) const;
    bool ContainObject(TaggedObject *object) const;

    size_t VerifyHeapObjects() const;
    size_t VerifyOldToNewRSet() const;
    void StatisticHeapObject(TriggerGCType gcType) const;
    void PrintHeapInfo(TriggerGCType gcType) const;

    bool OldSpaceExceedCapacity(size_t size) const
    {
        size_t totalSize = oldSpace_->GetCommittedSize() + hugeObjectSpace_->GetCommittedSize() + size;
        return totalSize >= oldSpace_->GetMaximumCapacity() + oldSpace_->GetOutOfMemoryOvershootSize();
    }

    bool OldSpaceExceedLimit() const
    {
        size_t totalSize = oldSpace_->GetHeapObjectSize() + hugeObjectSpace_->GetHeapObjectSize();
        return totalSize >= oldSpace_->GetInitialCapacity();
    }

    void AdjustSpaceSizeForAppSpawn();
#if ECMASCRIPT_ENABLE_HEAP_VERIFY
    bool IsVerifying() const
    {
        return isVerifying_;
    }
#endif

    static bool ShouldMoveToRoSpace(JSHClass *hclass, TaggedObject *object)
    {
        return hclass->IsString() && !Region::ObjectAddressToRange(object)->InHugeObjectSpace();
    }

    bool IsFullMarkRequested() const
    {
        return fullMarkRequested_;
    }

    void SetFullMarkRequestedState(bool fullMarkRequested)
    {
        fullMarkRequested_ = fullMarkRequested;
    }

    void ShouldThrowOOMError(bool shouldThrow)
    {
        shouldThrowOOMError_ = shouldThrow;
    }

    void SetHeapMode(HeapMode mode)
    {
        mode_ = mode;
    }

    void ThrowOutOfMemoryError(size_t size, std::string functionName);

    void IncreaseNativeBindingSize(bool nonMovable, size_t size);
    void IncreaseNativeBindingSize(JSNativePointer *object);

    void ResetNativeBindingSize()
    {
        activeSemiSpace_->ResetNativeBindingSize();
        nonNewSpaceNativeBindingSize_ = 0;
    }

    size_t GetNativeBindingSize() const
    {
        return activeSemiSpace_->GetNativeBindingSize() + nonNewSpaceNativeBindingSize_;
    }

    size_t GetGlobalNativeSize() const
    {
        return GetNativeBindingSize() + nativeAreaAllocator_->GetNativeMemoryUsage();
    }

    bool GlobalNativeSizeLargerThanLimit() const
    {
        return GetGlobalNativeSize() >= globalSpaceNativeLimit_;
    }

    size_t GetNonNewSpaceNativeBindingSize() const
    {
        return nonNewSpaceNativeBindingSize_;
    }

    void NotifyHeapAliveSizeAfterGC(size_t size)
    {
        heapAliveSizeAfterGC_ = size;
    }

    size_t GetHeapAliveSizeAfterGC() const
    {
        return heapAliveSizeAfterGC_;
    }

    bool IsInBackground() const
    {
        return inBackground_;
    }
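
    /*
     * Illustrative usage sketch (assumption, not part of this interface): native
     * memory attached to JS objects is reported to the heap so that GC heuristics
     * can account for it, e.g.
     *     heap->IncreaseNativeBindingSize(false, bufferSize);  // bufferSize is hypothetical
     *     if (heap->GlobalNativeSizeLargerThanLimit()) {
     *         // a caller might request a collection here via CollectGarbage()
     *     }
     */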

private:
    static constexpr int IDLE_TIME_LIMIT = 10;  // if idle time over 10ms we can do something
    static constexpr int ALLOCATE_SIZE_LIMIT = 100_KB;
    static constexpr int IDLE_MAINTAIN_TIME = 500;
    static constexpr int BACKGROUND_GROW_LIMIT = 2_MB;
    void FatalOutOfMemoryError(size_t size, std::string functionName);
    void RecomputeLimits();
    void AdjustOldSpaceLimit();
    // record lastRegion for each space, which will be used in ReclaimRegions()
    void PrepareRecordRegionsForReclaim();
    void IncreaseTaskCount();
    void ReduceTaskCount();
    void WaitClearTaskFinished();
    void InvokeWeakNodeNativeFinalizeCallback();
    inline void ReclaimRegions(TriggerGCType gcType);

    class ParallelGCTask : public Task {
    public:
        ParallelGCTask(int32_t id, Heap *heap, ParallelGCTaskPhase taskPhase)
            : Task(id), heap_(heap), taskPhase_(taskPhase) {};
        ~ParallelGCTask() override = default;
        bool Run(uint32_t threadIndex) override;

        NO_COPY_SEMANTIC(ParallelGCTask);
        NO_MOVE_SEMANTIC(ParallelGCTask);

    private:
        Heap *heap_ {nullptr};
        ParallelGCTaskPhase taskPhase_;
    };

    class AsyncClearTask : public Task {
    public:
        AsyncClearTask(int32_t id, Heap *heap, TriggerGCType type)
            : Task(id), heap_(heap), gcType_(type) {}
        ~AsyncClearTask() override = default;
        bool Run(uint32_t threadIndex) override;

        NO_COPY_SEMANTIC(AsyncClearTask);
        NO_MOVE_SEMANTIC(AsyncClearTask);
    private:
        Heap *heap_;
        TriggerGCType gcType_;
    };

    EcmaVM *ecmaVm_ {nullptr};
    JSThread *thread_ {nullptr};

    /*
     * Heap spaces.
     */

    /*
     * Young generation spaces where most new objects are allocated.
     * (only one of the spaces is active at a time in semi space GC).
     */
    SemiSpace *activeSemiSpace_ {nullptr};
    SemiSpace *inactiveSemiSpace_ {nullptr};

    // Old generation spaces where some long living objects are allocated or promoted.
    OldSpace *oldSpace_ {nullptr};
    OldSpace *compressSpace_ {nullptr};
    ReadOnlySpace *readOnlySpace_ {nullptr};
    AppSpawnSpace *appSpawnSpace_ {nullptr};
    // Spaces used for special kinds of objects.
    NonMovableSpace *nonMovableSpace_ {nullptr};
    MachineCodeSpace *machineCodeSpace_ {nullptr};
    HugeObjectSpace *hugeObjectSpace_ {nullptr};
    SnapshotSpace *snapshotSpace_ {nullptr};

    /*
     * Garbage collectors collecting garbage in different scopes.
     */

    /*
     * Semi space GC which collects garbage only in young spaces.
     * This is currently optional because the partial GC also covers its functionality.
     */
    STWYoungGC *stwYoungGC_ {nullptr};

    /*
     * The mostly used partial GC which collects garbage in young spaces,
     * and part of the old spaces if needed, as determined by GC heuristics.
     */
    PartialGC *partialGC_ {nullptr};

    // Full collector which collects garbage in all valid heap spaces.
    FullGC *fullGC_ {nullptr};

    // Concurrent marker which coordinates actions of GC markers and mutators.
    ConcurrentMarker *concurrentMarker_ {nullptr};

    // Concurrent sweeper which coordinates actions of sweepers (in spaces excluding young semi spaces) and mutators.
    ConcurrentSweeper *sweeper_ {nullptr};

    // Parallel evacuator which evacuates objects from one space to another one.
    ParallelEvacuator *evacuator_ {nullptr};

    // Incremental marker which coordinates actions of GC markers in idle time.
    IncrementalMarker *incrementalMarker_ {nullptr};

    /*
     * Different kinds of markers used by different collectors.
     * Depending on the collector algorithm, some markers can do simple marking
     * while some others need to handle object movement.
     */
    Marker *nonMovableMarker_ {nullptr};
    Marker *semiGCMarker_ {nullptr};
    Marker *compressGCMarker_ {nullptr};

    // Work manager managing the tasks mostly generated in the GC mark phase.
    WorkManager *workManager_ {nullptr};

    MarkType markType_ {MarkType::MARK_YOUNG};

    bool parallelGC_ {true};
    bool fullGCRequested_ {false};
    bool fullMarkRequested_ {false};
    bool oldSpaceLimitAdjusted_ {false};
    bool shouldThrowOOMError_ {false};
    bool runningNativeFinalizeCallbacks_ {false};
    bool enableIdleGC_ {false};
    HeapMode mode_ { HeapMode::NORMAL };

    size_t globalSpaceAllocLimit_ {0};
    size_t promotedSize_ {0};
    size_t semiSpaceCopiedSize_ {0};
    size_t nonNewSpaceNativeBindingSize_{0};
    size_t globalSpaceNativeLimit_ {0};
    MemGrowingType memGrowingtype_ {MemGrowingType::HIGH_THROUGHPUT};

    bool clearTaskFinished_ {true};
    os::memory::Mutex waitClearTaskFinishedMutex_;
    os::memory::ConditionVariable waitClearTaskFinishedCV_;
    uint32_t runningTaskCount_ {0};
    // parallel marker task number.
    uint32_t maxMarkTaskCount_ {0};
    // parallel evacuator task number.
    uint32_t maxEvacuateTaskCount_ {0};
    os::memory::Mutex waitTaskFinishedMutex_;
    os::memory::ConditionVariable waitTaskFinishedCV_;

    /*
     * The memory controller providing memory statistics (by allocations and collections),
     * which is used for GC heuristics.
     */
    MemController *memController_ {nullptr};

    // Region allocators.
    NativeAreaAllocator *nativeAreaAllocator_ {nullptr};
    HeapRegionAllocator *heapRegionAllocator_ {nullptr};

    // Application status
    bool inBackground_ {false};

    IdleNotifyStatusCallback notifyIdleStatusCallback {nullptr};

    IdleTaskType idleTask_ {IdleTaskType::NO_TASK};
    float idlePredictDuration_ {0.0f};
    size_t heapAliveSizeAfterGC_ {0};
    double idleTaskFinishTime_ {0.0};
#if ECMASCRIPT_ENABLE_HEAP_VERIFY
    bool isVerifying_ {false};
#endif
};
}  // namespace panda::ecmascript

#endif  // ECMASCRIPT_MEM_HEAP_H