1 /*
2 * Copyright (c) 2025 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "common_components/heap/collector/heuristic_gc_policy.h"
17
18 #include "common_components/heap/allocator/allocator.h"
19 #include "common_components/heap/heap.h"
20 #include "common_interfaces/base_runtime.h"
21
22 namespace common {
// Process-wide startup phase; atomic because it is read on allocation paths
// while the delayed startup task advances it.
std::atomic<StartupStatus> StartupStatusManager::startupStatus_ = StartupStatus::BEFORE_STARTUP;
24
OnAppStartup()25 void StartupStatusManager::OnAppStartup()
26 {
27 startupStatus_ = StartupStatus::COLD_STARTUP;
28 Taskpool *threadPool = common::Taskpool::GetCurrentTaskpool();
29 threadPool->PostDelayedTask(
30 std::make_unique<StartupTask>(0, threadPool, STARTUP_DURATION_MS), STARTUP_DURATION_MS);
31 }
32
Init()33 void HeuristicGCPolicy::Init()
34 {
35 HeapParam &heapParam = BaseRuntime::GetInstance()->GetHeapParam();
36 heapSize_ = heapParam.heapSize * KB;
37 #ifndef PANDA_TARGET_32
38 // 2: only half heapSize used allocate
39 heapSize_ = heapSize_ / 2;
40 #endif
41 }
42
ShouldRestrainGCOnStartupOrSensitive()43 bool HeuristicGCPolicy::ShouldRestrainGCOnStartupOrSensitive()
44 {
45 // Startup
46 size_t allocated = Heap::GetHeap().GetAllocator().GetAllocatedBytes();
47 StartupStatus currentStatus = StartupStatusManager::GetStartupStatus();
48 if (UNLIKELY_CC(currentStatus == StartupStatus::COLD_STARTUP &&
49 allocated < heapSize_ * COLD_STARTUP_PHASE1_GC_THRESHOLD_RATIO)) {
50 return true;
51 }
52 if (currentStatus == StartupStatus::COLD_STARTUP_PARTIALLY_FINISH &&
53 allocated < heapSize_ * COLD_STARTUP_PHASE2_GC_THRESHOLD_RATIO) {
54 return true;
55 }
56 // Sensitive
57 return ShouldRestrainGCInSensitive(allocated);
58 }
59
GetStartupStatus() const60 StartupStatus HeuristicGCPolicy::GetStartupStatus() const
61 {
62 return StartupStatusManager::GetStartupStatus();
63 }
64
TryHeuristicGC()65 void HeuristicGCPolicy::TryHeuristicGC()
66 {
67 if (UNLIKELY_CC(ShouldRestrainGCOnStartupOrSensitive())) {
68 return;
69 }
70
71 Collector& collector = Heap::GetHeap().GetCollector();
72 size_t threshold = collector.GetGCStats().GetThreshold();
73 size_t allocated = Heap::GetHeap().GetAllocator().GetAllocatedBytes();
74 if (allocated >= threshold) {
75 if (collector.GetGCStats().shouldRequestYoung) {
76 DLOG(ALLOC, "request heu gc: young %zu, threshold %zu", allocated, threshold);
77 collector.RequestGC(GC_REASON_YOUNG, true, GC_TYPE_YOUNG);
78 } else {
79 DLOG(ALLOC, "request heu gc: allocated %zu, threshold %zu", allocated, threshold);
80 collector.RequestGC(GC_REASON_HEU, true, GC_TYPE_FULL);
81 }
82 }
83 }
84
TryIdleGC()85 void HeuristicGCPolicy::TryIdleGC()
86 {
87 if (UNLIKELY_CC(ShouldRestrainGCOnStartupOrSensitive())) {
88 return;
89 }
90
91 if (aliveSizeAfterGC_ == 0) {
92 return;
93 }
94 size_t allocated = Heap::GetHeap().GetAllocator().GetAllocatedBytes();
95 size_t expectHeapSize = std::max(static_cast<size_t>(aliveSizeAfterGC_ * IDLE_SPACE_SIZE_MIN_INC_RATIO),
96 aliveSizeAfterGC_ + IDLE_SPACE_SIZE_MIN_INC_STEP_FULL);
97 if (allocated >= expectHeapSize) {
98 DLOG(ALLOC, "request idle gc: allocated %zu, expectHeapSize %zu, aliveSizeAfterGC %zu", allocated,
99 expectHeapSize, aliveSizeAfterGC_);
100 Heap::GetHeap().GetCollector().RequestGC(GC_REASON_IDLE, true, GC_TYPE_FULL);
101 }
102 }
103
// Decides whether GC should be restrained because the app is in a sensitive
// scene. `currentSize` is the currently allocated byte count.
// Returns true only while the app is in ENTER_HIGH_SENSITIVE and heap growth
// since entering the scene is still below INC_OBJ_SIZE_IN_SENSITIVE.
bool HeuristicGCPolicy::ShouldRestrainGCInSensitive(size_t currentSize)
{
    AppSensitiveStatus current = GetSensitiveStatus();
    switch (current) {
        case AppSensitiveStatus::NORMAL_SCENE:
            return false;
        case AppSensitiveStatus::ENTER_HIGH_SENSITIVE: {
            // Record the heap size at the moment the sensitive scene began;
            // 0 is used as the "not yet recorded" sentinel.
            if (GetRecordHeapObjectSizeBeforeSensitive() == 0) {
                SetRecordHeapObjectSizeBeforeSensitive(currentSize);
            }
            // If the collector's stats ask for a young GC, do not restrain it.
            if (Heap::GetHeap().GetCollector().GetGCStats().shouldRequestYoung) {
                return false;
            }
            // Restrain GC only while growth since entering the sensitive scene
            // stays within the allowed budget.
            if (currentSize < (GetRecordHeapObjectSizeBeforeSensitive() + INC_OBJ_SIZE_IN_SENSITIVE)) {
                return true;
            }
            return false;
        }
        case AppSensitiveStatus::EXIT_HIGH_SENSITIVE: {
            // CAS back to NORMAL_SCENE so only one caller performs the reset.
            if (CASSensitiveStatus(current, AppSensitiveStatus::NORMAL_SCENE)) {
                // Set record heap obj size 0 after exit high sensitive
                SetRecordHeapObjectSizeBeforeSensitive(0);
            }
            return false;
        }
        default:
            return false;
    }
}
133
NotifyNativeAllocation(size_t bytes)134 void HeuristicGCPolicy::NotifyNativeAllocation(size_t bytes)
135 {
136 notifiedNativeSize_.fetch_add(bytes, std::memory_order_relaxed);
137 size_t currentObjects = nativeHeapObjects_.fetch_add(1, std::memory_order_relaxed);
138 if (currentObjects % NOTIFY_NATIVE_INTERVAL == NOTIFY_NATIVE_INTERVAL - 1
139 || bytes > NATIVE_IMMEDIATE_THRESHOLD) {
140 CheckGCForNative();
141 }
142 }
CheckGCForNative()143 void HeuristicGCPolicy::CheckGCForNative()
144 {
145 size_t currentNativeSize = notifiedNativeSize_.load(std::memory_order_relaxed);
146 size_t currentThreshold = nativeHeapThreshold_.load(std::memory_order_relaxed);
147 if (currentNativeSize > currentThreshold) {
148 if (currentNativeSize > URGENCY_NATIVE_LIMIT) {
149 // Native binding size is too large, should wait a sync finished.
150 Heap::GetHeap().GetCollector().RequestGC(GC_REASON_NATIVE_SYNC, false, GC_TYPE_FULL);
151 return;
152 }
153 Heap::GetHeap().GetCollector().RequestGC(GC_REASON_NATIVE, true, GC_TYPE_FULL);
154 }
155 }
// Deducts `bytes` from the notified native size. A CAS loop is used so a
// concurrent NotifyNativeAllocation update cannot be lost, and the deduction
// is clamped to the currently recorded total so the counter never underflows.
void HeuristicGCPolicy::NotifyNativeFree(size_t bytes)
{
    size_t allocated;
    size_t newFreedBytes;
    do {
        allocated = notifiedNativeSize_.load(std::memory_order_relaxed);
        newFreedBytes = std::min(allocated, bytes);
        // We should not be registering more free than allocated bytes.
        // But correctly keep going in non-debug builds.
        ASSERT(newFreedBytes == bytes);
    } while (!notifiedNativeSize_.compare_exchange_weak(allocated, allocated - newFreedBytes,
                                                        std::memory_order_relaxed));
}
169
// Replaces a previously notified native allocation of `oldBytes` with
// `newBytes`: the old accounting is released first, then the new size is
// registered (which may itself trigger a native GC check).
void HeuristicGCPolicy::NotifyNativeReset(size_t oldBytes, size_t newBytes)
{
    NotifyNativeFree(oldBytes);
    NotifyNativeAllocation(newBytes);
}
175
GetNotifiedNativeSize() const176 size_t HeuristicGCPolicy::GetNotifiedNativeSize() const
177 {
178 return notifiedNativeSize_.load(std::memory_order_relaxed);
179 }
180
// Updates the native-heap GC trigger threshold; relaxed store matches the
// relaxed loads used by CheckGCForNative.
void HeuristicGCPolicy::SetNativeHeapThreshold(size_t newThreshold)
{
    nativeHeapThreshold_.store(newThreshold, std::memory_order_relaxed);
}
185
GetNativeHeapThreshold() const186 size_t HeuristicGCPolicy::GetNativeHeapThreshold() const
187 {
188 return nativeHeapThreshold_.load(std::memory_order_relaxed);
189 }
190
// Records the live heap size measured after the last GC; used as the baseline
// by the idle/hint/background GC heuristics in this file.
void HeuristicGCPolicy::RecordAliveSizeAfterLastGC(size_t aliveBytes)
{
    aliveSizeAfterGC_ = aliveBytes;
}
195
ChangeGCParams(bool isBackground)196 void HeuristicGCPolicy::ChangeGCParams(bool isBackground)
197 {
198 if (isBackground) {
199 size_t allocated = Heap::GetHeap().GetAllocator().GetAllocatedBytes();
200 if (allocated > aliveSizeAfterGC_ && (allocated - aliveSizeAfterGC_) > BACKGROUND_LIMIT &&
201 allocated > MIN_BACKGROUND_GC_SIZE) {
202 Heap::GetHeap().GetCollector().RequestGC(GC_REASON_BACKGROUND, true, GC_TYPE_FULL);
203 }
204 common::Taskpool::GetCurrentTaskpool()->SetThreadPriority(common::PriorityMode::BACKGROUND);
205 BaseRuntime::GetInstance()->GetGCParam().multiplier = 1;
206 } else {
207 common::Taskpool::GetCurrentTaskpool()->SetThreadPriority(common::PriorityMode::FOREGROUND);
208 // 3: The front-end application waterline is 3 times
209 BaseRuntime::GetInstance()->GetGCParam().multiplier = 3;
210 }
211 }
212
CheckAndTriggerHintGC(MemoryReduceDegree degree)213 bool HeuristicGCPolicy::CheckAndTriggerHintGC(MemoryReduceDegree degree)
214 {
215 if (UNLIKELY_CC(ShouldRestrainGCOnStartupOrSensitive())) {
216 return false;
217 }
218 size_t allocated = Heap::GetHeap().GetAllocator().GetAllocatedBytes();
219
220 size_t stepAfterLastGC = 0;
221 if (degree == MemoryReduceDegree::LOW) {
222 stepAfterLastGC = LOW_DEGREE_STEP_IN_IDLE;
223 } else {
224 stepAfterLastGC = HIGH_DEGREE_STEP_IN_IDLE;
225 }
226 if (aliveSizeAfterGC_ == 0) {
227 return false;
228 }
229 size_t expectHeapSize = std::max(static_cast<size_t>(aliveSizeAfterGC_ * IDLE_MIN_INC_RATIO),
230 aliveSizeAfterGC_ + stepAfterLastGC);
231 if (expectHeapSize < allocated) {
232 DLOG(ALLOC, "request heu gc by hint: allocated %zu, expectHeapSize %zu, aliveSizeAfterGC %zu",
233 allocated, expectHeapSize, aliveSizeAfterGC_);
234 Heap::GetHeap().GetCollector().RequestGC(GC_REASON_HINT, true, GC_TYPE_FULL);
235 return true;
236 }
237 return false;
238 }
239 } // namespace common
240