/*
 * Copyright (c) 2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "ecmascript/mem/idle_gc_trigger.h"

#include "ecmascript/mem/concurrent_marker.h"
#include "ecmascript/mem/heap-inl.h"

namespace panda::ecmascript {
void IdleGCTrigger::NotifyVsyncIdleStart()
{
    TryTriggerIdleLocalOldGC();
}

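// Called on a looper idle notification: prefer posting a remark task to finish an in-progress
// local concurrent mark; otherwise, if none of the idle mark tasks is already pending and
// concurrent marking is allowed, try to post an old, young or shared concurrent-mark task.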
bool IdleGCTrigger::NotifyLooperIdleStart([[maybe_unused]] int64_t timestamp, [[maybe_unused]] int idleTime)
{
    LOG_ECMA_IF(optionalLogEnabled_, DEBUG) << "IdleGCTrigger: recv once looper idle time";
    idleState_.store(true);
    if (heap_->GetJSThread()->IsMarkFinished() &&
        heap_->GetConcurrentMarker()->IsTriggeredConcurrentMark() &&
        thread_->IsReadyToSharedConcurrentMark()) {
        return PostIdleGCTask(TRIGGER_IDLE_GC_TYPE::LOCAL_REMARK);
    }
    if (!IsPossiblePostGCTask(TRIGGER_IDLE_GC_TYPE::LOCAL_CONCURRENT_FULL_MARK) ||
        !IsPossiblePostGCTask(TRIGGER_IDLE_GC_TYPE::LOCAL_CONCURRENT_YOUNG_MARK) ||
        !IsPossiblePostGCTask(TRIGGER_IDLE_GC_TYPE::SHARED_CONCURRENT_MARK)) {
        return true;
    }
    if (!heap_->CheckCanTriggerConcurrentMarking()) {
        return false;
    }
    return TryTriggerIdleLocalOldGC() || TryTriggerIdleYoungGC() || TryTriggerIdleSharedOldGC();
}

void IdleGCTrigger::NotifyLooperIdleEnd([[maybe_unused]] int64_t timestamp)
{
    idleState_.store(false);
}

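// Finish an already-triggered local concurrent mark while idle, provided the shared GC is not
// running and the VM is neither serializing nor in a sensitive state.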
void IdleGCTrigger::TryTriggerHandleMarkFinished()
{
    // Wait for the shared GC to finish first.
    if (!thread_->IsReadyToSharedConcurrentMark()) {
        return;
    }
    if (heap_->GetJSThread()->IsMarkFinished() && heap_->GetConcurrentMarker()->IsTriggeredConcurrentMark()
        && !heap_->GetOnSerializeEvent() && !heap_->InSensitiveStatus()) {
        heap_->SetCanThrowOOMError(false);
        heap_->GetConcurrentMarker()->HandleMarkingFinished(GCReason::IDLE);
        heap_->SetCanThrowOOMError(true);
    }
}

void IdleGCTrigger::TryTriggerLocalConcurrentMark(MarkType type)
{
    if (heap_->GetConcurrentMarker()->IsEnabled() && heap_->CheckCanTriggerConcurrentMarking()) {
        heap_->SetMarkType(type);
        heap_->TriggerConcurrentMarking(MarkReason::IDLE);
    }
}

bool IdleGCTrigger::TryTriggerIdleYoungGC()
{
    if (CheckIdleYoungGC() && !heap_->NeedStopCollection()) {
        return PostIdleGCTask(TRIGGER_IDLE_GC_TYPE::LOCAL_CONCURRENT_YOUNG_MARK);
    }
    return false;
}

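// Post a local concurrent full-mark task if a full mark has been explicitly requested, or if the
// idle old-space thresholds are reached.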
bool IdleGCTrigger::TryTriggerIdleLocalOldGC()
{
    if (heap_->GetJSThread()->FullMarkRequest() && !heap_->NeedStopCollection()) {
        heap_->GetJSThread()->ResetFullMarkRequest();
        return PostIdleGCTask(TRIGGER_IDLE_GC_TYPE::LOCAL_CONCURRENT_FULL_MARK);
    }
    if (CheckIdleLocalOldGC(heap_) && ReachIdleLocalOldGCThresholds() && !heap_->NeedStopCollection()) {
        return PostIdleGCTask(TRIGGER_IDLE_GC_TYPE::LOCAL_CONCURRENT_FULL_MARK);
    }
    return false;
}

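// Post a shared concurrent-mark task when the shared heap allows concurrent marking and its size
// thresholds are reached.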
bool IdleGCTrigger::TryTriggerIdleSharedOldGC()
{
    if (!CheckIdleOrHintOldGC<SharedHeap>(sHeap_) || sHeap_->NeedStopCollection() ||
        !sHeap_->CheckCanTriggerConcurrentMarking(thread_)) {
        return false;
    }
    if (ReachIdleSharedGCThresholds()) {
        return PostIdleGCTask(TRIGGER_IDLE_GC_TYPE::SHARED_CONCURRENT_MARK);
    }
    return false;
}

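// Threshold checks for an idle local old GC: an oversized global native footprint, or the
// old/huge object size, committed capacity, or global heap object size exceeding its respective
// limit scaled by IDLE_SPACE_SIZE_LIMIT_RATE.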
bool IdleGCTrigger::ReachIdleLocalOldGCThresholds()
{
    bool isFullMarking = heap_->IsConcurrentFullMark() && heap_->GetJSThread()->IsMarking();
    bool isNativeSizeLargeTrigger = isFullMarking ? false : heap_->GlobalNativeSizeLargerThanLimitForIdle();
    if (isNativeSizeLargeTrigger) {
        return true;
    }

    OldSpace *oldSpace = heap_->GetOldSpace();
    HugeObjectSpace *hugeObjectSpace = heap_->GetHugeObjectSpace();
    size_t idleSizeLimit = static_cast<size_t>(oldSpace->GetInitialCapacity() *
                                               IDLE_SPACE_SIZE_LIMIT_RATE);
    size_t currentSize = oldSpace->GetHeapObjectSize() + hugeObjectSpace->GetHeapObjectSize();
    if (currentSize >= idleSizeLimit) {
        return true;
    }

    size_t maxCapacity = oldSpace->GetMaximumCapacity() + oldSpace->GetOvershootSize() +
                         oldSpace->GetOutOfMemoryOvershootSize();
    size_t currentCapacity = oldSpace->GetCommittedSize() + hugeObjectSpace->GetCommittedSize();
    size_t idleCapacityLimit = static_cast<size_t>(maxCapacity * IDLE_SPACE_SIZE_LIMIT_RATE);
    if (currentCapacity >= idleCapacityLimit) {
        return true;
    }

    size_t oldSpaceAllocLimit = heap_->GetGlobalSpaceAllocLimit() + oldSpace->GetOvershootSize();
    size_t idleOldSpaceAllocLimit = static_cast<size_t>(oldSpaceAllocLimit * IDLE_SPACE_SIZE_LIMIT_RATE);
    if (heap_->GetHeapObjectSize() > idleOldSpaceAllocLimit) {
        return true;
    }
    return false;
}

bool IdleGCTrigger::ReachIdleSharedPartialGCThresholds()
{
    size_t expectGlobalSizeLimit = sHeap_->GetGlobalSpaceAllocLimit() * IDLE_PATIAL_GC_SPACE_SIZE_LIMIT_RATE;
    return sHeap_->GetHeapObjectSize() > expectGlobalSizeLimit;
}

bool IdleGCTrigger::ReachIdleSharedGCThresholds()
{
    size_t expectSizeLimit = sHeap_->GetOldSpace()->GetInitialCapacity() * IDLE_SPACE_SIZE_LIMIT_RATE;
    size_t currentOldSize = sHeap_->GetOldSpace()->GetHeapObjectSize();
    size_t expectGlobalSizeLimit = sHeap_->GetGlobalSpaceAllocLimit() * IDLE_SPACE_SIZE_LIMIT_RATE;
    return currentOldSize > expectSizeLimit || sHeap_->GetHeapObjectSize() > expectGlobalSizeLimit;
}

void IdleGCTrigger::TryPostHandleMarkFinished()
{
    if (IsIdleState()) {
        PostIdleGCTask(TRIGGER_IDLE_GC_TYPE::LOCAL_REMARK);
    }
}

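// Hand the GC type to the registered trigger callback together with the owning EcmaVM, and record
// the pending task so the same type is not posted again before it runs.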
bool IdleGCTrigger::PostIdleGCTask(TRIGGER_IDLE_GC_TYPE gcType)
{
    if (triggerGCTaskCallback_ != nullptr && IsPossiblePostGCTask(gcType) && !heap_->NeedStopCollection()) {
        std::pair<void*, uint8_t> data(heap_->GetEcmaVM(), static_cast<uint8_t>(gcType));
        triggerGCTaskCallback_(data);
        SetPostGCTask(gcType);
        LOG_GC(INFO) << "IdleGCTrigger: post once " << GetGCTypeName(gcType) << " on idleTime";
        return true;
    }
    LOG_GC(DEBUG) << "IdleGCTrigger: failed to post once " << GetGCTypeName(gcType);
    return false;
}

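// A young GC is worthwhile when enough has been allocated in new space since the last GC; on a
// long idle period that alone suffices, otherwise the new-space object size must also reach the
// scaled capacity limit.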
bool IdleGCTrigger::CheckIdleYoungGC(bool isLongIdle) const
{
    auto newSpace = heap_->GetNewSpace();
    size_t allocatedSizeSinceGC = newSpace->GetAllocatedSizeSinceGC(newSpace->GetTop());
    LOG_GC(DEBUG) << "IdleGCTrigger: check young GC semi Space size since gc:" << allocatedSizeSinceGC;
    if (isLongIdle && allocatedSizeSinceGC > IDLE_MIN_EXPECT_RECLAIM_SIZE) {
        return true;
    }
    size_t expectIdleLimitSize = (newSpace->GetInitialCapacity() + newSpace->GetOvershootSize()) *
                                 IDLE_SPACE_SIZE_LIMIT_RATE;
    return allocatedSizeSinceGC > IDLE_MIN_EXPECT_RECLAIM_SIZE && newSpace->GetObjectSize() >= expectIdleLimitSize;
}

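// The non-young part of the heap must have grown beyond the post-GC live size by both a minimum
// ratio and a minimum step before an idle old GC is considered.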
bool IdleGCTrigger::CheckIdleLocalOldGC(const Heap *heap) const
{
    size_t afterGCSize = heap->GetHeapAliveSizeExcludesYoungAfterGC();
    size_t currentSize = heap->GetHeapObjectSize() - heap->GetNewSpace()->GetHeapObjectSize();
    size_t expectSize = std::max(static_cast<size_t>(afterGCSize * IDLE_SPACE_SIZE_MIN_INC_RATIO),
                                 afterGCSize + IDLE_SPACE_SIZE_MIN_INC_STEP);
    return currentSize >= expectSize;
}

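// Trigger an old GC for native bindings only when the bound native size has grown since the last
// GC by more than a minimum increment (an absolute size or a ratio, whichever is larger).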
bool IdleGCTrigger::CheckLocalBindingNativeTriggerOldGC() const
{
    size_t nativeBindingSize = heap_->GetNativeBindingSize();
    size_t nativeBindingSizeLastGC = heap_->GetNativeBindingSizeAfterLastGC();
    if (nativeBindingSize <= nativeBindingSizeLastGC) {
        return false;
    }
    size_t expectIncrementalNative = std::max(IDLE_BINDING_NATIVE_MIN_INC_SIZE,
        static_cast<size_t>(nativeBindingSizeLastGC * IDLE_BINDING_NATIVE_MIN_INC_RATIO));
    size_t incrementalNative = nativeBindingSize - nativeBindingSizeLastGC;
    LOG_GC(DEBUG) << "IdleGCTrigger: check old GC expectIncrementalNative:" << expectIncrementalNative
                  << ";incrementalNative:" << incrementalNative;
    return incrementalNative > expectIncrementalNative;
}

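// Entry point for a scheduled idle GC task: re-validate the trigger conditions for the given type,
// run the corresponding collection or concurrent mark, then clear the pending-task flag. With CMC
// GC enabled, idle collection is delegated to the common heap instead.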
void IdleGCTrigger::TryTriggerIdleGC(TRIGGER_IDLE_GC_TYPE gcType)
{
    if (ecmascript::g_isEnableCMCGC) {
        if (gcType == TRIGGER_IDLE_GC_TYPE::FULL_GC) {
            common::Heap::GetHeap().TryIdleGC();
        }
        return;
    }
    LOG_GC(DEBUG) << "IdleGCTrigger: recv once notify of " << GetGCTypeName(gcType);
    switch (gcType) {
        case TRIGGER_IDLE_GC_TYPE::FULL_GC:
            if (CheckIdleOrHintFullGC<Heap>(heap_) && !heap_->NeedStopCollection()) {
                LOG_GC(INFO) << "IdleGCTrigger: trigger " << GetGCTypeName(gcType);
                heap_->CollectGarbage(TriggerGCType::FULL_GC, GCReason::IDLE);
            } else if (CheckLocalBindingNativeTriggerOldGC() && !heap_->NeedStopCollection()) {
                LOG_GC(INFO) << "IdleGCTrigger: trigger local old GC by native binding size.";
                heap_->CollectGarbage(TriggerGCType::OLD_GC, GCReason::IDLE_NATIVE);
            } else if (CheckIdleYoungGC(true) && !heap_->NeedStopCollection()) {
                LOG_GC(INFO) << "IdleGCTrigger: trigger young gc";
                heap_->CollectGarbage(TriggerGCType::YOUNG_GC, GCReason::IDLE);
            }
            break;
        case TRIGGER_IDLE_GC_TYPE::SHARED_CONCURRENT_PARTIAL_MARK:
            if (CheckIdleOrHintOldGC<SharedHeap>(sHeap_) && sHeap_->CheckCanTriggerConcurrentMarking(thread_)
                && !sHeap_->NeedStopCollection()) {
                LOG_GC(INFO) << "IdleGCTrigger: trigger " << GetGCTypeName(gcType);
                sHeap_->TriggerConcurrentMarking<TriggerGCType::SHARED_PARTIAL_GC, MarkReason::IDLE>(thread_);
            }
            break;
        case TRIGGER_IDLE_GC_TYPE::SHARED_CONCURRENT_MARK:
            if (CheckIdleOrHintOldGC<SharedHeap>(sHeap_) && sHeap_->CheckCanTriggerConcurrentMarking(thread_)
                && !sHeap_->NeedStopCollection()) {
                LOG_GC(INFO) << "IdleGCTrigger: trigger " << GetGCTypeName(gcType);
                sHeap_->TriggerConcurrentMarking<TriggerGCType::SHARED_GC, MarkReason::IDLE>(thread_);
            }
            break;
        case TRIGGER_IDLE_GC_TYPE::SHARED_FULL_GC:
            if (CheckIdleOrHintFullGC<SharedHeap>(sHeap_) && !sHeap_->NeedStopCollection()) {
                LOG_GC(INFO) << "IdleGCTrigger: trigger " << GetGCTypeName(gcType);
                sHeap_->CompressCollectGarbageNotWaiting<GCReason::IDLE>(thread_);
            }
            break;
        case TRIGGER_IDLE_GC_TYPE::LOCAL_CONCURRENT_YOUNG_MARK:
            if (CheckIdleYoungGC() && !heap_->NeedStopCollection()) {
                LOG_GC(INFO) << "IdleGCTrigger: trigger " << GetGCTypeName(gcType);
                TryTriggerLocalConcurrentMark(MarkType::MARK_YOUNG);
            }
            break;
        case TRIGGER_IDLE_GC_TYPE::LOCAL_CONCURRENT_FULL_MARK:
            if (CheckIdleLocalOldGC(heap_) && !heap_->NeedStopCollection()) {
                LOG_GC(INFO) << "IdleGCTrigger: trigger " << GetGCTypeName(gcType);
                TryTriggerLocalConcurrentMark(MarkType::MARK_FULL);
            }
            break;
        case TRIGGER_IDLE_GC_TYPE::LOCAL_REMARK:
            if (!heap_->NeedStopCollection()) {
                LOG_GC(INFO) << "IdleGCTrigger: trigger " << GetGCTypeName(gcType);
                TryTriggerHandleMarkFinished();
            }
            break;
        default: // LCOV_EXCL_BR_LINE
            LOG_GC(ERROR) << "IdleGCTrigger: this branch is unreachable";
            return;
    }
    ClearPostGCTask(gcType);
}
}  // namespace panda::ecmascript