1 /*
2 * Copyright (c) 2024 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "ecmascript/mem/idle_gc_trigger.h"
17
18
19 #include "ecmascript/mem/concurrent_marker.h"
20 #include "ecmascript/mem/heap-inl.h"
21
22 namespace panda::ecmascript {
// Entry point for a vsync-driven idle notification: only attempts a local
// old-generation (full-mark) GC; young/shared triggers are handled by the
// looper idle path below.
void IdleGCTrigger::NotifyVsyncIdleStart()
{
    TryTriggerIdleLocalOldGC();
}
27
// Entry point for a looper idle notification. Marks the trigger as being in
// the idle window and tries to schedule at most one GC task for this slot.
// Returns true when an idle GC task was posted (or the slot is considered
// consumed), false when nothing could be scheduled.
bool IdleGCTrigger::NotifyLooperIdleStart([[maybe_unused]] int64_t timestamp, [[maybe_unused]] int idleTime)
{
    LOG_ECMA_IF(optionalLogEnabled_, DEBUG) << "IdleGCTrigger: recv once looper idle time";
    idleState_.store(true);
    // Local concurrent mark has finished and no shared concurrent mark is in
    // the way: spend this idle slot on the remark/finish step.
    if (heap_->GetJSThread()->IsMarkFinished() &&
        heap_->GetConcurrentMarker()->IsTriggeredConcurrentMark() &&
        thread_->IsReadyToSharedConcurrentMark()) {
        return PostIdleGCTask(TRIGGER_IDLE_GC_TYPE::LOCAL_REMARK);
    }
    // If any of the mark-task kinds cannot currently be posted (presumably a
    // task of that kind is already in flight), report the slot as handled so
    // the caller does not keep retrying.
    if (!IsPossiblePostGCTask(TRIGGER_IDLE_GC_TYPE::LOCAL_CONCURRENT_FULL_MARK) ||
        !IsPossiblePostGCTask(TRIGGER_IDLE_GC_TYPE::LOCAL_CONCURRENT_YOUNG_MARK) ||
        !IsPossiblePostGCTask(TRIGGER_IDLE_GC_TYPE::SHARED_CONCURRENT_MARK)) {
        return true;
    }
    if (!heap_->CheckCanTriggerConcurrentMarking()) {
        return false;
    }
    // Priority order: local old (full mark) > local young > shared old.
    return TryTriggerIdleLocalOldGC() || TryTriggerIdleYoungGC() || TryTriggerIdleSharedOldGC();
}
47
NotifyLooperIdleEnd(int64_t timestamp)48 void IdleGCTrigger::NotifyLooperIdleEnd([[maybe_unused]] int64_t timestamp)
49 {
50 idleState_.store(false);
51 }
52
// Completes a finished local concurrent mark (the remark phase) when it is
// safe to do so: the shared GC must not be mid-mark, serialization must not
// be in progress, and the app must not be in a sensitive state.
void IdleGCTrigger::TryTriggerHandleMarkFinished()
{
    // wait sharedGC finish
    if (!thread_->IsReadyToSharedConcurrentMark()) {
        return;
    }
    if (heap_->GetJSThread()->IsMarkFinished() && heap_->GetConcurrentMarker()->IsTriggeredConcurrentMark()
        && !heap_->GetOnSerializeEvent() && !heap_->InSensitiveStatus()) {
        // Suppress OOM throwing while finishing the mark, then restore it.
        heap_->SetCanThrowOOMError(false);
        heap_->GetConcurrentMarker()->HandleMarkingFinished();
        heap_->SetCanThrowOOMError(true);
    }
}
66
TryTriggerLocalConcurrentMark(MarkType type)67 void IdleGCTrigger::TryTriggerLocalConcurrentMark(MarkType type)
68 {
69 if (heap_->GetConcurrentMarker()->IsEnabled() && heap_->CheckCanTriggerConcurrentMarking()) {
70 heap_->SetMarkType(type);
71 heap_->TriggerConcurrentMarking();
72 }
73 }
74
TryTriggerIdleYoungGC()75 bool IdleGCTrigger::TryTriggerIdleYoungGC()
76 {
77 if (CheckIdleYoungGC() && !heap_->NeedStopCollection()) {
78 return PostIdleGCTask(TRIGGER_IDLE_GC_TYPE::LOCAL_CONCURRENT_YOUNG_MARK);
79 }
80 return false;
81 }
82
// Posts an idle local full-mark task, either because the JS thread explicitly
// requested a full mark or because the old-space thresholds were reached.
// Returns true when a task was posted.
bool IdleGCTrigger::TryTriggerIdleLocalOldGC()
{
    if (heap_->GetJSThread()->FullMarkRequest() && !heap_->NeedStopCollection()) {
        // NOTE(review): the request flag is consumed before PostIdleGCTask —
        // if posting fails the request appears to be lost; confirm intended.
        heap_->GetJSThread()->ResetFullMarkRequest();
        return PostIdleGCTask(TRIGGER_IDLE_GC_TYPE::LOCAL_CONCURRENT_FULL_MARK);
    }
    if (CheckIdleLocalOldGC(heap_) && ReachIdleLocalOldGCThresholds() && !heap_->NeedStopCollection()) {
        return PostIdleGCTask(TRIGGER_IDLE_GC_TYPE::LOCAL_CONCURRENT_FULL_MARK);
    }
    return false;
}
94
TryTriggerIdleSharedOldGC()95 bool IdleGCTrigger::TryTriggerIdleSharedOldGC()
96 {
97 if (!CheckIdleOrHintOldGC<SharedHeap>(sHeap_) || sHeap_->NeedStopCollection() ||
98 !sHeap_->CheckCanTriggerConcurrentMarking(thread_)) {
99 return false;
100 }
101 if (ReachIdleSharedPartialGCThresholds()) {
102 return PostIdleGCTask(TRIGGER_IDLE_GC_TYPE::SHARED_CONCURRENT_PARTIAL_MARK);
103 } else if (ReachIdleSharedGCThresholds()) {
104 return PostIdleGCTask(TRIGGER_IDLE_GC_TYPE::SHARED_CONCURRENT_MARK);
105 }
106 return false;
107 }
108
ReachIdleLocalOldGCThresholds()109 bool IdleGCTrigger::ReachIdleLocalOldGCThresholds()
110 {
111 bool isFullMarking = heap_->IsConcurrentFullMark() && heap_->GetJSThread()->IsMarking();
112 bool isNativeSizeLargeTrigger = isFullMarking ? false : heap_->GlobalNativeSizeLargerThanLimitForIdle();
113 if (isNativeSizeLargeTrigger) {
114 return true;
115 }
116
117 OldSpace *oldSpace = heap_->GetOldSpace();
118 HugeObjectSpace *hugeObjectSpace = heap_->GetHugeObjectSpace();
119 size_t idleSizeLimit = static_cast<size_t>(oldSpace->GetInitialCapacity() *
120 IDLE_SPACE_SIZE_LIMIT_RATE);
121 size_t currentSize = oldSpace->GetHeapObjectSize() + hugeObjectSpace->GetHeapObjectSize();
122 if (currentSize >= idleSizeLimit) {
123 return true;
124 }
125
126 size_t maxCapacity = oldSpace->GetMaximumCapacity() + oldSpace->GetOvershootSize() +
127 oldSpace->GetOutOfMemoryOvershootSize();
128 size_t currentCapacity = oldSpace->GetCommittedSize() + hugeObjectSpace->GetCommittedSize();
129 size_t idleCapacityLimit = static_cast<size_t>(maxCapacity * IDLE_SPACE_SIZE_LIMIT_RATE);
130 if (currentCapacity >= idleCapacityLimit) {
131 return true;
132 }
133
134 size_t oldSpaceAllocLimit = heap_->GetGlobalSpaceAllocLimit() + oldSpace->GetOvershootSize();
135 size_t idleOldSpaceAllocLimit = static_cast<size_t>(oldSpaceAllocLimit * IDLE_SPACE_SIZE_LIMIT_RATE);
136 if (heap_->GetHeapObjectSize() > idleOldSpaceAllocLimit) {
137 return true;
138 }
139 return false;
140 }
141
ReachIdleSharedPartialGCThresholds()142 bool IdleGCTrigger::ReachIdleSharedPartialGCThresholds()
143 {
144 size_t expectGlobalSizeLimit = sHeap_->GetGlobalSpaceAllocLimit() * IDLE_PATIAL_GC_SPACE_SIZE_LIMIT_RATE;
145 return sHeap_->GetHeapObjectSize() > expectGlobalSizeLimit;
146 }
147
ReachIdleSharedGCThresholds()148 bool IdleGCTrigger::ReachIdleSharedGCThresholds()
149 {
150 size_t expectSizeLimit = sHeap_->GetOldSpace()->GetInitialCapacity() * IDLE_SPACE_SIZE_LIMIT_RATE;
151 size_t currentOldSize = sHeap_->GetOldSpace()->GetHeapObjectSize();
152 size_t expectGlobalSizeLimit = sHeap_->GetGlobalSpaceAllocLimit() * IDLE_SPACE_SIZE_LIMIT_RATE;
153 return currentOldSize > expectSizeLimit || sHeap_->GetHeapObjectSize() > expectGlobalSizeLimit;
154 }
155
TryPostHandleMarkFinished()156 void IdleGCTrigger::TryPostHandleMarkFinished()
157 {
158 if (IsIdleState()) {
159 PostIdleGCTask(TRIGGER_IDLE_GC_TYPE::LOCAL_REMARK);
160 }
161 }
162
PostIdleGCTask(TRIGGER_IDLE_GC_TYPE gcType)163 bool IdleGCTrigger::PostIdleGCTask(TRIGGER_IDLE_GC_TYPE gcType)
164 {
165 if (triggerGCTaskCallback_ != nullptr && IsPossiblePostGCTask(gcType) && !heap_->NeedStopCollection()) {
166 std::pair<void*, uint8_t> data(heap_->GetEcmaVM(), static_cast<uint8_t>(gcType));
167 triggerGCTaskCallback_(data);
168 SetPostGCTask(gcType);
169 LOG_GC(INFO) << "IdleGCTrigger: post once " << GetGCTypeName(gcType) << " on idleTime";
170 return true;
171 }
172 LOG_GC(DEBUG) << "IdleGCTrigger: failed to post once " << GetGCTypeName(gcType);
173 return false;
174 }
175
CheckIdleYoungGC(bool isLongIdle) const176 bool IdleGCTrigger::CheckIdleYoungGC(bool isLongIdle) const
177 {
178 auto newSpace = heap_->GetNewSpace();
179 size_t allocatedSizeSinceGC = newSpace->GetAllocatedSizeSinceGC(newSpace->GetTop());
180 LOG_GC(DEBUG) << "IdleGCTrigger: check young GC semi Space size since gc:" << allocatedSizeSinceGC;
181 if (isLongIdle && allocatedSizeSinceGC > IDLE_MIN_EXPECT_RECLAIM_SIZE) {
182 return true;
183 }
184 size_t expectIdleLimitSize = (newSpace->GetInitialCapacity() + newSpace->GetOvershootSize()) *
185 IDLE_SPACE_SIZE_LIMIT_RATE;
186 return newSpace->GetObjectSize() >= expectIdleLimitSize;
187 }
188
CheckIdleLocalOldGC(const Heap * heap) const189 bool IdleGCTrigger::CheckIdleLocalOldGC(const Heap *heap) const
190 {
191 size_t afterGCSize = heap->GetHeapAliveSizeExcludesYoungAfterGC();
192 size_t currentSize = heap->GetHeapObjectSize() - heap->GetNewSpace()->GetHeapObjectSize();
193 size_t expectSize = std::max(static_cast<size_t>(afterGCSize * IDLE_SPACE_SIZE_MIN_INC_RATIO),
194 afterGCSize + IDLE_SPACE_SIZE_MIN_INC_STEP);
195 return currentSize >= expectSize;
196 }
197
CheckLocalBindingNativeTriggerOldGC() const198 bool IdleGCTrigger::CheckLocalBindingNativeTriggerOldGC() const
199 {
200 size_t nativeBindingSize = heap_->GetNativeBindingSize();
201 size_t nativeBindingSizeLastGC = heap_->GetNativeBindingSizeAfterLastGC();
202 if (nativeBindingSize <= nativeBindingSizeLastGC) {
203 return false;
204 }
205 size_t expectIncrementalNative = std::max(IDLE_BINDING_NATIVE_MIN_INC_SIZE,
206 static_cast<size_t>(nativeBindingSizeLastGC * IDLE_BINDING_NATIVE_MIN_INC_RATIO));
207 size_t incrementalNative = nativeBindingSize - nativeBindingSizeLastGC;
208 LOG_GC(DEBUG) << "IdleGCTrigger: check old GC expectIncrementalNative:" << expectIncrementalNative
209 << ";incrementalNative:" << incrementalNative;
210 return incrementalNative > expectIncrementalNative;
211 }
212
// Executes a previously posted idle GC task. Each branch re-validates its
// trigger conditions before actually collecting, since conditions may have
// changed between posting and execution; the pending-task flag for gcType
// is cleared afterwards either way.
void IdleGCTrigger::TryTriggerIdleGC(TRIGGER_IDLE_GC_TYPE gcType)
{
    LOG_GC(DEBUG) << "IdleGCTrigger: recv once notify of " << GetGCTypeName(gcType);
    switch (gcType) {
        case TRIGGER_IDLE_GC_TYPE::FULL_GC:
            // Prefer a full GC; fall back to a native-binding-driven full GC,
            // then to a young GC for a long idle period.
            if (CheckIdleOrHintFullGC<Heap>(heap_) && !heap_->NeedStopCollection()) {
                LOG_GC(INFO) << "IdleGCTrigger: trigger " << GetGCTypeName(gcType);
                heap_->CollectGarbage(TriggerGCType::FULL_GC, GCReason::IDLE);
            } else if (CheckLocalBindingNativeTriggerOldGC() && !heap_->NeedStopCollection()) {
                LOG_GC(INFO) << "IdleGCTrigger: trigger local old GC by native binding size.";
                heap_->CollectGarbage(TriggerGCType::FULL_GC, GCReason::IDLE_NATIVE);
            } else if (CheckIdleYoungGC(true) && !heap_->NeedStopCollection()) {
                LOG_GC(INFO) << "IdleGCTrigger: trigger young gc";
                heap_->CollectGarbage(TriggerGCType::YOUNG_GC, GCReason::IDLE);
            }
            break;
        case TRIGGER_IDLE_GC_TYPE::SHARED_CONCURRENT_PARTIAL_MARK:
            if (CheckIdleOrHintOldGC<SharedHeap>(sHeap_) && sHeap_->CheckCanTriggerConcurrentMarking(thread_)
                && !sHeap_->NeedStopCollection()) {
                LOG_GC(INFO) << "IdleGCTrigger: trigger " << GetGCTypeName(gcType);
                sHeap_->TriggerConcurrentMarking<TriggerGCType::SHARED_PARTIAL_GC, GCReason::IDLE>(thread_);
            }
            break;
        case TRIGGER_IDLE_GC_TYPE::SHARED_CONCURRENT_MARK:
            if (CheckIdleOrHintOldGC<SharedHeap>(sHeap_) && sHeap_->CheckCanTriggerConcurrentMarking(thread_)
                && !sHeap_->NeedStopCollection()) {
                LOG_GC(INFO) << "IdleGCTrigger: trigger " << GetGCTypeName(gcType);
                sHeap_->TriggerConcurrentMarking<TriggerGCType::SHARED_GC, GCReason::IDLE>(thread_);
            }
            break;
        case TRIGGER_IDLE_GC_TYPE::SHARED_FULL_GC:
            if (CheckIdleOrHintFullGC<SharedHeap>(sHeap_) && !sHeap_->NeedStopCollection()) {
                LOG_GC(INFO) << "IdleGCTrigger: trigger " << GetGCTypeName(gcType);
                sHeap_->CompressCollectGarbageNotWaiting<GCReason::IDLE>(thread_);
            }
            break;
        case TRIGGER_IDLE_GC_TYPE::LOCAL_CONCURRENT_YOUNG_MARK:
            if (CheckIdleYoungGC() && !heap_->NeedStopCollection()) {
                LOG_GC(INFO) << "IdleGCTrigger: trigger " << GetGCTypeName(gcType);
                TryTriggerLocalConcurrentMark(MarkType::MARK_YOUNG);
            }
            break;
        case TRIGGER_IDLE_GC_TYPE::LOCAL_CONCURRENT_FULL_MARK:
            if (CheckIdleLocalOldGC(heap_) && !heap_->NeedStopCollection()) {
                LOG_GC(INFO) << "IdleGCTrigger: trigger " << GetGCTypeName(gcType);
                TryTriggerLocalConcurrentMark(MarkType::MARK_FULL);
            }
            break;
        case TRIGGER_IDLE_GC_TYPE::LOCAL_REMARK:
            if (!heap_->NeedStopCollection()) {
                LOG_GC(INFO) << "IdleGCTrigger: trigger " << GetGCTypeName(gcType);
                TryTriggerHandleMarkFinished();
            }
            break;
        default: // LCOV_EXCL_BR_LINE
            // Unknown type: leave the pending-task bookkeeping untouched.
            LOG_GC(ERROR) << "IdleGCTrigger: this branch is unreachable";
            return;
    }
    ClearPostGCTask(gcType);
}
273 }