1 /**
2 * Copyright (c) 2023-2024 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "g1_analytics.h"
17 #include "libpandabase/utils/time.h"
18 #include "libpandabase/os/time.h"
19 #include "libpandabase/utils/type_converter.h"
20 #include "runtime/mem/gc/card_table.h"
21
22 namespace ark::mem {
G1Analytics(uint64_t now)23 G1Analytics::G1Analytics(uint64_t now) : previousYoungCollectionEnd_(now) {}
24
ReportEvacuatedBytes(size_t bytes)25 void G1Analytics::ReportEvacuatedBytes(size_t bytes)
26 {
27 // Atomic with relaxed order reason: data race with no synchronization or ordering constraints imposed
28 // on other reads or writes
29 copiedBytes_.fetch_add(bytes, std::memory_order_relaxed);
30 }
31
ReportRemsetSize(size_t remsetSize,size_t remsetRefsCount)32 void G1Analytics::ReportRemsetSize(size_t remsetSize, size_t remsetRefsCount)
33 {
34 remsetSize_ = remsetSize;
35 remsetRefsCount_ = remsetRefsCount;
36 }
37
ReportMarkingStart(uint64_t time)38 void G1Analytics::ReportMarkingStart(uint64_t time)
39 {
40 markingStart_ = time;
41 }
42
ReportMarkingEnd(uint64_t time,size_t remsetRefsCount)43 void G1Analytics::ReportMarkingEnd(uint64_t time, size_t remsetRefsCount)
44 {
45 markingEnd_ = time;
46 totalRemsetRefsCount_ = remsetRefsCount;
47 }
48
ReportScanDirtyCardsStart(uint64_t time)49 void G1Analytics::ReportScanDirtyCardsStart(uint64_t time)
50 {
51 scanDirtyCardsStart_ = time;
52 }
53
ReportScanDirtyCardsEnd(uint64_t time,size_t dirtyCardsCount)54 void G1Analytics::ReportScanDirtyCardsEnd(uint64_t time, size_t dirtyCardsCount)
55 {
56 scanDirtyCardsEnd_ = time;
57 dirtyCardsCount_ = dirtyCardsCount;
58 }
59
ReportEvacuationStart(uint64_t time)60 void G1Analytics::ReportEvacuationStart(uint64_t time)
61 {
62 evacuationStart_ = time;
63 }
64
ReportEvacuationEnd(uint64_t time)65 void G1Analytics::ReportEvacuationEnd(uint64_t time)
66 {
67 evacuationEnd_ = time;
68 }
69
ReportUpdateRefsStart(uint64_t time)70 void G1Analytics::ReportUpdateRefsStart(uint64_t time)
71 {
72 updateRefsStart_ = time;
73 }
74
ReportUpdateRefsEnd(uint64_t time)75 void G1Analytics::ReportUpdateRefsEnd(uint64_t time)
76 {
77 updateRefsEnd_ = time;
78 }
79
ReportPromotedRegion()80 void G1Analytics::ReportPromotedRegion()
81 {
82 // Atomic with relaxed order reason: data race with no synchronization or ordering constraints imposed
83 // on other reads or writes
84 promotedRegions_.fetch_add(1, std::memory_order_relaxed);
85 }
86
ReportLiveObjects(size_t num)87 void G1Analytics::ReportLiveObjects(size_t num)
88 {
89 // Atomic with relaxed order reason: data race with no synchronization or ordering constraints imposed
90 // on other reads or writes
91 liveObjects_.fetch_add(num, std::memory_order_relaxed);
92 }
93
ReportSurvivedBytesRatio(const CollectionSet & collectionSet)94 void G1Analytics::ReportSurvivedBytesRatio(const CollectionSet &collectionSet)
95 {
96 if (!collectionSet.Young().empty()) {
97 auto liveBytes = static_cast<double>(GetPromotedRegions() * DEFAULT_REGION_SIZE + GetEvacuatedBytes());
98 auto totalSize = collectionSet.Young().size() * DEFAULT_REGION_SIZE;
99 auto ratio = liveBytes / totalSize;
100 survivedBytesRatioSeq_.Add(ratio);
101 }
102 }
103
GetPromotedRegions() const104 size_t G1Analytics::GetPromotedRegions() const
105 {
106 // Atomic with relaxed order reason: data race with no synchronization or ordering constraints imposed
107 // on other reads or writes
108 return promotedRegions_.load(std::memory_order_relaxed);
109 }
110
GetEvacuatedBytes() const111 size_t G1Analytics::GetEvacuatedBytes() const
112 {
113 // Atomic with relaxed order reason: data race with no synchronization or ordering constraints imposed
114 // on other reads or writes
115 return copiedBytes_.load(std::memory_order_relaxed);
116 }
117
PredictAllocationRate() const118 double G1Analytics::PredictAllocationRate() const
119 {
120 return predictor_.Predict(allocationRateSeq_);
121 }
122
ReportCollectionStart(uint64_t time)123 void G1Analytics::ReportCollectionStart(uint64_t time)
124 {
125 currentYoungCollectionStart_ = time;
126 // Atomic with relaxed order reason: data race with no synchronization or ordering constraints imposed
127 // on other reads or writes
128 copiedBytes_.store(0, std::memory_order_relaxed);
129 // Atomic with relaxed order reason: data race with no synchronization or ordering constraints imposed
130 // on other reads or writes
131 promotedRegions_.store(0, std::memory_order_relaxed);
132 // Atomic with relaxed order reason: data race with no synchronization or ordering constraints imposed
133 // on other reads or writes
134 liveObjects_.store(0, std::memory_order_relaxed);
135 remsetSize_ = 0;
136 remsetRefsCount_ = 0;
137 totalRemsetRefsCount_ = 0;
138 }
139
140 template <typename T>
DumpMetric(const char * msg,T actual,T prediction)141 static void DumpMetric(const char *msg, T actual, T prediction)
142 {
143 auto error = actual > 0 ? PERCENT_100_D * (prediction - actual) / actual : std::numeric_limits<double>::quiet_NaN();
144 LOG(INFO, GC) << "G1Analytics metric: " << msg << " actual " << actual << " prediction " << prediction << " error "
145 << error << "%";
146 }
147
DumpPauseMetric(const char * msg,uint64_t actual,uint64_t prediction,uint64_t totalPause)148 static void DumpPauseMetric(const char *msg, uint64_t actual, uint64_t prediction, uint64_t totalPause)
149 {
150 auto error =
151 totalPause > 0 ? PERCENT_100_D * (prediction - actual) / totalPause : std::numeric_limits<double>::quiet_NaN();
152 LOG(INFO, GC) << "G1Analytics metric: " << msg << " actual " << actual << " prediction " << prediction << " error "
153 << error << "%";
154 }
155
// Finish a collection cycle: feed every prediction sequence (allocation rate,
// live objects, copied bytes, marking/update-refs rates, promotion ratio,
// dirty-card scan rate, remset statistics) with the values observed during
// this pause, and optionally dump actual-vs-predicted metrics.
//
// @param cause         cause of the finished GC task; explicit and
//                      heap-usage-threshold collections are partially
//                      excluded from the statistics (see below)
// @param endTime       pause end timestamp (same clock as the Report*Start
//                      timestamps; divided by MICRO_TO_NANO below, so
//                      presumably nanoseconds — TODO confirm)
// @param collectionSet regions collected during this pause
// @param dump          when true, log actual-vs-predicted metrics
void G1Analytics::ReportCollectionEnd(GCTaskCause cause, uint64_t endTime, const CollectionSet &collectionSet,
                                      bool dump)
{
    auto edenLength = collectionSet.Young().size();
    // Mutator ("application") time since the previous collection, in microseconds
    auto appTime = (currentYoungCollectionStart_ - previousYoungCollectionEnd_) / ark::os::time::MICRO_TO_NANO;
    // Allocation rate is measured in eden regions per microsecond
    auto allocationRate = static_cast<double>(edenLength) / appTime;
    auto pauseTime = (endTime - currentYoungCollectionStart_) / ark::os::time::MICRO_TO_NANO;

    if (dump && cause != GCTaskCause::HEAP_USAGE_THRESHOLD_CAUSE) {
        DumpMetrics(collectionSet, pauseTime, allocationRate);
    }

    allocationRateSeq_.Add(allocationRate);

    // Update phase statistics only for non-explicit, purely-young (collection
    // set == young set), non-empty collections
    if (cause != GCTaskCause::EXPLICIT_CAUSE && edenLength == collectionSet.size() && edenLength > 0) {
        auto liveObjectsPerRegion = static_cast<double>(liveObjects_) / edenLength;
        liveObjectsSeq_.Add(liveObjectsPerRegion);

        auto evacuationTime = (evacuationEnd_ - evacuationStart_) / ark::os::time::MICRO_TO_NANO;
        auto compactedRegions = edenLength - promotedRegions_;
        if (compactedRegions > 0) {
            auto copiedBytesPerRegion = static_cast<double>(copiedBytes_) / compactedRegions;
            copiedBytesSeq_.Add(copiedBytesPerRegion);
            // Copying rate excludes the estimated time spent promoting regions;
            // it is only recorded when that estimate leaves a positive remainder
            auto estimatedPromotionTime = EstimatePromotionTimeInMicros(promotedRegions_);
            if (evacuationTime > estimatedPromotionTime) {
                auto copyingBytesRate = static_cast<double>(copiedBytes_) / (evacuationTime - estimatedPromotionTime);
                copyingBytesRateSeq_.Add(copyingBytesRate);
            }
        }

        // Marking and update-refs traverse live objects plus remset references
        auto traversedObjects = liveObjects_ + totalRemsetRefsCount_;
        auto markingTime = (markingEnd_ - markingStart_) / ark::os::time::MICRO_TO_NANO;
        auto markingRate = static_cast<double>(traversedObjects) / markingTime;
        markingRateSeq_.Add(markingRate);

        auto updateRefsTime = (updateRefsEnd_ - updateRefsStart_) / ark::os::time::MICRO_TO_NANO;
        auto updateRefsRate = static_cast<double>(traversedObjects) / updateRefsTime;
        updateRefsRateSeq_.Add(updateRefsRate);

        ASSERT(edenLength != 0);
        promotionSeq_.Add(static_cast<double>(promotedRegions_) / edenLength);

        // "Other" time = pause time not attributed to marking/evacuation/update-refs.
        // NOTE(review): unsigned subtraction — assumes pauseTime >= pauseTimeSum.
        auto pauseTimeSum = markingTime + evacuationTime + updateRefsTime;
        auto otherTime = pauseTime - pauseTimeSum;
        otherSeq_.Add(otherTime);

        if (dirtyCardsCount_ > 0) {
            auto scanDirtyCardsTime = (scanDirtyCardsEnd_ - scanDirtyCardsStart_) / ark::os::time::MICRO_TO_NANO;
            scanDirtyCardsRateSeq_.Add(static_cast<double>(dirtyCardsCount_) / scanDirtyCardsTime);
        }

        if (cause != GCTaskCause::HEAP_USAGE_THRESHOLD_CAUSE) {
            // it can be too early after previous pause and skew statistics
            remsetRefsSeq_.Add(totalRemsetRefsCount_);
        }

        if (remsetSize_ > 0) {
            remsetRefsPerChunkSeq_.Add(static_cast<double>(remsetRefsCount_) / remsetSize_);
        }
    }

    previousYoungCollectionEnd_ = endTime;
}
219
// Log actual-vs-predicted values for every metric of the finished pause.
// Pure diagnostics: reads the recorded counters/timestamps and the prediction
// sequences but modifies no analytics state.
void G1Analytics::DumpMetrics(const CollectionSet &collectionSet, uint64_t pauseTime, double allocationRate) const
{
    // Rates are scaled by the region size to report bytes instead of regions
    DumpMetric("allocation_rate", allocationRate * DEFAULT_REGION_SIZE, PredictAllocationRate() * DEFAULT_REGION_SIZE);

    auto expectedRemsetRefsCount = predictor_.Predict(remsetRefsSeq_);
    DumpMetric("total_remset_refs_count", static_cast<double>(totalRemsetRefsCount_), expectedRemsetRefsCount);
    DumpMetric("remset_refs_count", static_cast<double>(remsetRefsCount_), expectedRemsetRefsCount);

    auto edenLength = collectionSet.Young().size();
    auto predictedYoungPause = PredictYoungCollectionTimeInMicros(edenLength);
    // NaN when the young set is empty (per-region value undefined)
    auto liveObjectsPerRegion =
        edenLength > 0 ? static_cast<double>(liveObjects_) / edenLength : std::numeric_limits<double>::quiet_NaN();
    DumpMetric("live_objects_per_region", liveObjectsPerRegion, predictor_.Predict(liveObjectsSeq_));

    auto expectedLiveObjects = edenLength * predictor_.Predict(liveObjectsSeq_);
    DumpMetric("live_objects", static_cast<double>(liveObjects_), expectedLiveObjects);

    auto evacuationTime = (evacuationEnd_ - evacuationStart_) / ark::os::time::MICRO_TO_NANO;

    auto compactedRegions = edenLength - promotedRegions_;
    auto expectedPromotedRegions = PredictPromotedRegions(edenLength);
    auto expectedCompactedRegions = edenLength - expectedPromotedRegions;
    DumpMetric("compacted_regions", static_cast<double>(compactedRegions), expectedCompactedRegions);

    auto copiedBytesPerRegion = compactedRegions > 0 ? static_cast<double>(copiedBytes_) / compactedRegions : 0;
    DumpMetric("copied_bytes_per_region", copiedBytesPerRegion, predictor_.Predict(copiedBytesSeq_));

    // If every eden region was promoted, the whole evacuation phase was promotion
    auto promotionTime =
        promotedRegions_ == edenLength ? evacuationTime : EstimatePromotionTimeInMicros(promotedRegions_);
    auto copyingTime = evacuationTime > promotionTime ? evacuationTime - promotionTime : 0;

    if (copyingTime > 0) {
        auto copyingBytesRate = static_cast<double>(copiedBytes_) / copyingTime;
        DumpMetric("copying_bytes_rate", copyingBytesRate, predictor_.Predict(copyingBytesRateSeq_));
    }

    auto expectedPromotionTime = EstimatePromotionTimeInMicros(expectedPromotedRegions);
    DumpPauseMetric("promotion_time", promotionTime, expectedPromotionTime, pauseTime);

    auto expectedCopiedBytes = expectedCompactedRegions * predictor_.Predict(copiedBytesSeq_);
    auto expectedCopyingTime = PredictCopyingTimeInMicros(expectedCopiedBytes);
    DumpPauseMetric("copying_time", copyingTime, expectedCopyingTime, pauseTime);
    DumpPauseMetric("evacuation_time", evacuationTime, expectedCopyingTime + expectedPromotionTime, pauseTime);

    // NOTE(review): uses remsetRefsCount_ here while ReportCollectionEnd uses
    // totalRemsetRefsCount_ for the same rates — confirm this is intentional
    auto traversedObjects = liveObjects_ + remsetRefsCount_;
    auto markingTime = (markingEnd_ - markingStart_) / ark::os::time::MICRO_TO_NANO;
    auto markingRate = static_cast<double>(traversedObjects) / markingTime;
    DumpMetric("marking_rate", markingRate, predictor_.Predict(markingRateSeq_));
    auto expectedMarkingTime = PredictMarkingTimeInMicros(expectedLiveObjects, expectedRemsetRefsCount);
    DumpPauseMetric("marking_time", markingTime, expectedMarkingTime, pauseTime);

    auto updateRefsTime = (updateRefsEnd_ - updateRefsStart_) / ark::os::time::MICRO_TO_NANO;
    auto updateRefsRate = static_cast<double>(traversedObjects) / updateRefsTime;
    DumpMetric("update_refs_rate", updateRefsRate, predictor_.Predict(updateRefsRateSeq_));
    auto expectedUpdateRefsTime = PredictUpdateRefsTimeInMicros(expectedLiveObjects, expectedRemsetRefsCount);
    DumpPauseMetric("update_refs_time", updateRefsTime, expectedUpdateRefsTime, pauseTime);

    // NOTE(review): unsigned subtraction — assumes the phases never exceed pauseTime
    auto otherTime = pauseTime - markingTime - evacuationTime - updateRefsTime;
    DumpPauseMetric("other_time", otherTime, static_cast<uint64_t>(predictor_.Predict(otherSeq_)), pauseTime);

    DumpMetric("young_pause_time", pauseTime, predictedYoungPause);
    // A collection set larger than its young part means this was a mixed collection
    if (edenLength < collectionSet.size()) {
        DumpMetric("mixed_pause_time", pauseTime, predictedMixedPause_);
    }
}
285
PredictYoungCollectionTimeInMicros(size_t edenLength) const286 uint64_t G1Analytics::PredictYoungCollectionTimeInMicros(size_t edenLength) const
287 {
288 auto expectedPromotedRegions = PredictPromotedRegions(edenLength);
289 auto expectedCompactedRegions = edenLength - expectedPromotedRegions;
290 auto expectedCopiedBytes = expectedCompactedRegions * predictor_.Predict(copiedBytesSeq_);
291 auto expectedLiveObjects = edenLength * predictor_.Predict(liveObjectsSeq_);
292 auto expectedRemsetRefsCount = predictor_.Predict(remsetRefsSeq_);
293 auto otherTime = predictor_.Predict(otherSeq_);
294 return PredictMarkingTimeInMicros(expectedLiveObjects, expectedRemsetRefsCount) +
295 PredictCopyingTimeInMicros(expectedCopiedBytes) +
296 PredictUpdateRefsTimeInMicros(expectedLiveObjects, expectedRemsetRefsCount) +
297 EstimatePromotionTimeInMicros(expectedPromotedRegions) + otherTime;
298 }
299
PredictYoungCollectionTimeInMicros(const CollectionSet & collectionSet) const300 uint64_t G1Analytics::PredictYoungCollectionTimeInMicros(const CollectionSet &collectionSet) const
301 {
302 ASSERT(collectionSet.Young().size() == collectionSet.size());
303 auto edenLength = collectionSet.Young().size();
304 auto expectedPromotedRegions = PredictPromotedRegions(edenLength);
305 auto expectedCompactedRegions = edenLength - expectedPromotedRegions;
306 auto expectedCopiedBytes = expectedCompactedRegions * predictor_.Predict(copiedBytesSeq_);
307 auto expectedLiveObjects = edenLength * predictor_.Predict(liveObjectsSeq_);
308 size_t remsetSize = 0;
309 std::for_each(collectionSet.begin(), collectionSet.end(),
310 [&remsetSize](auto *region) { remsetSize += region->GetRemSetSize(); });
311 auto expectedRemsetRefsCount = PredictRemsetRefsCount(remsetSize);
312 auto otherTime = predictor_.Predict(otherSeq_);
313 return PredictMarkingTimeInMicros(expectedLiveObjects, expectedRemsetRefsCount) +
314 PredictCopyingTimeInMicros(expectedCopiedBytes) +
315 PredictUpdateRefsTimeInMicros(expectedLiveObjects, expectedRemsetRefsCount) +
316 EstimatePromotionTimeInMicros(expectedPromotedRegions) + otherTime;
317 }
318
PredictOldCollectionTimeInMicros(Region * region) const319 uint64_t G1Analytics::PredictOldCollectionTimeInMicros(Region *region) const
320 {
321 auto expectedLiveObjects = region->GetLiveBytes() * region->GetAllocatedObjects() / region->GetAllocatedBytes();
322 return PredictOldCollectionTimeInMicros(region->GetRemSetSize(), region->GetLiveBytes(), expectedLiveObjects);
323 }
324
PredictOldCollectionTimeInMicros(size_t remsetSize,size_t liveBytes,size_t liveObjects) const325 uint64_t G1Analytics::PredictOldCollectionTimeInMicros(size_t remsetSize, size_t liveBytes, size_t liveObjects) const
326 {
327 auto expectedRemsetRefsCount = PredictRemsetRefsCount(remsetSize);
328 return PredictMarkingTimeInMicros(liveObjects, expectedRemsetRefsCount) + PredictCopyingTimeInMicros(liveBytes);
329 }
PredictScanDirtyCardsTime(size_t dirtyCardsCount) const330 uint64_t G1Analytics::PredictScanDirtyCardsTime(size_t dirtyCardsCount) const
331 {
332 if (dirtyCardsCount == 0) {
333 return 0;
334 }
335 return PredictTime(dirtyCardsCount, scanDirtyCardsRateSeq_);
336 }
337
PredictRemsetRefsCount(size_t remsetSize) const338 size_t G1Analytics::PredictRemsetRefsCount(size_t remsetSize) const
339 {
340 return predictor_.Predict(remsetRefsPerChunkSeq_) * remsetSize;
341 }
342
PredictPromotedRegions(size_t edenLength) const343 double G1Analytics::PredictPromotedRegions(size_t edenLength) const
344 {
345 return predictor_.Predict(promotionSeq_) * edenLength;
346 }
347
EstimatePromotionTimeInMicros(size_t promotedRegions) const348 uint64_t G1Analytics::EstimatePromotionTimeInMicros(size_t promotedRegions) const
349 {
350 return promotionCost_ * promotedRegions;
351 }
352
PredictUpdateRefsTimeInMicros(size_t liveObjects,size_t remsetRefsCount) const353 uint64_t G1Analytics::PredictUpdateRefsTimeInMicros(size_t liveObjects, size_t remsetRefsCount) const
354 {
355 return PredictTime(liveObjects + remsetRefsCount, updateRefsRateSeq_);
356 }
357
PredictMarkingTimeInMicros(size_t liveObjects,size_t remsetRefsCount) const358 uint64_t G1Analytics::PredictMarkingTimeInMicros(size_t liveObjects, size_t remsetRefsCount) const
359 {
360 return PredictTime(liveObjects + remsetRefsCount, markingRateSeq_);
361 }
362
PredictCopyingTimeInMicros(size_t copiedBytes) const363 uint64_t G1Analytics::PredictCopyingTimeInMicros(size_t copiedBytes) const
364 {
365 return PredictTime(copiedBytes, copyingBytesRateSeq_);
366 }
367
PredictSurvivedBytesRatio() const368 double G1Analytics::PredictSurvivedBytesRatio() const
369 {
370 return predictor_.Predict(survivedBytesRatioSeq_);
371 }
372 } // namespace ark::mem
373