1 /**
2 * Copyright (c) 2023-2024 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "g1_analytics.h"
17 #include "libpandabase/utils/time.h"
18 #include "libpandabase/os/time.h"
19 #include "libpandabase/utils/type_converter.h"
20 #include "runtime/mem/gc/card_table.h"
21 #include <numeric>
22
23 namespace ark::mem {
// Seeds previousYoungCollectionEnd_ with the construction timestamp so the first
// mutator (application) interval can be computed at the first collection.
G1Analytics::G1Analytics(uint64_t now) : previousYoungCollectionEnd_(now) {}
25
ReportEvacuatedBytes(size_t bytes)26 void G1Analytics::ReportEvacuatedBytes(size_t bytes)
27 {
28 // Atomic with relaxed order reason: data race with no synchronization or ordering constraints imposed
29 // on other reads or writes
30 copiedBytes_.fetch_add(bytes, std::memory_order_relaxed);
31 }
32
ReportRemsetSize(size_t remsetSize,size_t remsetRefsCount)33 void G1Analytics::ReportRemsetSize(size_t remsetSize, size_t remsetRefsCount)
34 {
35 remsetSize_ = remsetSize;
36 remsetRefsCount_ = remsetRefsCount;
37 }
38
ReportMarkingStart(uint64_t time)39 void G1Analytics::ReportMarkingStart(uint64_t time)
40 {
41 markingStart_ = time;
42 }
43
ReportMarkingEnd(uint64_t time,size_t remsetRefsCount)44 void G1Analytics::ReportMarkingEnd(uint64_t time, size_t remsetRefsCount)
45 {
46 markingEnd_ = time;
47 totalRemsetRefsCount_ = remsetRefsCount;
48 }
49
ReportScanDirtyCardsStart(uint64_t time)50 void G1Analytics::ReportScanDirtyCardsStart(uint64_t time)
51 {
52 scanDirtyCardsStart_ = time;
53 }
54
ReportScanDirtyCardsEnd(uint64_t time,size_t dirtyCardsCount)55 void G1Analytics::ReportScanDirtyCardsEnd(uint64_t time, size_t dirtyCardsCount)
56 {
57 scanDirtyCardsEnd_ = time;
58 dirtyCardsCount_ = dirtyCardsCount;
59 }
60
ReportEvacuationStart(uint64_t time)61 void G1Analytics::ReportEvacuationStart(uint64_t time)
62 {
63 evacuationStart_ = time;
64 }
65
ReportEvacuationEnd(uint64_t time)66 void G1Analytics::ReportEvacuationEnd(uint64_t time)
67 {
68 evacuationEnd_ = time;
69 }
70
ReportUpdateRefsStart(uint64_t time)71 void G1Analytics::ReportUpdateRefsStart(uint64_t time)
72 {
73 updateRefsStart_ = time;
74 }
75
ReportUpdateRefsEnd(uint64_t time)76 void G1Analytics::ReportUpdateRefsEnd(uint64_t time)
77 {
78 updateRefsEnd_ = time;
79 }
80
ReportPromotedRegion()81 void G1Analytics::ReportPromotedRegion()
82 {
83 // Atomic with relaxed order reason: data race with no synchronization or ordering constraints imposed
84 // on other reads or writes
85 promotedRegions_.fetch_add(1, std::memory_order_relaxed);
86 }
87
ReportLiveObjects(size_t num)88 void G1Analytics::ReportLiveObjects(size_t num)
89 {
90 // Atomic with relaxed order reason: data race with no synchronization or ordering constraints imposed
91 // on other reads or writes
92 liveObjects_.fetch_add(num, std::memory_order_relaxed);
93 }
94
ReportSurvivedBytesRatio(const CollectionSet & collectionSet)95 void G1Analytics::ReportSurvivedBytesRatio(const CollectionSet &collectionSet)
96 {
97 if (!collectionSet.Young().empty()) {
98 auto liveBytes = static_cast<double>(GetPromotedRegions() * DEFAULT_REGION_SIZE + GetEvacuatedBytes());
99 auto totalSize = collectionSet.Young().size() * DEFAULT_REGION_SIZE;
100 auto ratio = liveBytes / totalSize;
101 survivedBytesRatioSeq_.Add(ratio);
102 }
103 }
104
ReportScanRemsetTime(size_t remsetSize,uint64_t time)105 void G1Analytics::ReportScanRemsetTime(size_t remsetSize, uint64_t time)
106 {
107 remsetSize_ = remsetSize;
108 scanRemsetTime_ = time;
109 }
110
/// Returns the number of regions promoted during the current collection.
size_t G1Analytics::GetPromotedRegions() const
{
    // Atomic with relaxed order reason: data race with no synchronization or ordering constraints imposed
    // on other reads or writes
    return promotedRegions_.load(std::memory_order_relaxed);
}
117
/// Returns the number of bytes evacuated (copied) during the current collection.
size_t G1Analytics::GetEvacuatedBytes() const
{
    // Atomic with relaxed order reason: data race with no synchronization or ordering constraints imposed
    // on other reads or writes
    return copiedBytes_.load(std::memory_order_relaxed);
}
124
/// Predicts the mutator allocation rate (regions per microsecond, as recorded in
/// ReportCollectionEnd) from the historical sequence.
double G1Analytics::PredictAllocationRate() const
{
    return predictor_.Predict(allocationRateSeq_);
}
129
ReportCollectionStart(uint64_t time)130 void G1Analytics::ReportCollectionStart(uint64_t time)
131 {
132 currentYoungCollectionStart_ = time;
133 // Atomic with relaxed order reason: data race with no synchronization or ordering constraints imposed
134 // on other reads or writes
135 copiedBytes_.store(0, std::memory_order_relaxed);
136 // Atomic with relaxed order reason: data race with no synchronization or ordering constraints imposed
137 // on other reads or writes
138 promotedRegions_.store(0, std::memory_order_relaxed);
139 // Atomic with relaxed order reason: data race with no synchronization or ordering constraints imposed
140 // on other reads or writes
141 liveObjects_.store(0, std::memory_order_relaxed);
142 remsetSize_ = 0;
143 remsetRefsCount_ = 0;
144 totalRemsetRefsCount_ = 0;
145 }
146
147 template <typename T>
DumpMetric(const char * msg,T actual,T prediction)148 static void DumpMetric(const char *msg, T actual, T prediction)
149 {
150 auto error = actual > 0 ? PERCENT_100_D * (prediction - actual) / actual : std::numeric_limits<double>::quiet_NaN();
151 LOG(INFO, GC) << "G1Analytics metric: " << msg << " actual " << actual << " prediction " << prediction << " error "
152 << error << "%";
153 }
154
DumpPauseMetric(const char * msg,uint64_t actual,uint64_t prediction,uint64_t totalPause)155 static void DumpPauseMetric(const char *msg, uint64_t actual, uint64_t prediction, uint64_t totalPause)
156 {
157 auto error =
158 totalPause > 0 ? PERCENT_100_D * (prediction - actual) / totalPause : std::numeric_limits<double>::quiet_NaN();
159 LOG(INFO, GC) << "G1Analytics metric: " << msg << " actual " << actual << " prediction " << prediction << " error "
160 << error << "%";
161 }
162
/**
 * Finalizes per-collection statistics at the end of a pause: records the observed
 * allocation rate and, for non-explicit pure-young collections, updates the
 * phase statistics of the path that was taken (single-pass compaction vs marking).
 */
void G1Analytics::ReportCollectionEnd(GCTaskCause cause, uint64_t endTime, const CollectionSet &collectionSet,
                                      bool singlePassCompactionEnabled, bool dump)
{
    previousWasSinglePassCompaction_ = singlePassCompactionEnabled;
    auto edenLength = collectionSet.Young().size();
    // Mutator time between the previous collection end and this collection start,
    // converted to microseconds.
    auto appTime = (currentYoungCollectionStart_ - previousYoungCollectionEnd_) / ark::os::time::MICRO_TO_NANO;
    // Regions allocated per microsecond of mutator time (+inf if appTime == 0).
    auto allocationRate = static_cast<double>(edenLength) / appTime;
    auto pauseTime = (endTime - currentYoungCollectionStart_) / ark::os::time::MICRO_TO_NANO;

    if (dump && cause != GCTaskCause::HEAP_USAGE_THRESHOLD_CAUSE) {
        DumpMetrics(collectionSet, pauseTime, allocationRate);
    }

    allocationRateSeq_.Add(allocationRate);

    // Update phase statistics only for non-explicit collections whose collection
    // set is purely young (edenLength == collectionSet.size()) and non-empty.
    if (cause != GCTaskCause::EXPLICIT_CAUSE && edenLength == collectionSet.size() && edenLength > 0) {
        if (singlePassCompactionEnabled) {
            ReportSinglePassCompactionEnd(cause, pauseTime, edenLength);
        } else {
            ReportMarkingCollectionEnd(cause, pauseTime, edenLength);
        }
    }

    previousYoungCollectionEnd_ = endTime;
}
188
/**
 * Updates statistics sequences after a marking-based young collection:
 * live objects per region, copied-bytes stats, marking/update-refs rates,
 * promotion ratio, "other" pause time, dirty-card scan rate and remset-ref stats.
 */
void G1Analytics::ReportMarkingCollectionEnd(GCTaskCause cause, uint64_t pauseTime, size_t edenLength)
{
    ASSERT(edenLength != 0);
    auto liveObjectsPerRegion = static_cast<double>(liveObjects_) / edenLength;
    liveObjectsSeq_.Add(liveObjectsPerRegion);

    auto evacuationTime = (evacuationEnd_ - evacuationStart_) / ark::os::time::MICRO_TO_NANO;
    // Regions actually compacted (copied) = eden regions minus whole-region promotions.
    auto compactedRegions = edenLength - promotedRegions_;
    if (compactedRegions > 0) {
        UpdateCopiedBytesStat(compactedRegions);
        // Attribute only the non-promotion part of evacuation time to copying.
        auto estimatedPromotionTime = EstimatePromotionTimeInMicros(promotedRegions_);
        if (evacuationTime > estimatedPromotionTime) {
            UpdateCopiedBytesRateStat(evacuationTime - estimatedPromotionTime);
        }
    }

    // Marking/update-refs rates are measured in objects traversed per microsecond.
    auto traversedObjects = liveObjects_ + totalRemsetRefsCount_;
    auto markingTime = (markingEnd_ - markingStart_) / ark::os::time::MICRO_TO_NANO;
    auto markingRate = static_cast<double>(traversedObjects) / markingTime;
    markingRateSeq_.Add(markingRate);

    auto updateRefsTime = (updateRefsEnd_ - updateRefsStart_) / ark::os::time::MICRO_TO_NANO;
    auto updateRefsRate = static_cast<double>(traversedObjects) / updateRefsTime;
    updateRefsRateSeq_.Add(updateRefsRate);

    ASSERT(edenLength != 0);
    promotionSeq_.Add(static_cast<double>(promotedRegions_) / edenLength);

    // "Other" time = whatever part of the pause is not accounted to the three
    // measured phases.
    auto pauseTimeSum = markingTime + evacuationTime + updateRefsTime;
    auto otherTime = pauseTime - pauseTimeSum;
    otherSeq_[MARKING_COLLECTION].Add(otherTime);

    if (dirtyCardsCount_ > 0) {
        auto scanDirtyCardsTime = (scanDirtyCardsEnd_ - scanDirtyCardsStart_) / ark::os::time::MICRO_TO_NANO;
        scanDirtyCardsRateSeq_.Add(static_cast<double>(dirtyCardsCount_) / scanDirtyCardsTime);
    }

    if (cause != GCTaskCause::HEAP_USAGE_THRESHOLD_CAUSE) {
        // it can be too early after previous pause and skew statistics
        remsetRefsSeq_.Add(totalRemsetRefsCount_);
    }

    if (remsetSize_ > 0) {
        remsetRefsPerChunkSeq_.Add(static_cast<double>(remsetRefsCount_) / remsetSize_);
    }
}
235
/**
 * Updates statistics sequences after a single-pass compaction young collection:
 * copied-bytes stats, remset size/scan rate and the "other" pause time.
 */
void G1Analytics::ReportSinglePassCompactionEnd([[maybe_unused]] GCTaskCause cause, uint64_t pauseTime,
                                                size_t edenLength)
{
    ASSERT(edenLength != 0);
    auto evacuationTime = (evacuationEnd_ - evacuationStart_) / ark::os::time::MICRO_TO_NANO;
    // In single-pass compaction every eden region is compacted (no promotion).
    UpdateCopiedBytesStat(edenLength);
    UpdateCopiedBytesRateStat(evacuationTime);
    remsetSizeSeq_.Add(static_cast<double>(remsetSize_) / edenLength);
    auto scanRemsetTime = scanRemsetTime_ / ark::os::time::MICRO_TO_NANO;
    remsetScanRateSeq_.Add(static_cast<double>(remsetSize_) / scanRemsetTime);
    // "Other" time = pause minus the two measured phases.
    auto pauseTimeSum = evacuationTime + scanRemsetTime;
    auto otherTime = pauseTime - pauseTimeSum;
    otherSeq_[SINGLE_PASS_COMPACTION].Add(otherTime);
}
250
UpdateCopiedBytesStat(size_t compactedRegions)251 void G1Analytics::UpdateCopiedBytesStat(size_t compactedRegions)
252 {
253 ASSERT(compactedRegions != 0);
254 auto copiedBytesPerRegion = static_cast<double>(copiedBytes_) / compactedRegions;
255 copiedBytesSeq_.Add(copiedBytesPerRegion);
256 }
257
UpdateCopiedBytesRateStat(uint64_t compactionTime)258 void G1Analytics::UpdateCopiedBytesRateStat(uint64_t compactionTime)
259 {
260 ASSERT(compactionTime != 0);
261 auto copyingBytesRate = static_cast<double>(copiedBytes_) / compactionTime;
262 copyingBytesRateSeq_.Add(copyingBytesRate);
263 }
264
/**
 * Logs actual vs predicted values for the last collection: allocation rate,
 * per-phase metrics of the path taken, and total pause-time predictions.
 */
void G1Analytics::DumpMetrics(const CollectionSet &collectionSet, uint64_t pauseTime, double allocationRate) const
{
    // Rates are per region internally; scale by region size for readable output.
    DumpMetric("allocation_rate", allocationRate * DEFAULT_REGION_SIZE, PredictAllocationRate() * DEFAULT_REGION_SIZE);

    auto edenLength = collectionSet.Young().size();
    auto predictedYoungPause = PredictYoungCollectionTimeInMicros(edenLength);

    if (previousWasSinglePassCompaction_) {
        DumpSinglePassCompactionMetrics(edenLength, pauseTime);
    } else {
        DumpMarkingCollectionMetrics(edenLength, pauseTime);
    }

    DumpMetric("young_pause_time", pauseTime, predictedYoungPause);
    // A mixed collection also contains tenured regions, so the set is larger
    // than its young part.
    if (edenLength < collectionSet.size()) {
        DumpMetric("mixed_pause_time", pauseTime, predictedMixedPause_);
    }
}
283
/**
 * Logs actual vs predicted metrics for a single-pass compaction collection:
 * copied bytes, copying rate/time, remset scan time and residual "other" time.
 */
void G1Analytics::DumpSinglePassCompactionMetrics(size_t edenLength, uint64_t pauseTime) const
{
    ASSERT(edenLength != 0);
    auto evacuationTime = (evacuationEnd_ - evacuationStart_) / ark::os::time::MICRO_TO_NANO;
    auto copiedBytesPerRegion = static_cast<double>(copiedBytes_) / edenLength;
    DumpMetric("copied_bytes_per_region", copiedBytesPerRegion, predictor_.Predict(copiedBytesSeq_));
    auto copyingBytesRate = static_cast<double>(copiedBytes_) / evacuationTime;
    DumpMetric("copying_bytes_rate", copyingBytesRate, predictor_.Predict(copyingBytesRateSeq_));
    auto expectedCopiedBytes = edenLength * predictor_.Predict(copiedBytesSeq_);
    auto expectedCopyingTime = PredictCopyingTimeInMicros(expectedCopiedBytes);
    DumpPauseMetric("copying_time", evacuationTime, expectedCopyingTime, pauseTime);
    auto scanRemsetTime = scanRemsetTime_ / ark::os::time::MICRO_TO_NANO;
    DumpPauseMetric("scan_remset_time", scanRemsetTime, PredictRemsetScanTimeInMicros(edenLength), pauseTime);
    // NOTE(review): otherTime is unsigned; if evacuation + remset scan exceed the
    // measured pause this wraps around — presumably the phases always fit inside
    // the pause; confirm.
    auto otherTime = pauseTime - evacuationTime - scanRemsetTime;
    DumpPauseMetric("other_time", otherTime, PredictOtherTime(SINGLE_PASS_COMPACTION), pauseTime);
}
300
/**
 * Logs actual vs predicted metrics for a marking-based collection: remset refs,
 * live objects, compaction/promotion split, copying, marking and update-refs
 * rates and times, and the residual "other" time.
 */
void G1Analytics::DumpMarkingCollectionMetrics(size_t edenLength, uint64_t pauseTime) const
{
    auto expectedRemsetRefsCount = predictor_.Predict(remsetRefsSeq_);
    DumpMetric("total_remset_refs_count", static_cast<double>(totalRemsetRefsCount_), expectedRemsetRefsCount);
    DumpMetric("remset_refs_count", static_cast<double>(remsetRefsCount_), expectedRemsetRefsCount);
    auto liveObjectsPerRegion =
        edenLength > 0 ? static_cast<double>(liveObjects_) / edenLength : std::numeric_limits<double>::quiet_NaN();
    DumpMetric("live_objects_per_region", liveObjectsPerRegion, predictor_.Predict(liveObjectsSeq_));

    auto expectedLiveObjects = edenLength * predictor_.Predict(liveObjectsSeq_);
    DumpMetric("live_objects", static_cast<double>(liveObjects_), expectedLiveObjects);

    auto evacuationTime = (evacuationEnd_ - evacuationStart_) / ark::os::time::MICRO_TO_NANO;

    auto compactedRegions = edenLength - promotedRegions_;
    auto expectedPromotedRegions = PredictPromotedRegions(edenLength);
    auto expectedCompactedRegions = edenLength - expectedPromotedRegions;
    DumpMetric("compacted_regions", static_cast<double>(compactedRegions), expectedCompactedRegions);

    auto copiedBytesPerRegion = compactedRegions > 0 ? static_cast<double>(copiedBytes_) / compactedRegions : 0;
    DumpMetric("copied_bytes_per_region", copiedBytesPerRegion, predictor_.Predict(copiedBytesSeq_));

    // If everything was promoted, all evacuation time is promotion time.
    auto promotionTime =
        promotedRegions_ == edenLength ? evacuationTime : EstimatePromotionTimeInMicros(promotedRegions_);
    auto copyingTime = evacuationTime > promotionTime ? evacuationTime - promotionTime : 0;

    if (copyingTime > 0) {
        auto copyingBytesRate = static_cast<double>(copiedBytes_) / copyingTime;
        DumpMetric("copying_bytes_rate", copyingBytesRate, predictor_.Predict(copyingBytesRateSeq_));
    }

    auto expectedPromotionTime = EstimatePromotionTimeInMicros(expectedPromotedRegions);
    DumpPauseMetric("promotion_time", promotionTime, expectedPromotionTime, pauseTime);

    auto expectedCopiedBytes = expectedCompactedRegions * predictor_.Predict(copiedBytesSeq_);
    auto expectedCopyingTime = PredictCopyingTimeInMicros(expectedCopiedBytes);
    DumpPauseMetric("copying_time", copyingTime, expectedCopyingTime, pauseTime);
    DumpPauseMetric("evacuation_time", evacuationTime, expectedCopyingTime + expectedPromotionTime, pauseTime);

    // NOTE(review): here traversedObjects uses remsetRefsCount_, while
    // ReportMarkingCollectionEnd uses totalRemsetRefsCount_ for the same rate —
    // presumably intentional, but worth confirming.
    auto traversedObjects = liveObjects_ + remsetRefsCount_;
    auto markingTime = (markingEnd_ - markingStart_) / ark::os::time::MICRO_TO_NANO;
    auto markingRate = static_cast<double>(traversedObjects) / markingTime;
    DumpMetric("marking_rate", markingRate, predictor_.Predict(markingRateSeq_));
    auto expectedMarkingTime = PredictMarkingTimeInMicros(expectedLiveObjects, expectedRemsetRefsCount);
    DumpPauseMetric("marking_time", markingTime, expectedMarkingTime, pauseTime);

    auto updateRefsTime = (updateRefsEnd_ - updateRefsStart_) / ark::os::time::MICRO_TO_NANO;
    auto updateRefsRate = static_cast<double>(traversedObjects) / updateRefsTime;
    DumpMetric("update_refs_rate", updateRefsRate, predictor_.Predict(updateRefsRateSeq_));
    auto expectedUpdateRefsTime = PredictUpdateRefsTimeInMicros(expectedLiveObjects, expectedRemsetRefsCount);
    DumpPauseMetric("update_refs_time", updateRefsTime, expectedUpdateRefsTime, pauseTime);

    auto otherTime = pauseTime - markingTime - evacuationTime - updateRefsTime;
    DumpPauseMetric("other_time", otherTime, PredictOtherTime(MARKING_COLLECTION), pauseTime);
}
356
PredictYoungCollectionTimeInMicros(size_t edenLength) const357 uint64_t G1Analytics::PredictYoungCollectionTimeInMicros(size_t edenLength) const
358 {
359 ASSERT(edenLength > 0);
360 if (previousWasSinglePassCompaction_) {
361 return PredictYoungSinglePassCompactionTimeInMicros(edenLength);
362 }
363
364 auto expectedRemsetRefsCount = predictor_.Predict(remsetRefsSeq_);
365 return PredictYoungMarkingCollectionTimeInMicros(edenLength, expectedRemsetRefsCount);
366 }
367
PredictYoungCollectionTimeInMicros(const CollectionSet & collectionSet) const368 uint64_t G1Analytics::PredictYoungCollectionTimeInMicros(const CollectionSet &collectionSet) const
369 {
370 ASSERT(collectionSet.Young().size() == collectionSet.size());
371 auto edenLength = collectionSet.Young().size();
372 if (previousWasSinglePassCompaction_) {
373 return PredictYoungSinglePassCompactionTimeInMicros(edenLength);
374 }
375
376 auto remsetSize = std::accumulate(collectionSet.begin(), collectionSet.end(), 0,
377 [](size_t acc, auto *region) { return acc + region->GetRemSetSize(); });
378 auto expectedRemsetRefsCount = PredictRemsetRefsCount(remsetSize);
379 return PredictYoungMarkingCollectionTimeInMicros(edenLength, expectedRemsetRefsCount);
380 }
381
PredictYoungMarkingCollectionTimeInMicros(size_t edenLength,size_t expectedRemsetRefsCount) const382 uint64_t G1Analytics::PredictYoungMarkingCollectionTimeInMicros(size_t edenLength, size_t expectedRemsetRefsCount) const
383 {
384 auto expectedPromotedRegions = PredictPromotedRegions(edenLength);
385 auto expectedCompactedRegions = edenLength - expectedPromotedRegions;
386 auto expectedCopiedBytes = expectedCompactedRegions * predictor_.Predict(copiedBytesSeq_);
387 auto expectedLiveObjects = edenLength * predictor_.Predict(liveObjectsSeq_);
388 auto otherTime = PredictOtherTime(MARKING_COLLECTION);
389 return PredictMarkingTimeInMicros(expectedLiveObjects, expectedRemsetRefsCount) +
390 PredictCopyingTimeInMicros(expectedCopiedBytes) +
391 PredictUpdateRefsTimeInMicros(expectedLiveObjects, expectedRemsetRefsCount) +
392 EstimatePromotionTimeInMicros(expectedPromotedRegions) + otherTime;
393 }
394
PredictYoungSinglePassCompactionTimeInMicros(size_t edenLength) const395 uint64_t G1Analytics::PredictYoungSinglePassCompactionTimeInMicros(size_t edenLength) const
396 {
397 auto expectedCopiedBytes = edenLength * predictor_.Predict(copiedBytesSeq_);
398 auto otherTime = PredictOtherTime(SINGLE_PASS_COMPACTION);
399 return PredictCopyingTimeInMicros(expectedCopiedBytes) + PredictRemsetScanTimeInMicros(edenLength) + otherTime;
400 }
401
PredictRemsetScanTimeInMicros(size_t edenLength) const402 uint64_t G1Analytics::PredictRemsetScanTimeInMicros(size_t edenLength) const
403 {
404 auto expectedRemsetSize = edenLength * predictor_.Predict(remsetSizeSeq_);
405 return PredictTime(expectedRemsetSize, remsetScanRateSeq_);
406 }
407
/**
 * Predicts the collection time (microseconds) of a single old (tenured) region,
 * estimating its live object count from live bytes and the allocation counters.
 */
uint64_t G1Analytics::PredictOldCollectionTimeInMicros(Region *region) const
{
    // Scale the allocated-object count by the live fraction of allocated bytes.
    // NOTE(review): divides by GetAllocatedBytes() — presumably non-zero for any
    // candidate region; confirm.
    auto expectedLiveObjects = region->GetLiveBytes() * region->GetAllocatedObjects() / region->GetAllocatedBytes();
    return PredictOldCollectionTimeInMicros(region->GetRemSetSize(), region->GetLiveBytes(), expectedLiveObjects);
}
413
PredictOldCollectionTimeInMicros(size_t remsetSize,size_t liveBytes,size_t liveObjects) const414 uint64_t G1Analytics::PredictOldCollectionTimeInMicros(size_t remsetSize, size_t liveBytes, size_t liveObjects) const
415 {
416 auto expectedRemsetRefsCount = PredictRemsetRefsCount(remsetSize);
417 return PredictMarkingTimeInMicros(liveObjects, expectedRemsetRefsCount) + PredictCopyingTimeInMicros(liveBytes);
418 }
PredictScanDirtyCardsTime(size_t dirtyCardsCount) const419 uint64_t G1Analytics::PredictScanDirtyCardsTime(size_t dirtyCardsCount) const
420 {
421 if (dirtyCardsCount == 0) {
422 return 0;
423 }
424 return PredictTime(dirtyCardsCount, scanDirtyCardsRateSeq_);
425 }
426
PredictRemsetRefsCount(size_t remsetSize) const427 size_t G1Analytics::PredictRemsetRefsCount(size_t remsetSize) const
428 {
429 return predictor_.Predict(remsetRefsPerChunkSeq_) * remsetSize;
430 }
431
PredictPromotedRegions(size_t edenLength) const432 double G1Analytics::PredictPromotedRegions(size_t edenLength) const
433 {
434 return predictor_.Predict(promotionSeq_) * edenLength;
435 }
436
EstimatePromotionTimeInMicros(size_t promotedRegions) const437 uint64_t G1Analytics::EstimatePromotionTimeInMicros(size_t promotedRegions) const
438 {
439 return promotionCost_ * promotedRegions;
440 }
441
PredictUpdateRefsTimeInMicros(size_t liveObjects,size_t remsetRefsCount) const442 uint64_t G1Analytics::PredictUpdateRefsTimeInMicros(size_t liveObjects, size_t remsetRefsCount) const
443 {
444 return PredictTime(liveObjects + remsetRefsCount, updateRefsRateSeq_);
445 }
446
PredictMarkingTimeInMicros(size_t liveObjects,size_t remsetRefsCount) const447 uint64_t G1Analytics::PredictMarkingTimeInMicros(size_t liveObjects, size_t remsetRefsCount) const
448 {
449 return PredictTime(liveObjects + remsetRefsCount, markingRateSeq_);
450 }
451
PredictCopyingTimeInMicros(size_t copiedBytes) const452 uint64_t G1Analytics::PredictCopyingTimeInMicros(size_t copiedBytes) const
453 {
454 return PredictTime(copiedBytes, copyingBytesRateSeq_);
455 }
456
/// Predicts the fraction of young bytes expected to survive the next collection.
double G1Analytics::PredictSurvivedBytesRatio() const
{
    return predictor_.Predict(survivedBytesRatioSeq_);
}
461
/// Predicts the unaccounted ("other") pause time for the given collection type.
uint64_t G1Analytics::PredictOtherTime(StatType type) const
{
    return predictor_.Predict(otherSeq_[type]);
}
466 } // namespace ark::mem
467