/**
 * Copyright (c) 2021-2025 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "gtest/gtest.h"
#include <iostream>
#include "libpandabase/utils/utils.h"
#include "runtime/include/coretypes/string.h"
#include "runtime/include/runtime.h"
#include "runtime/include/panda_vm.h"
#include "runtime/handle_scope-inl.h"
#include "runtime/mem/gc/g1/g1-allocator.h"
#include "runtime/mem/gc/generational-gc-base.h"
#include "runtime/mem/malloc-proxy-allocator-inl.h"
#include "runtime/mem/mem_stats.h"
#include "runtime/mem/mem_stats_default.h"
#include "runtime/mem/runslots_allocator-inl.h"

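// Tests that generational GCs (GenGC and G1) report correct memory statistics for objects
// allocated in the young, tenured, large-object and humongous spaces.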
namespace ark::mem::test {
class MemStatsGenGCTest : public testing::Test {
public:
    using ObjVec = PandaVector<ObjectHeader *>;
    using HanVec = PandaVector<VMHandle<ObjectHeader *> *>;
    static constexpr size_t ROOT_MAX_SIZE = 100000U;
    static constexpr int MIX_TEST_ALLOC_TIMES = 5;
    static constexpr int FULL_TEST_ALLOC_TIMES = 2;

    static constexpr GCTaskCause MIXED_G1_GC_CAUSE = GCTaskCause::YOUNG_GC_CAUSE;
    static constexpr GCTaskCause FULL_GC_CAUSE = GCTaskCause::EXPLICIT_CAUSE;

    enum class TargetSpace {
        YOUNG,
        TENURED_REGULAR,
        /*
         * Some allocators have large objects, which are not the same as humongous ones: an object can be
         * smaller than the humongous threshold and still be allocated directly in the tenured space, for example.
         */
        TENURED_LARGE,
        HUMONGOUS
    };

    // This class allows iterating over the JIT and TLAB configurations
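    // Iteration order: (NO_JIT, TLAB) -> (JIT, TLAB) -> (NO_JIT, NO_TLAB); End() becomes true at (JIT, NO_TLAB)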
    class Config {
    public:
        enum class JITConfig : bool { NO_JIT = false, JIT };

        enum class TLABConfig : bool { NO_TLAB = false, TLAB };

        bool IsJITEnabled() const
        {
            return static_cast<bool>(jitCfg_);
        }

        bool IsTLABEnabled() const
        {
            return static_cast<bool>(tlabCfg_);
        }

        bool End() const
        {
            return jitCfg_ == JITConfig::JIT && tlabCfg_ == TLABConfig::NO_TLAB;
        }

        Config &operator++()
        {
            if (jitCfg_ == JITConfig::NO_JIT) {
                jitCfg_ = JITConfig::JIT;
            } else {
                jitCfg_ = JITConfig::NO_JIT;
                if (tlabCfg_ == TLABConfig::TLAB) {
                    tlabCfg_ = TLABConfig::NO_TLAB;
                } else {
                    tlabCfg_ = TLABConfig::TLAB;
                }
            }
            return *this;
        }

    private:
        JITConfig jitCfg_ {JITConfig::NO_JIT};
        TLABConfig tlabCfg_ {TLABConfig::TLAB};
    };

    class GCCounter : public GCListener {
    public:
        void GCStarted([[maybe_unused]] const GCTask &task, [[maybe_unused]] size_t heapSize) override
        {
            count++;
        }

        void GCFinished([[maybe_unused]] const GCTask &task, [[maybe_unused]] size_t heapSizeBeforeGc,
                        [[maybe_unused]] size_t heapSize) override
        {
        }

        // NOLINTNEXTLINE(misc-non-private-member-variables-in-classes)
        int count = 0;
    };

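    // Allocation parameters for a single target space; filled in by InitG1Gc()/InitGenGc()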
    struct GcData {
        size_t count;
        size_t minSize;
        size_t maxSize;
        bool checkOom;
    };

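    // Result of an allocation batch: totals allocated and the subset deliberately kept alive ("saved")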
    struct MemOpReport {
        size_t allocatedCount;
        size_t allocatedBytes;
        size_t savedCount;
        size_t savedBytes;
    };

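    // Pointers straight into the GC-internal MemStats counters, so tests can read the real freed/moved numbers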
    struct RealStatsLocations {
        uint32_t *youngFreedObjectsCount;
        uint64_t *youngFreedObjectsSize;
        uint32_t *youngMovedObjectsCount;
        uint64_t *youngMovedObjectsSize;
        uint32_t *tenuredFreedObjectsCount;
        uint64_t *tenuredFreedObjectsSize;
    };

    void SetupRuntime(const std::string &gcTypeParam, const Config &cfg)
    {
        RuntimeOptions options;
        options.SetShouldLoadBootPandaFiles(false);
        options.SetShouldInitializeIntrinsics(false);
        options.SetUseTlabForAllocations(cfg.IsTLABEnabled());
        options.SetGcType(gcTypeParam);
        options.SetGcTriggerType("debug-never");
        options.SetRunGcInPlace(true);
        options.SetCompilerEnableJit(cfg.IsJITEnabled());
        options.SetExplicitConcurrentGcEnabled(false);
        [[maybe_unused]] bool success = Runtime::Create(options);
        ASSERT(success);

        thread = ark::MTManagedThread::GetCurrent();
        gcType = Runtime::GetGCType(options, plugins::RuntimeTypeToLang(Runtime::GetRuntimeType()));
        [[maybe_unused]] auto gcLocal = thread->GetVM()->GetGC();
        ASSERT(gcLocal->GetType() == ark::mem::GCTypeFromString(gcTypeParam));
        ASSERT(gcLocal->IsGenerational());
        thread->ManagedCodeBegin();
    }

    void ResetRuntime()
    {
        DeleteHandles();
        internalAllocator->Delete(gccnt);
        thread->ManagedCodeEnd();
        bool success = Runtime::Destroy();
        ASSERT_TRUE(success) << "Cannot destroy Runtime";
    }

    template <typename F, size_t REPEAT, MemStatsGenGCTest::TargetSpace SPACE>
    ObjVec MakeAllocationsWithRepeats(size_t minSize, size_t maxSize, size_t count, size_t *allocated,
                                      size_t *requested, F spaceChecker, bool checkOomInTenured);

    void InitRoot();
    void MakeObjectsAlive(const ObjVec &objects, int every = 1);
    void MakeObjectsPermAlive(const ObjVec &objects, int every = 1);
    void MakeObjectsGarbage(size_t startIdx, size_t afterEndIdx, int every = 1);
    void DumpHandles();
    void DumpAliveObjects();
    void DeleteHandles();
    bool IsInYoung(uintptr_t addr);
    MemOpReport HelpAllocTenured();

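    // For G1, exact counts of freed young objects are maintained only when g1-track-freed-objects is enabled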
    bool NeedToCheckYoungFreedCount()
    {
        return (gcType != GCType::G1_GC) || Runtime::GetOptions().IsG1TrackFreedObjects();
    }

    template <class LanguageConfig>
    void PrepareTest();

    template <class LanguageConfig>
    typename GenerationalGC<LanguageConfig>::MemStats *GetGenMemStats();

    // Allocate a series of objects in a specific space. If DO_SAVE is true, a subsequence of the objects
    // is kept alive and stored into the roots array this->root.
    // If IS_SINGLE is true, only one object of unaligned size is allocated.
    // If IS_SINGLE is false, objects of different sizes are allocated in triplets, twice.
    // The saved subsequence then consists of 2 equal subsequences (2 of the 3 objects in each triplet are garbage).
    template <MemStatsGenGCTest::TargetSpace SPACE, bool DO_SAVE = false, bool IS_SINGLE = false>
    typename MemStatsGenGCTest::MemOpReport MakeAllocations();

    template <typename T>
    RealStatsLocations GetGenMemStatsDetails(T gms);

    void TearDown() override {}

    // NOLINTBEGIN(misc-non-private-member-variables-in-classes)
    ark::MTManagedThread *thread {};
    GCType gcType {};

    LanguageContext ctx {nullptr};
    ObjectAllocatorBase *objectAllocator {};
    mem::InternalAllocatorPtr internalAllocator;
    PandaVM *vm {};
    GC *gc {};
    std::vector<HanVec> handles;
    MemStatsType *ms {};
    GCStats *gcMs {};
    coretypes::Array *root = nullptr;
    size_t rootSize = 0;
    GCCounter *gccnt {};
    // NOLINTEND(misc-non-private-member-variables-in-classes)

private:
    template <MemStatsGenGCTest::TargetSpace SPACE>
    auto CreateSpaceCheck();

    template <MemStatsGenGCTest::TargetSpace SPACE>
    bool InitG1Gc(MemStatsGenGCTest::GcData &gcData);

    template <MemStatsGenGCTest::TargetSpace SPACE>
    bool InitGenGc(MemStatsGenGCTest::GcData &gcData);

    template <typename F, size_t REPEAT, MemStatsGenGCTest::TargetSpace SPACE, bool DO_SAVE>
    void MakeAllocationsWithSingleRepeat(MemStatsGenGCTest::GcData &gcData, MemStatsGenGCTest::MemOpReport &report,
                                         size_t &bytes, size_t &rawObjectsSize, [[maybe_unused]] F spaceCheck);

    template <typename F, size_t REPEAT, MemStatsGenGCTest::TargetSpace SPACE, bool DO_SAVE>
    void MakeAllocationsWithSeveralRepeat(MemStatsGenGCTest::GcData &gcData, MemStatsGenGCTest::MemOpReport &report,
                                          size_t &bytes, size_t &rawObjectsSize, [[maybe_unused]] F spaceCheck);
};

template <typename F, size_t REPEAT, MemStatsGenGCTest::TargetSpace SPACE>
MemStatsGenGCTest::ObjVec MemStatsGenGCTest::MakeAllocationsWithRepeats(size_t minSize, size_t maxSize, size_t count,
                                                                        size_t *allocated, size_t *requested,
                                                                        [[maybe_unused]] F spaceChecker,
                                                                        bool checkOomInTenured)
{
    ASSERT(minSize <= maxSize);
    *allocated = 0;
    *requested = 0;
    // Create an array of object templates with sizes stepping from minSize up to maxSize
    PandaVector<PandaString> objTemplates(count);
    size_t objSize = sizeof(coretypes::String) + minSize;
    for (size_t i = 0; i < count; ++i) {
        PandaString simpleString;
        simpleString.resize(objSize - sizeof(coretypes::String));
        objTemplates[i] = std::move(simpleString);
        objSize += (maxSize / count + i);  // +i to mess with the alignment
        if (objSize > maxSize) {
            objSize = maxSize;
        }
    }
    ObjVec result;
    result.reserve(count * REPEAT);
    for (size_t j = 0; j < count; ++j) {
        size_t size = objTemplates[j].length() + sizeof(coretypes::String);
        if (checkOomInTenured) {
            // Stop allocating once less than ~5MB of tenured space would remain free
            auto free =
                reinterpret_cast<GenerationalSpaces *>(objectAllocator->GetHeapSpace())->GetCurrentFreeTenuredSize();
            constexpr size_t FIVE_MB = 5000000U;
            if (size + FIVE_MB > free) {
                return result;
            }
        }
        for (size_t i = 0; i < REPEAT; ++i) {
            // Create a string of '\0's
            coretypes::String *stringObj =
                coretypes::String::CreateFromMUtf8(reinterpret_cast<const uint8_t *>(&objTemplates[j][0]),
                                                   objTemplates[j].length(), objTemplates[j].length(), true, ctx, vm);
            ASSERT(stringObj != nullptr);
            ASSERT(stringObj->GetLength() == objTemplates[j].length());
            ASSERT(spaceChecker(ToUintPtr(stringObj)) == true);
            if (gcType == GCType::G1_GC && SPACE == TargetSpace::HUMONGOUS) {
                // For humongous objects in G1 we account the whole region size instead of just the aligned object size
                Region *region = AddrToRegion(stringObj);
                *allocated += region->Size();
            } else {
                *allocated += GetAlignedObjectSize(size);
            }
            *requested += size;
            result.push_back(stringObj);
        }
    }
    return result;
}

template <typename F, size_t REPEAT, MemStatsGenGCTest::TargetSpace SPACE, bool DO_SAVE>
void MemStatsGenGCTest::MakeAllocationsWithSingleRepeat(MemStatsGenGCTest::GcData &gcData,
                                                        MemStatsGenGCTest::MemOpReport &report, size_t &bytes,
                                                        size_t &rawObjectsSize, [[maybe_unused]] F spaceCheck)
{
    ObjVec ov1 = MakeAllocationsWithRepeats<F, REPEAT, SPACE>(gcData.minSize + 1, gcData.maxSize, 1, &bytes,
                                                              &rawObjectsSize, spaceCheck, gcData.checkOom);
    report.allocatedCount += 1;
    report.allocatedBytes += bytes;
    if constexpr (DO_SAVE) {
        MakeObjectsAlive(ov1, 1);
        report.savedCount = report.allocatedCount;
        report.savedBytes = report.allocatedBytes;
    }
}

template <typename F, size_t REPEAT, MemStatsGenGCTest::TargetSpace SPACE, bool DO_SAVE>
void MemStatsGenGCTest::MakeAllocationsWithSeveralRepeat(MemStatsGenGCTest::GcData &gcData,
                                                         MemStatsGenGCTest::MemOpReport &report, size_t &bytes,
                                                         size_t &rawObjectsSize, [[maybe_unused]] F spaceCheck)
{
    ObjVec ov1 = MakeAllocationsWithRepeats<decltype(spaceCheck), 3U, SPACE>(
        gcData.minSize, gcData.maxSize, gcData.count, &bytes, &rawObjectsSize, spaceCheck, gcData.checkOom);
    report.allocatedCount += gcData.count * 3U;
    report.allocatedBytes += bytes;
    ObjVec ov2 = MakeAllocationsWithRepeats<decltype(spaceCheck), 3U, SPACE>(
        gcData.minSize, gcData.maxSize, gcData.count, &bytes, &rawObjectsSize, spaceCheck, gcData.checkOom);
    report.allocatedCount += gcData.count * 3U;
    report.allocatedBytes += bytes;
    if constexpr (DO_SAVE) {
        MakeObjectsAlive(ov1, 3_I);
        MakeObjectsAlive(ov2, 3_I);
        report.savedCount = report.allocatedCount / 3U;
        report.savedBytes = report.allocatedBytes / 3U;
    }
}

template <MemStatsGenGCTest::TargetSpace SPACE>
auto MemStatsGenGCTest::CreateSpaceCheck()
{
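    // For all tenured-like spaces we can only verify that the object is outside the young space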
    auto spaceCheck = [this](uintptr_t addr) -> bool {
        if constexpr (SPACE == TargetSpace::YOUNG) {
            return IsInYoung(addr);
        } else if constexpr (SPACE == TargetSpace::TENURED_REGULAR) {
            return !IsInYoung(addr);
        } else if constexpr (SPACE == TargetSpace::TENURED_LARGE) {
            return !IsInYoung(addr);
        } else if constexpr (SPACE == TargetSpace::HUMONGOUS) {
            return !IsInYoung(addr);
        }
        UNREACHABLE();
    };
    return spaceCheck;
}

void MemStatsGenGCTest::InitRoot()
{
    ClassLinker *classLinker = Runtime::GetCurrent()->GetClassLinker();
    Class *klass = classLinker->GetExtension(panda_file::SourceLang::PANDA_ASSEMBLY)
                       ->GetClass(ctx.GetStringArrayClassDescriptor());
    ASSERT_NE(klass, nullptr);
    root = coretypes::Array::Create(klass, ROOT_MAX_SIZE);
    rootSize = 0;
    MakeObjectsPermAlive({root});
}

template <MemStatsGenGCTest::TargetSpace SPACE>
bool MemStatsGenGCTest::InitG1Gc(MemStatsGenGCTest::GcData &gcData)
{
    auto g1Alloc = reinterpret_cast<ObjectAllocatorG1<MT_MODE_MULTI> *>(objectAllocator);
    // NOLINTNEXTLINE(readability-magic-numbers)
    gcData.count = 15U;
    if constexpr (SPACE == TargetSpace::YOUNG) {
        gcData.minSize = 0;
        gcData.maxSize = g1Alloc->GetYoungAllocMaxSize();
    } else if constexpr (SPACE == TargetSpace::TENURED_REGULAR) {
        gcData.minSize = g1Alloc->GetYoungAllocMaxSize() + 1;
        gcData.maxSize = g1Alloc->GetRegularObjectMaxSize();
        if (gcData.minSize >= gcData.maxSize) {
            // Allocator configuration disallows allocating directly in this space
            return false;
        }
    } else if constexpr (SPACE == TargetSpace::TENURED_LARGE) {
        gcData.minSize = g1Alloc->GetYoungAllocMaxSize() + 1;
        gcData.minSize = std::max(gcData.minSize, g1Alloc->GetRegularObjectMaxSize() + 1);
        gcData.maxSize = g1Alloc->GetLargeObjectMaxSize();
        if (gcData.minSize >= gcData.maxSize) {
            // Allocator configuration disallows allocating directly in this space
            return false;
        }
    } else {
        ASSERT(SPACE == TargetSpace::HUMONGOUS);
        gcData.count = 3U;
        gcData.minSize = g1Alloc->GetYoungAllocMaxSize() + 1;
        gcData.minSize = std::max(gcData.minSize, g1Alloc->GetRegularObjectMaxSize() + 1);
        gcData.minSize = std::max(gcData.minSize, g1Alloc->GetLargeObjectMaxSize() + 1);
        gcData.maxSize = gcData.minSize * 3U;
        gcData.checkOom = true;
    }
    return true;
}

template <MemStatsGenGCTest::TargetSpace SPACE>
bool MemStatsGenGCTest::InitGenGc(MemStatsGenGCTest::GcData &gcData)
{
    auto genAlloc = reinterpret_cast<ObjectAllocatorGen<MT_MODE_MULTI> *>(objectAllocator);
    // NOLINTNEXTLINE(readability-magic-numbers)
    gcData.count = 15U;
    if constexpr (SPACE == TargetSpace::YOUNG) {
        gcData.minSize = 0;
        gcData.maxSize = genAlloc->GetYoungAllocMaxSize();
    } else if constexpr (SPACE == TargetSpace::TENURED_REGULAR) {
        gcData.minSize = genAlloc->GetYoungAllocMaxSize() + 1;
        gcData.maxSize = genAlloc->GetRegularObjectMaxSize();
        if (gcData.minSize >= gcData.maxSize) {
            // Allocator configuration disallows allocating directly in this space
            return false;
        }
    } else if constexpr (SPACE == TargetSpace::TENURED_LARGE) {
        gcData.minSize = genAlloc->GetYoungAllocMaxSize() + 1;
        gcData.minSize = std::max(gcData.minSize, genAlloc->GetRegularObjectMaxSize() + 1);
        gcData.maxSize = genAlloc->GetLargeObjectMaxSize();
        if (gcData.minSize >= gcData.maxSize) {
            // Allocator configuration disallows allocating directly in this space
            return false;
        }
    } else {
        ASSERT(SPACE == TargetSpace::HUMONGOUS);
        gcData.count = 3U;
        gcData.minSize = genAlloc->GetYoungAllocMaxSize() + 1;
        gcData.minSize = std::max(gcData.minSize, genAlloc->GetRegularObjectMaxSize() + 1);
        gcData.minSize = std::max(gcData.minSize, genAlloc->GetLargeObjectMaxSize() + 1);
        gcData.maxSize = gcData.minSize * 3U;
        gcData.checkOom = true;
    }
    return true;
}

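// Stores every 'every'-th object into the root array so it remains reachable across GCs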
void MemStatsGenGCTest::MakeObjectsAlive(const ObjVec &objects, int every)
{
    int cnt = every;
    for (auto *obj : objects) {
        cnt--;
        if (cnt != 0) {
            continue;
        }
        root->Set(rootSize, obj);
        rootSize++;
        ASSERT(rootSize < ROOT_MAX_SIZE);
        cnt = every;
    }
}

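// Clears every 'every'-th root slot in [startIdx, afterEndIdx), making those objects unreachable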
void MemStatsGenGCTest::MakeObjectsGarbage(size_t startIdx, size_t afterEndIdx, int every)
{
    int cnt = every;
    for (size_t i = startIdx; i < afterEndIdx; ++i) {
        cnt--;
        if (cnt != 0) {
            continue;
        }
        root->Set(i, static_cast<ObjectHeader *>(nullptr));
        cnt = every;
    }
}

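// Pins every 'every'-th object with a VMHandle so it survives all GCs until DeleteHandles() is called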
void MemStatsGenGCTest::MakeObjectsPermAlive(const ObjVec &objects, int every)
{
    HanVec result;
    result.reserve(objects.size() / every);
    int cnt = every;
    for (auto *obj : objects) {
        cnt--;
        if (cnt != 0) {
            continue;
        }
        result.push_back(internalAllocator->New<VMHandle<ObjectHeader *>>(thread, obj));
        cnt = every;
    }
    handles.push_back(result);
}

void MemStatsGenGCTest::DumpHandles()
{
    for (auto &hv : handles) {
        for (auto *handle : hv) {
            std::cout << "vector " << (void *)&hv << " handle " << (void *)handle << " obj " << handle->GetPtr()
                      << std::endl;
        }
    }
}

void MemStatsGenGCTest::DumpAliveObjects()
{
    std::cout << "Alive root array : " << handles[0][0]->GetPtr() << std::endl;
    for (size_t i = 0; i < rootSize; ++i) {
        if (root->Get<ObjectHeader *>(i) != nullptr) {
            std::cout << "Alive idx " << i << " : " << root->Get<ObjectHeader *>(i) << std::endl;
        }
    }
}

void MemStatsGenGCTest::DeleteHandles()
{
    for (auto &hv : handles) {
        for (auto *handle : hv) {
            internalAllocator->Delete(handle);
        }
    }
    handles.clear();
}

template <class LanguageConfig>
void MemStatsGenGCTest::PrepareTest()
{
    if constexpr (std::is_same<LanguageConfig, ark::PandaAssemblyLanguageConfig>::value) {
        DeleteHandles();
        ctx = Runtime::GetCurrent()->GetLanguageContext(panda_file::SourceLang::PANDA_ASSEMBLY);
        objectAllocator = thread->GetVM()->GetGC()->GetObjectAllocator();
        vm = Runtime::GetCurrent()->GetPandaVM();
        internalAllocator = Runtime::GetCurrent()->GetClassLinker()->GetAllocator();
        gc = vm->GetGC();
        ms = vm->GetMemStats();
        gcMs = vm->GetGCStats();
        gccnt = internalAllocator->New<GCCounter>();
        gc->AddListener(gccnt);
        InitRoot();
    } else {
        UNREACHABLE();
    }
}

template <class LanguageConfig>
typename GenerationalGC<LanguageConfig>::MemStats *MemStatsGenGCTest::GetGenMemStats()
{
    // An explicit getter, because the typename has to be template-specialized
    return &reinterpret_cast<GenerationalGC<LanguageConfig> *>(gc)->memStats_;
}

bool MemStatsGenGCTest::IsInYoung(uintptr_t addr)
{
    switch (gcType) {
        case GCType::GEN_GC: {
            return objectAllocator->IsObjectInYoungSpace(reinterpret_cast<ObjectHeader *>(addr));
        }
        case GCType::G1_GC: {
            auto memPool = PoolManager::GetMmapMemPool();
            if (memPool->GetSpaceTypeForAddr(reinterpret_cast<ObjectHeader *>(addr)) != SpaceType::SPACE_TYPE_OBJECT) {
                return false;
            }
            return AddrToRegion(reinterpret_cast<ObjectHeader *>(addr))->HasFlag(RegionFlag::IS_EDEN);
        }
        default:
            UNREACHABLE();  // NYI
    }
    return false;
}

template <MemStatsGenGCTest::TargetSpace SPACE, bool DO_SAVE, bool IS_SINGLE>
typename MemStatsGenGCTest::MemOpReport MemStatsGenGCTest::MakeAllocations()
{
    [[maybe_unused]] int gcCnt = gccnt->count;
    MemStatsGenGCTest::MemOpReport report {};
    report.allocatedCount = 0;
    report.allocatedBytes = 0;
    report.savedCount = 0;
    report.savedBytes = 0;
    size_t bytes = 0;
    [[maybe_unused]] size_t rawObjectsSize;  // currently not tracked by memstats
    MemStatsGenGCTest::GcData gcData {};
    gcData.count = 0;
    gcData.minSize = 0;
    gcData.maxSize = 0;
    gcData.checkOom = false;
    size_t youngSize = reinterpret_cast<GenerationalSpaces *>(
                           reinterpret_cast<ObjectAllocatorGenBase *>(objectAllocator)->GetHeapSpace())
                           ->GetCurrentYoungSize();
    switch (gcType) {
        case GCType::GEN_GC: {
            if (!InitGenGc<SPACE>(gcData)) {
                return report;
            }
            break;
        }
        case GCType::G1_GC: {
            if (!InitG1Gc<SPACE>(gcData)) {
                return report;
            }
            break;
        }
        default:
            UNREACHABLE();
    }

    auto spaceCheck = CreateSpaceCheck<SPACE>();

    if constexpr (SPACE == TargetSpace::YOUNG) {
        // To prevent a young GC from triggering while we're allocating
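        // (at most 6 * count objects are allocated below: two batches of 3 * count each, hence the 6U divisor)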
        gcData.maxSize = std::min(youngSize / (gcData.count * 6U), gcData.maxSize);
    }

    if (IS_SINGLE) {
        MakeAllocationsWithSingleRepeat<decltype(spaceCheck), 1, SPACE, DO_SAVE>(gcData, report, bytes, rawObjectsSize,
                                                                                 spaceCheck);
    } else {
        MakeAllocationsWithSeveralRepeat<decltype(spaceCheck), 3U, SPACE, DO_SAVE>(gcData, report, bytes,
                                                                                   rawObjectsSize, spaceCheck);
    }

    // We must not have uncounted GCs
    ASSERT(gcCnt == gccnt->count);
    return report;
}

typename MemStatsGenGCTest::MemOpReport MemStatsGenGCTest::HelpAllocTenured()
{
    MemStatsGenGCTest::MemOpReport report {};
    report.allocatedCount = 0;
    report.allocatedBytes = 0;
    report.savedCount = 0;
    report.savedBytes = 0;

    auto oldRootSize = rootSize;

    // One way to get objects into the tenured space is by promotion
    auto r = MakeAllocations<TargetSpace::YOUNG, true>();
    gc->WaitForGCInManaged(GCTask(GCTaskCause::YOUNG_GC_CAUSE));
    MakeObjectsGarbage(oldRootSize, oldRootSize + (rootSize - oldRootSize) / 2U);
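    // Half of the freshly promoted objects are now garbage; the other half remains alive in the root array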

    report.allocatedCount = r.savedCount;
    report.allocatedBytes = r.savedBytes;
    report.savedCount = r.savedCount / 2U;
    report.savedBytes = r.savedBytes / 2U;

    // Another way is direct allocation in the tenured space, if possible
    auto r2 = MakeAllocations<TargetSpace::TENURED_REGULAR, true>();

    report.allocatedCount += r2.allocatedCount;
    report.allocatedBytes += r2.allocatedBytes;
    report.savedCount += r2.savedCount;
    report.savedBytes += r2.savedBytes;

    // Large objects are also tenured in terms of gen memstats
    auto r3 = MakeAllocations<TargetSpace::TENURED_LARGE, true>();

    report.allocatedCount += r3.allocatedCount;
    report.allocatedBytes += r3.allocatedBytes;
    report.savedCount += r3.savedCount;
    report.savedBytes += r3.savedBytes;

    auto r4 = MakeAllocations<TargetSpace::HUMONGOUS, true>();

    report.allocatedCount += r4.allocatedCount;
    report.allocatedBytes += r4.allocatedBytes;
    report.savedCount += r4.savedCount;
    report.savedBytes += r4.savedBytes;
    return report;
}

template <typename T>
MemStatsGenGCTest::RealStatsLocations MemStatsGenGCTest::GetGenMemStatsDetails(T gms)
{
    RealStatsLocations loc {};
    loc.youngFreedObjectsCount = &gms->youngFreeObjectCount_;
    loc.youngFreedObjectsSize = &gms->youngFreeObjectSize_;
    loc.youngMovedObjectsCount = &gms->youngMoveObjectCount_;
    loc.youngMovedObjectsSize = &gms->youngMoveObjectSize_;
    loc.tenuredFreedObjectsCount = &gms->tenuredFreeObjectCount_;
    loc.tenuredFreedObjectsSize = &gms->tenuredFreeObjectSize_;
    return loc;
}

TEST_F(MemStatsGenGCTest, TrivialGarbageStatsGenGcTest)
{
    for (int gctypeIdx = 0; static_cast<GCType>(gctypeIdx) <= GCType::GCTYPE_LAST; ++gctypeIdx) {
        auto gcTypeLocal = static_cast<GCType>(gctypeIdx);
        if (gcTypeLocal == GCType::EPSILON_G1_GC || gcTypeLocal == GCType::INVALID_GC) {
            continue;
        }
        if (!IsGenerationalGCType(gcTypeLocal)) {
            continue;
        }
        std::string gctype = static_cast<std::string>(GCStringFromType(gcTypeLocal));
        for (MemStatsGenGCTest::Config cfg; !cfg.End(); ++cfg) {
            SetupRuntime(gctype, cfg);

            {
                HandleScope<ObjectHeader *> scope(thread);
                PrepareTest<ark::PandaAssemblyLanguageConfig>();
                auto *genMs = GetGenMemStats<ark::PandaAssemblyLanguageConfig>();
                RealStatsLocations loc = GetGenMemStatsDetails<decltype(genMs)>(genMs);

                gc->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));  // Heap doesn't have unexpected garbage now

                // Make a trivial allocation of unaligned size and make it garbage
                auto r = MakeAllocations<TargetSpace::YOUNG, false, true>();
                gc->WaitForGCInManaged(GCTask(GCTaskCause::YOUNG_GC_CAUSE));
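                // Two GCs so far: the initial full GC plus the young GC above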
                ASSERT_EQ(2U, gccnt->count);
                if (NeedToCheckYoungFreedCount()) {
                    ASSERT_EQ(*loc.youngFreedObjectsCount, r.allocatedCount);
                }
                ASSERT_EQ(*loc.youngFreedObjectsSize, r.allocatedBytes);
                ASSERT_EQ(*loc.youngMovedObjectsCount, 0);
                ASSERT_EQ(*loc.youngMovedObjectsSize, 0);
                ASSERT_EQ(*loc.tenuredFreedObjectsCount, 0);
                ASSERT_EQ(*loc.tenuredFreedObjectsSize, 0);
                if (NeedToCheckYoungFreedCount()) {
                    ASSERT_EQ(gcMs->GetObjectsFreedCount(), r.allocatedCount);
                }
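                // Freed-bytes accounting includes TLAB allocations only when they are tracked at compile time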
                if (PANDA_TRACK_TLAB_ALLOCATIONS) {
                    ASSERT_EQ(gcMs->GetObjectsFreedBytes(), r.allocatedBytes);
                }
                ASSERT_EQ(gcMs->GetLargeObjectsFreedCount(), 0);
                ASSERT_EQ(gcMs->GetLargeObjectsFreedBytes(), 0);
            }

            ResetRuntime();
        }
    }
}

TEST_F(MemStatsGenGCTest, TrivialAliveAndTenuredStatsGenGcTest)
{
    for (int gctypeIdx = 0; static_cast<GCType>(gctypeIdx) <= GCType::GCTYPE_LAST; ++gctypeIdx) {
        auto gcTypeLocal = static_cast<GCType>(gctypeIdx);
        if (gcTypeLocal == GCType::EPSILON_G1_GC || gcTypeLocal == GCType::INVALID_GC) {
            continue;
        }
        if (!IsGenerationalGCType(gcTypeLocal)) {
            continue;
        }
        std::string gctype = static_cast<std::string>(GCStringFromType(gcTypeLocal));
        for (MemStatsGenGCTest::Config cfg; !cfg.End(); ++cfg) {
            SetupRuntime(gctype, cfg);

            {
                HandleScope<ObjectHeader *> scope(thread);
                PrepareTest<ark::PandaAssemblyLanguageConfig>();
                auto *genMs = GetGenMemStats<ark::PandaAssemblyLanguageConfig>();
                RealStatsLocations loc = GetGenMemStatsDetails<decltype(genMs)>(genMs);

                gc->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));  // Heap doesn't have unexpected garbage now

                // Make a trivial allocation of unaligned size and make it alive
                auto r = MakeAllocations<TargetSpace::YOUNG, true, true>();
                gc->WaitForGCInManaged(GCTask(GCTaskCause::YOUNG_GC_CAUSE));
                ASSERT_EQ(2U, gccnt->count);
                ASSERT_EQ(*loc.youngFreedObjectsCount, 0);
                ASSERT_EQ(*loc.youngFreedObjectsSize, 0);
                ASSERT_EQ(*loc.youngMovedObjectsCount, r.savedCount);
                ASSERT_EQ(*loc.youngMovedObjectsSize, r.savedBytes);
                ASSERT_EQ(*loc.tenuredFreedObjectsCount, 0);
                ASSERT_EQ(*loc.tenuredFreedObjectsSize, 0);

                // Expecting that r.savedCount objects (r.savedBytes) have been promoted into tenured
                // Make them garbage
                MakeObjectsGarbage(0, rootSize);
                gc->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
                ASSERT_EQ(3U, gccnt->count);
                ASSERT_EQ(*loc.youngFreedObjectsCount, 0);
                ASSERT_EQ(*loc.youngFreedObjectsSize, 0);
                ASSERT_EQ(*loc.youngMovedObjectsCount, 0);
                ASSERT_EQ(*loc.youngMovedObjectsSize, 0);
                ASSERT_EQ(*loc.tenuredFreedObjectsCount, r.savedCount);
                ASSERT_EQ(*loc.tenuredFreedObjectsSize, r.savedBytes);
            }

            ResetRuntime();
        }
    }
}

TEST_F(MemStatsGenGCTest, TrivialTenuredAndLargeStatsGenGcTest)
{
    for (int gctypeIdx = 0; static_cast<GCType>(gctypeIdx) <= GCType::GCTYPE_LAST; ++gctypeIdx) {
        auto gcTypeLocal = static_cast<GCType>(gctypeIdx);
        if (gcTypeLocal == GCType::EPSILON_G1_GC || gcTypeLocal == GCType::INVALID_GC) {
            continue;
        }
        if (!IsGenerationalGCType(gcTypeLocal)) {
            continue;
        }
        std::string gctype = static_cast<std::string>(GCStringFromType(gcTypeLocal));
        for (MemStatsGenGCTest::Config cfg; !cfg.End(); ++cfg) {
            SetupRuntime(gctype, cfg);

            {
                HandleScope<ObjectHeader *> scope(thread);
                PrepareTest<ark::PandaAssemblyLanguageConfig>();
                auto *genMs = GetGenMemStats<ark::PandaAssemblyLanguageConfig>();
                RealStatsLocations loc = GetGenMemStatsDetails<decltype(genMs)>(genMs);

                gc->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));  // Heap doesn't have unexpected garbage now

                // Make a trivial allocation of unaligned size in the tenured space and make it garbage
                auto r = MakeAllocations<TargetSpace::TENURED_REGULAR, false, true>();
                gc->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
                ASSERT_EQ(2U, gccnt->count);
                ASSERT_EQ(*loc.youngFreedObjectsCount, 0);
                ASSERT_EQ(*loc.youngFreedObjectsSize, 0);
                ASSERT_EQ(*loc.youngMovedObjectsCount, 0);
                ASSERT_EQ(*loc.youngMovedObjectsSize, 0);
                ASSERT_EQ(*loc.tenuredFreedObjectsCount, r.allocatedCount);
                ASSERT_EQ(*loc.tenuredFreedObjectsSize, r.allocatedBytes);

                // Make a trivial large-object allocation of unaligned size and make it garbage
                r = MakeAllocations<TargetSpace::TENURED_LARGE, false, true>();
                gc->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
                ASSERT_EQ(3U, gccnt->count);
                ASSERT_EQ(*loc.youngFreedObjectsCount, 0);
                ASSERT_EQ(*loc.youngFreedObjectsSize, 0);
                ASSERT_EQ(*loc.youngMovedObjectsCount, 0);
                ASSERT_EQ(*loc.youngMovedObjectsSize, 0);
                ASSERT_EQ(*loc.tenuredFreedObjectsCount, r.allocatedCount);
                ASSERT_EQ(*loc.tenuredFreedObjectsSize, r.allocatedBytes);

                r = MakeAllocations<TargetSpace::HUMONGOUS, false, true>();
                gc->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
                ASSERT_EQ(*loc.youngFreedObjectsCount, 0);
                ASSERT_EQ(*loc.youngFreedObjectsSize, 0);
                ASSERT_EQ(*loc.youngMovedObjectsCount, 0);
                ASSERT_EQ(*loc.youngMovedObjectsSize, 0);
                ASSERT_EQ(*loc.tenuredFreedObjectsCount, r.allocatedCount);
                ASSERT_EQ(*loc.tenuredFreedObjectsSize, r.allocatedBytes);
            }

            ResetRuntime();
        }
    }
}

TEST_F(MemStatsGenGCTest, YoungStatsGenGcTest)
{
    for (int gctypeIdx = 0; static_cast<GCType>(gctypeIdx) <= GCType::GCTYPE_LAST; ++gctypeIdx) {
        if (static_cast<GCType>(gctypeIdx) == GCType::EPSILON_G1_GC ||
            static_cast<GCType>(gctypeIdx) == GCType::INVALID_GC) {
            continue;
        }
        if (!IsGenerationalGCType(static_cast<GCType>(gctypeIdx))) {
            continue;
        }
        std::string gctype = static_cast<std::string>(GCStringFromType(static_cast<GCType>(gctypeIdx)));
        for (MemStatsGenGCTest::Config cfg; !cfg.End(); ++cfg) {
            SetupRuntime(gctype, cfg);

            {
                HandleScope<ObjectHeader *> scope(thread);
                PrepareTest<ark::PandaAssemblyLanguageConfig>();
                auto *genMs = GetGenMemStats<ark::PandaAssemblyLanguageConfig>();
                RealStatsLocations loc = GetGenMemStatsDetails<decltype(genMs)>(genMs);

                gc->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
                // Young shall be empty now.
                auto r = MakeAllocations<TargetSpace::YOUNG, true>();
                gc->WaitForGCInManaged(GCTask(GCTaskCause::YOUNG_GC_CAUSE));

                if (NeedToCheckYoungFreedCount()) {
                    ASSERT_EQ(*loc.youngFreedObjectsCount, r.allocatedCount - r.savedCount);
                }
                ASSERT_EQ(*loc.youngFreedObjectsSize, r.allocatedBytes - r.savedBytes);
                ASSERT_EQ(*loc.youngMovedObjectsCount, r.savedCount);
                ASSERT_EQ(*loc.youngMovedObjectsSize, r.savedBytes);
                ASSERT_EQ(*loc.tenuredFreedObjectsCount, 0);
                ASSERT_EQ(*loc.tenuredFreedObjectsSize, 0);
            }

            ResetRuntime();
        }
    }
}

TEST_F(MemStatsGenGCTest, TenuredStatsFullGenGcTest)
{
    for (int gctypeIdx = 0; static_cast<GCType>(gctypeIdx) <= GCType::GCTYPE_LAST; ++gctypeIdx) {
        if (static_cast<GCType>(gctypeIdx) == GCType::EPSILON_G1_GC ||
            static_cast<GCType>(gctypeIdx) == GCType::INVALID_GC) {
            continue;
        }
        if (!IsGenerationalGCType(static_cast<GCType>(gctypeIdx))) {
            continue;
        }
        std::string gctype = static_cast<std::string>(GCStringFromType(static_cast<GCType>(gctypeIdx)));
        for (MemStatsGenGCTest::Config cfg; !cfg.End(); ++cfg) {
            SetupRuntime(gctype, cfg);

            {
                HandleScope<ObjectHeader *> scope(thread);
                PrepareTest<ark::PandaAssemblyLanguageConfig>();
                auto *genMs = GetGenMemStats<ark::PandaAssemblyLanguageConfig>();
                RealStatsLocations loc = GetGenMemStatsDetails<decltype(genMs)>(genMs);

                gc->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
                // Young shall be empty now.

                uint32_t tCount = 0;
                uint64_t tBytes = 0;

                for (int i = 0; i < FULL_TEST_ALLOC_TIMES; ++i) {
                    [[maybe_unused]] int gcCnt = gccnt->count;
                    auto r = HelpAllocTenured();
                    // HelpAllocTenured shall trigger exactly one young GC, which is allowed to be mixed
                    ASSERT(gcCnt + 1 == gccnt->count);
                    auto tfocY = *loc.tenuredFreedObjectsCount;
                    auto tfosY = *loc.tenuredFreedObjectsSize;
                    ASSERT(r.allocatedCount > 0);
                    gc->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
                    ASSERT_EQ(*loc.youngFreedObjectsCount, 0);
                    ASSERT_EQ(*loc.youngFreedObjectsSize, 0);
                    ASSERT_EQ(*loc.youngMovedObjectsCount, 0);
                    ASSERT_EQ(*loc.youngMovedObjectsSize, 0);
                    ASSERT_EQ(*loc.tenuredFreedObjectsCount + tfocY, r.allocatedCount - r.savedCount);
                    ASSERT_EQ(*loc.tenuredFreedObjectsSize + tfosY, r.allocatedBytes - r.savedBytes);
                    tCount += r.savedCount;
                    tBytes += r.savedBytes;
                }

                // Empty everything
                auto ry = MakeAllocations<TargetSpace::YOUNG, false>();
                MakeObjectsGarbage(0, rootSize);

                gc->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
                if (NeedToCheckYoungFreedCount()) {
                    ASSERT_EQ(*loc.youngFreedObjectsCount, ry.allocatedCount);
                }
                ASSERT_EQ(*loc.youngFreedObjectsSize, ry.allocatedBytes);
                ASSERT_EQ(*loc.youngMovedObjectsCount, 0);
                ASSERT_EQ(*loc.youngMovedObjectsSize, 0);
                ASSERT_EQ(*loc.tenuredFreedObjectsCount, tCount);
                ASSERT_EQ(*loc.tenuredFreedObjectsSize, tBytes);

                gc->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
                ASSERT_EQ(*loc.youngFreedObjectsCount, 0);
                ASSERT_EQ(*loc.youngFreedObjectsSize, 0);
                ASSERT_EQ(*loc.youngMovedObjectsCount, 0);
                ASSERT_EQ(*loc.youngMovedObjectsSize, 0);
                ASSERT_EQ(*loc.tenuredFreedObjectsCount, 0);
                ASSERT_EQ(*loc.tenuredFreedObjectsSize, 0);
            }

            ResetRuntime();
        }
    }
}

TEST_F(MemStatsGenGCTest, TenuredStatsMixGenGcTest)
{
    for (int gctypeIdx = 0; static_cast<GCType>(gctypeIdx) <= GCType::GCTYPE_LAST; ++gctypeIdx) {
        if (static_cast<GCType>(gctypeIdx) == GCType::EPSILON_G1_GC ||
            static_cast<GCType>(gctypeIdx) == GCType::INVALID_GC) {
            continue;
        }
        if (!IsGenerationalGCType(static_cast<GCType>(gctypeIdx))) {
            continue;
        }
        if (static_cast<GCType>(gctypeIdx) == GCType::GEN_GC) {
            // Doesn't have mixed GC collection
            continue;
        }
        std::string gctype = static_cast<std::string>(GCStringFromType(static_cast<GCType>(gctypeIdx)));
        for (MemStatsGenGCTest::Config cfg; !cfg.End(); ++cfg) {
            SetupRuntime(gctype, cfg);

            {
                HandleScope<ObjectHeader *> scope(thread);
                PrepareTest<ark::PandaAssemblyLanguageConfig>();
                GCTaskCause mixedCause;
                switch (gcType) {
                    case GCType::GEN_GC: {
                        UNREACHABLE();  // Doesn't have mixed GC collection
                    }
                    case GCType::G1_GC: {
                        mixedCause = MIXED_G1_GC_CAUSE;
                        break;
                    }
                    default:
                        UNREACHABLE();  // NYI
                }
                auto *genMs = GetGenMemStats<ark::PandaAssemblyLanguageConfig>();
                RealStatsLocations loc = GetGenMemStatsDetails<decltype(genMs)>(genMs);

                gc->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
                // Young shall be empty now.

                uint32_t tCount = 0;
                uint64_t tBytes = 0;

                {
                    uint32_t deadCount = 0;
                    uint64_t deadBytes = 0;
                    uint32_t expectedDeadCount = 0;
                    uint64_t expectedDeadBytes = 0;
                    for (int i = 0; i < MIX_TEST_ALLOC_TIMES; ++i) {
                        [[maybe_unused]] int gcCnt = gccnt->count;
                        auto r = HelpAllocTenured();
                        // HelpAllocTenured shall trigger exactly one young GC, which is allowed to be mixed
                        ASSERT(gcCnt + 1 == gccnt->count);
                        deadCount += *loc.tenuredFreedObjectsCount;
                        deadBytes += *loc.tenuredFreedObjectsSize;
                        // A mixed GC may not free all the tenured garbage, so run it until it stalls
                        do {
                            gc->WaitForGCInManaged(GCTask(mixedCause));
                            ASSERT_EQ(*loc.youngFreedObjectsCount, 0);
                            ASSERT_EQ(*loc.youngFreedObjectsSize, 0);
                            ASSERT_EQ(*loc.youngMovedObjectsCount, 0);
                            ASSERT_EQ(*loc.youngMovedObjectsSize, 0);
                            deadCount += *loc.tenuredFreedObjectsCount;
                            deadBytes += *loc.tenuredFreedObjectsSize;
                        } while (*loc.tenuredFreedObjectsCount != 0);
                        tCount += r.savedCount;
                        tBytes += r.savedBytes;
                        expectedDeadCount += r.allocatedCount - r.savedCount;
                        expectedDeadBytes += r.allocatedBytes - r.savedBytes;
                    }
                    gc->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
                    ASSERT_EQ(*loc.youngFreedObjectsCount, 0);
                    ASSERT_EQ(*loc.youngFreedObjectsSize, 0);
                    ASSERT_EQ(*loc.youngMovedObjectsCount, 0);
                    ASSERT_EQ(*loc.youngMovedObjectsSize, 0);
                    deadCount += *loc.tenuredFreedObjectsCount;
                    deadBytes += *loc.tenuredFreedObjectsSize;
                    ASSERT_EQ(deadCount, expectedDeadCount);
                    ASSERT_EQ(deadBytes, expectedDeadBytes);
                }

                // Empty everything
                auto ry = MakeAllocations<TargetSpace::YOUNG, false>();
                MakeObjectsGarbage(0, rootSize);
                {
                    uint32_t deadCount = 0;
                    uint64_t deadBytes = 0;
                    do {
                        gc->WaitForGCInManaged(GCTask(mixedCause));
                        if (NeedToCheckYoungFreedCount()) {
                            ASSERT_EQ(*loc.youngFreedObjectsCount, ry.allocatedCount);
                        }
                        ASSERT_EQ(*loc.youngFreedObjectsSize, ry.allocatedBytes);
                        ASSERT_EQ(*loc.youngMovedObjectsCount, 0);
                        ASSERT_EQ(*loc.youngMovedObjectsSize, 0);
                        deadCount += *loc.tenuredFreedObjectsCount;
                        deadBytes += *loc.tenuredFreedObjectsSize;
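                        // Young was emptied by the first pass; later iterations must see no young frees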
                        ry.allocatedCount = 0;
                        ry.allocatedBytes = 0;
                    } while (*loc.tenuredFreedObjectsCount != 0);
                    gc->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
                    ASSERT_EQ(*loc.youngFreedObjectsCount, 0);
                    ASSERT_EQ(*loc.youngFreedObjectsSize, 0);
                    ASSERT_EQ(*loc.youngMovedObjectsCount, 0);
                    ASSERT_EQ(*loc.youngMovedObjectsSize, 0);
                    deadCount += *loc.tenuredFreedObjectsCount;
                    deadBytes += *loc.tenuredFreedObjectsSize;
                    ASSERT_EQ(deadCount, tCount);
                    ASSERT_EQ(deadBytes, tBytes);
                }
                gc->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
                ASSERT_EQ(*loc.youngFreedObjectsCount, 0);
                ASSERT_EQ(*loc.youngFreedObjectsSize, 0);
                ASSERT_EQ(*loc.youngMovedObjectsCount, 0);
                ASSERT_EQ(*loc.youngMovedObjectsSize, 0);
                ASSERT_EQ(*loc.tenuredFreedObjectsCount, 0);
                ASSERT_EQ(*loc.tenuredFreedObjectsSize, 0);
            }

            ResetRuntime();
        }
    }
}
}  // namespace ark::mem::test