• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  * http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 #include <array>
16 #include <atomic>
17 #include <chrono>
18 
19 #include "gtest/gtest.h"
20 #include "iostream"
21 #include "runtime/include/coretypes/string.h"
22 #include "runtime/include/runtime.h"
23 #include "runtime/include/panda_vm.h"
24 #include "runtime/handle_scope-inl.h"
25 #include "runtime/mem/gc/g1/g1-allocator.h"
26 #include "runtime/mem/gc/generational-gc-base.h"
27 #include "runtime/mem/malloc-proxy-allocator-inl.h"
28 #include "runtime/mem/mem_stats.h"
29 #include "runtime/mem/mem_stats_default.h"
30 #include "runtime/mem/runslots_allocator-inl.h"
31 
32 namespace panda::mem::test {
// Fixture that checks generational GC memory statistics (object/byte counts
// freed and moved per collection) for the GenGC and G1 collectors.
class MemStatsGenGCTest : public testing::Test {
public:
    using ObjVec = PandaVector<ObjectHeader *>;
    using HanVec = PandaVector<VMHandle<ObjectHeader *> *>;
    // Capacity of the root_ array used to keep test objects reachable.
    static constexpr size_t ROOT_MAX_SIZE = 100000U;
    static constexpr int MIX_TEST_ALLOC_TIMES = 5;
    static constexpr int FULL_TEST_ALLOC_TIMES = 2;

    static constexpr GCTaskCause MIXED_G1_GC_CAUSE = GCTaskCause::YOUNG_GC_CAUSE;
    static constexpr GCTaskCause FULL_GC_CAUSE = GCTaskCause::EXPLICIT_CAUSE;

    // Heap space an allocation is steered into (by choosing the object size).
    enum class TargetSpace {
        YOUNG,
        TENURED_REGULAR,
        /*
         * Some allocators have Large objects, it's not the same as Humongous. Objects can be less than Humongous but be
         * allocated directly in the tenured space for example.
         */
        TENURED_LARGE,
        HUMONGOUS
    };

    // Listener counting GC runs so tests can assert no unexpected GCs happened.
    class GCCounter : public GCListener {
    public:
        void GCStarted([[maybe_unused]] size_t heap_size) override
        {
            count++;
        }

        void GCFinished([[maybe_unused]] const GCTask &task, [[maybe_unused]] size_t heap_size_before_gc,
                        [[maybe_unused]] size_t heap_size) override
        {
        }

        // Number of collections started since this listener was registered.
        int count = 0;
    };

    // Result of an allocation batch: totals allocated and totals kept alive.
    struct MemOpReport {
        size_t allocated_count;
        size_t allocated_bytes;
        size_t saved_count;
        size_t saved_bytes;
    };

    // Direct pointers into the GC-internal statistics counters under test.
    struct RealStatsLocations {
        uint32_t *young_freed_objects_count_;
        uint64_t *young_freed_objects_size_;
        uint32_t *young_moved_objects_count_;
        uint64_t *young_moved_objects_size_;
        uint32_t *tenured_freed_objects_count_;
        uint64_t *tenured_freed_objects_size_;
    };

    // Creates the runtime with the given GC type configured for deterministic,
    // in-place collections (trigger "debug-never", JIT off, no TLABs) and
    // enters managed code on the current thread.
    void SetupRuntime(const std::string &gc_type)
    {
        RuntimeOptions options;
        options.SetShouldLoadBootPandaFiles(false);
        options.SetShouldInitializeIntrinsics(false);
        options.SetUseTlabForAllocations(false);
        options.SetGcType(gc_type);
        options.SetGcTriggerType("debug-never");
        options.SetRunGcInPlace(true);
        options.SetCompilerEnableJit(false);
        [[maybe_unused]] bool success = Runtime::Create(options);
        ASSERT(success);

        thread_ = panda::MTManagedThread::GetCurrent();
        gc_type_ = Runtime::GetGCType(options, plugins::RuntimeTypeToLang(options.GetRuntimeType()));
        [[maybe_unused]] auto gc = thread_->GetVM()->GetGC();
        ASSERT(gc->GetType() == panda::mem::GCTypeFromString(gc_type));
        ASSERT(gc->IsGenerational());
        thread_->ManagedCodeBegin();
    }

    // Releases handles and the GC counter, leaves managed code and destroys
    // the runtime created by SetupRuntime().
    void ResetRuntime()
    {
        DeleteHandles();
        internal_allocator_->Delete(gccnt_);
        thread_->ManagedCodeEnd();
        bool success = Runtime::Destroy();
        ASSERT_TRUE(success) << "Cannot destroy Runtime";
    }

    template <typename F, size_t repeat, MemStatsGenGCTest::TargetSpace SPACE>
    ObjVec MakeAllocationsWithRepeats(size_t min_size, size_t max_size, size_t count, size_t *allocated,
                                      size_t *requested, F space_checker, bool check_oom_in_tenured);

    void InitRoot();
    void MakeObjectsAlive(ObjVec objects, int every = 1);
    void MakeObjectsPermAlive(ObjVec objects, int every = 1);
    void MakeObjectsGarbage(size_t start_idx, size_t after_end_idx, int every = 1);
    void DumpHandles();
    void DumpAliveObjects();
    void DeleteHandles();
    bool IsInYoung(uintptr_t addr);
    MemOpReport HelpAllocTenured();

    template <class LanguageConfig>
    void PrepareTest();

    template <class LanguageConfig>
    typename GenerationalGC<LanguageConfig>::MemStats *GetGenMemStats();

    // Allocate a series of objects in a specific space. If DO_SAVE is true, a subsequence of objects
    // is going to be kept alive and put into the roots array this->root_
    // If IS_SINGLE is true, then only 1 object is allocated of unaligned size
    // If IS_SINGLE is false, then an array of objects of different sizes is allocated in triplets twice
    // Saved subsequence contains 2 equal subsequences of objects (2 of 3 objs in each triplets are garbage)
    // NOTE(review): the parameter is spelled IS_SIMPLE here but IS_SINGLE at
    // the out-of-class definition; it is the same parameter (name mismatch
    // only, which C++ permits) — consider unifying the spelling.
    template <MemStatsGenGCTest::TargetSpace SPACE, bool DO_SAVE = false, bool IS_SIMPLE = false>
    typename MemStatsGenGCTest::MemOpReport MakeAllocations();

    template <typename T>
    RealStatsLocations GetGenMemStatsDetails(T gms);

    void TearDown() override {}

    panda::MTManagedThread *thread_;
    GCType gc_type_;

    LanguageContext ctx_ {nullptr};
    ObjectAllocatorBase *object_allocator_;
    mem::InternalAllocatorPtr internal_allocator_;
    PandaVM *vm_;
    GC *gc_;
    std::vector<HanVec> handles_;  // VMHandles keeping objects permanently alive
    MemStatsType *ms_;
    GCStats *gc_ms_;
    coretypes::Array *root_ = nullptr;  // array anchoring "alive" test objects
    size_t root_size_ = 0;              // number of used slots in root_
    GCCounter *gccnt_;
};
164 
// Allocates `count` string objects, each repeated `repeat` times, with sizes
// stepping from min_size towards max_size (deliberately unaligned via +i).
// Accumulates the heap-accounted byte total in *allocated (region size for G1
// humongous objects, aligned object size otherwise) and the raw requested
// byte total in *requested. When check_oom_in_tenured is set, stops early if
// the next allocation would leave less than ~5MB free in tenured space.
// Returns the created objects (possibly fewer than count * repeat on early stop).
template <typename F, size_t repeat, MemStatsGenGCTest::TargetSpace SPACE>
MemStatsGenGCTest::ObjVec MemStatsGenGCTest::MakeAllocationsWithRepeats(size_t min_size, size_t max_size, size_t count,
                                                                        size_t *allocated, size_t *requested,
                                                                        [[maybe_unused]] F space_checker,
                                                                        bool check_oom_in_tenured)
{
    ASSERT(min_size <= max_size);
    *allocated = 0;
    *requested = 0;
    // Create array of object templates based on count and max size
    PandaVector<PandaString> obj_templates(count);
    size_t obj_size = sizeof(coretypes::String) + min_size;
    for (size_t i = 0; i < count; ++i) {
        PandaString simple_string;
        simple_string.resize(obj_size - sizeof(coretypes::String));
        obj_templates[i] = std::move(simple_string);
        obj_size += (max_size / count + i);  // +i to mess with the alignment
        if (obj_size > max_size) {
            obj_size = max_size;  // clamp: sizes never exceed max_size
        }
    }
    ObjVec result;
    result.reserve(count * repeat);
    for (size_t j = 0; j < count; ++j) {
        size_t size = obj_templates[j].length() + sizeof(coretypes::String);
        if (check_oom_in_tenured) {
            // Leaving 5MB in tenured seems OK
            auto free =
                reinterpret_cast<GenerationalSpaces *>(object_allocator_->GetHeapSpace())->GetCurrentFreeTenuredSize();
            if (size + 5000000 > free) {
                return result;
            }
        }
        for (size_t i = 0; i < repeat; ++i) {
            coretypes::String *string_obj = coretypes::String::CreateFromMUtf8(
                reinterpret_cast<const uint8_t *>(&obj_templates[j][0]), obj_templates[j].length(), ctx_, vm_);
            ASSERT(string_obj != nullptr);
            // Verify the object really landed in the space the caller targeted.
            ASSERT(space_checker(ToUintPtr(string_obj)) == true);
            if (gc_type_ == GCType::G1_GC && SPACE == TargetSpace::HUMONGOUS) {
                // for humongous objects in G1 we calculate size of the region instead of just alignment size
                Region *region = AddrToRegion(string_obj);
                *allocated += region->Size();
            } else {
                *allocated += GetAlignedObjectSize(size);
            }
            *requested += size;
            result.push_back(string_obj);
        }
    }
    return result;
}
216 
InitRoot()217 void MemStatsGenGCTest::InitRoot()
218 {
219     ClassLinker *class_linker = Runtime::GetCurrent()->GetClassLinker();
220     Class *klass = class_linker->GetExtension(panda_file::SourceLang::PANDA_ASSEMBLY)
221                        ->GetClass(ctx_.GetStringArrayClassDescriptor());
222     ASSERT_NE(klass, nullptr);
223     root_ = coretypes::Array::Create(klass, ROOT_MAX_SIZE);
224     root_size_ = 0;
225     MakeObjectsPermAlive({root_});
226 }
227 
MakeObjectsAlive(ObjVec objects,int every)228 void MemStatsGenGCTest::MakeObjectsAlive(ObjVec objects, int every)
229 {
230     int cnt = every;
231     for (auto *obj : objects) {
232         cnt--;
233         if (cnt != 0) {
234             continue;
235         }
236         root_->Set(root_size_, obj);
237         root_size_++;
238         ASSERT(root_size_ < ROOT_MAX_SIZE);
239         cnt = every;
240     }
241 }
242 
MakeObjectsGarbage(size_t start_idx,size_t after_end_idx,int every)243 void MemStatsGenGCTest::MakeObjectsGarbage(size_t start_idx, size_t after_end_idx, int every)
244 {
245     int cnt = every;
246     for (size_t i = start_idx; i < after_end_idx; ++i) {
247         cnt--;
248         if (cnt != 0) {
249             continue;
250         }
251         root_->Set(i, 0);
252         cnt = every;
253     }
254 }
255 
MakeObjectsPermAlive(ObjVec objects,int every)256 void MemStatsGenGCTest::MakeObjectsPermAlive(ObjVec objects, int every)
257 {
258     HanVec result;
259     result.reserve(objects.size() / every);
260     int cnt = every;
261     for (auto *obj : objects) {
262         cnt--;
263         if (cnt != 0) {
264             continue;
265         }
266         result.push_back(internal_allocator_->New<VMHandle<ObjectHeader *>>(thread_, obj));
267         cnt = every;
268     }
269     handles_.push_back(result);
270 }
271 
DumpHandles()272 void MemStatsGenGCTest::DumpHandles()
273 {
274     for (auto &hv : handles_) {
275         for (auto *handle : hv) {
276             std::cout << "vector " << (void *)&hv << " handle " << (void *)handle << " obj " << handle->GetPtr()
277                       << std::endl;
278         }
279     }
280 }
281 
DumpAliveObjects()282 void MemStatsGenGCTest::DumpAliveObjects()
283 {
284     std::cout << "Alive root array : " << handles_[0][0]->GetPtr() << std::endl;
285     for (size_t i = 0; i < root_size_; ++i) {
286         if (root_->Get<ObjectHeader *>(i) != nullptr) {
287             std::cout << "Alive idx " << i << " : " << root_->Get<ObjectHeader *>(i) << std::endl;
288         }
289     }
290 }
291 
DeleteHandles()292 void MemStatsGenGCTest::DeleteHandles()
293 {
294     for (auto &hv : handles_) {
295         for (auto *handle : hv) {
296             internal_allocator_->Delete(handle);
297         }
298     }
299     handles_.clear();
300 }
301 
// Caches VM/GC/allocator accessors for the given language config, installs
// the GC-counting listener and creates the root array. Must be called inside
// managed code (after SetupRuntime()).
template <class LanguageConfig>
void MemStatsGenGCTest::PrepareTest()
{
    if constexpr (std::is_same<LanguageConfig, panda::PandaAssemblyLanguageConfig>::value) {
        // Drop any handles left over from a previous loop iteration before
        // re-caching runtime state for the freshly created runtime.
        DeleteHandles();
        ctx_ = Runtime::GetCurrent()->GetLanguageContext(panda_file::SourceLang::PANDA_ASSEMBLY);
        object_allocator_ = thread_->GetVM()->GetHeapManager()->GetObjectAllocator().AsObjectAllocator();
        vm_ = Runtime::GetCurrent()->GetPandaVM();
        internal_allocator_ = Runtime::GetCurrent()->GetClassLinker()->GetAllocator();
        gc_ = vm_->GetGC();
        ms_ = vm_->GetMemStats();
        gc_ms_ = vm_->GetGCStats();
        gccnt_ = internal_allocator_->New<GCCounter>();
        gc_->AddListener(gccnt_);
        InitRoot();
    } else {
        // Only the Panda Assembly language config is supported by this fixture.
        UNREACHABLE();
    }
}
321 
322 template <class LanguageConfig>
GetGenMemStats()323 typename GenerationalGC<LanguageConfig>::MemStats *MemStatsGenGCTest::GetGenMemStats()
324 {
325     // An explicit getter, because the typename has to be template-specialized
326     return &reinterpret_cast<GenerationalGC<LanguageConfig> *>(gc_)->mem_stats_;
327 }
328 
IsInYoung(uintptr_t addr)329 bool MemStatsGenGCTest::IsInYoung(uintptr_t addr)
330 {
331     switch (gc_type_) {
332         case GCType::GEN_GC: {
333             return object_allocator_->IsAddressInYoungSpace(addr);
334         }
335         case GCType::G1_GC: {
336             auto mem_pool = PoolManager::GetMmapMemPool();
337             if (mem_pool->GetSpaceTypeForAddr(reinterpret_cast<ObjectHeader *>(addr)) != SpaceType::SPACE_TYPE_OBJECT) {
338                 return false;
339             }
340             return Region::AddrToRegion<false>(reinterpret_cast<ObjectHeader *>(addr))->HasFlag(RegionFlag::IS_EDEN);
341         }
342         default:
343             UNREACHABLE();  // NYI
344     }
345     return false;
346 }
347 
348 template <MemStatsGenGCTest::TargetSpace SPACE, bool DO_SAVE, bool IS_SINGLE>
MakeAllocations()349 typename MemStatsGenGCTest::MemOpReport MemStatsGenGCTest::MakeAllocations()
350 {
351     [[maybe_unused]] int gc_cnt = gccnt_->count;
352     MemStatsGenGCTest::MemOpReport report;
353     report.allocated_count = 0;
354     report.allocated_bytes = 0;
355     report.saved_count = 0;
356     report.saved_bytes = 0;
357     size_t bytes = 0;
358     [[maybe_unused]] size_t raw_objects_size;  // currently not tracked by memstats
359     size_t count = 0;
360     size_t min_size = 0;
361     size_t max_size = 0;
362     bool check_oom = false;
363     size_t young_size = reinterpret_cast<GenerationalSpaces *>(
364                             reinterpret_cast<ObjectAllocatorGenBase *>(object_allocator_)->GetHeapSpace())
365                             ->GetCurrentMaxYoungSize();
366     switch (gc_type_) {
367         case GCType::GEN_GC: {
368             auto gen_alloc = reinterpret_cast<ObjectAllocatorGen<MT_MODE_MULTI> *>(object_allocator_);
369             count = 15;
370             if constexpr (SPACE == TargetSpace::YOUNG) {
371                 min_size = 0;
372                 max_size = gen_alloc->GetYoungAllocMaxSize();
373             } else if constexpr (SPACE == TargetSpace::TENURED_REGULAR) {
374                 min_size = gen_alloc->GetYoungAllocMaxSize() + 1;
375                 max_size = gen_alloc->GetRegularObjectMaxSize();
376                 if (min_size >= max_size) {
377                     // Allocator configuration disallows allocating directly in this space
378                     return report;
379                 }
380             } else if constexpr (SPACE == TargetSpace::TENURED_LARGE) {
381                 min_size = gen_alloc->GetYoungAllocMaxSize() + 1;
382                 min_size = std::max(min_size, gen_alloc->GetRegularObjectMaxSize() + 1);
383                 max_size = gen_alloc->GetLargeObjectMaxSize();
384                 if (min_size >= max_size) {
385                     // Allocator configuration disallows allocating directly in this space
386                     return report;
387                 }
388             } else {
389                 ASSERT(SPACE == TargetSpace::HUMONGOUS);
390                 count = 3;
391                 min_size = gen_alloc->GetYoungAllocMaxSize() + 1;
392                 min_size = std::max(min_size, gen_alloc->GetRegularObjectMaxSize() + 1);
393                 min_size = std::max(min_size, gen_alloc->GetLargeObjectMaxSize() + 1);
394                 max_size = min_size * 3;
395                 check_oom = true;
396             }
397             break;
398         }
399         case GCType::G1_GC: {
400             auto g1_alloc = reinterpret_cast<ObjectAllocatorG1<MT_MODE_MULTI> *>(object_allocator_);
401             count = 15;
402             if constexpr (SPACE == TargetSpace::YOUNG) {
403                 min_size = 0;
404                 max_size = g1_alloc->GetYoungAllocMaxSize();
405             } else if constexpr (SPACE == TargetSpace::TENURED_REGULAR) {
406                 min_size = g1_alloc->GetYoungAllocMaxSize() + 1;
407                 max_size = g1_alloc->GetRegularObjectMaxSize();
408                 if (min_size >= max_size) {
409                     // Allocator configuration disallows allocating directly in this space
410                     return report;
411                 }
412             } else if constexpr (SPACE == TargetSpace::TENURED_LARGE) {
413                 min_size = g1_alloc->GetYoungAllocMaxSize() + 1;
414                 min_size = std::max(min_size, g1_alloc->GetRegularObjectMaxSize() + 1);
415                 max_size = g1_alloc->GetLargeObjectMaxSize();
416                 if (min_size >= max_size) {
417                     // Allocator configuration disallows allocating directly in this space
418                     return report;
419                 }
420             } else {
421                 ASSERT(SPACE == TargetSpace::HUMONGOUS);
422                 count = 3;
423                 min_size = g1_alloc->GetYoungAllocMaxSize() + 1;
424                 min_size = std::max(min_size, g1_alloc->GetRegularObjectMaxSize() + 1);
425                 min_size = std::max(min_size, g1_alloc->GetLargeObjectMaxSize() + 1);
426                 max_size = min_size * 3;
427                 check_oom = true;
428             }
429             break;
430         }
431         default:
432             UNREACHABLE();
433     }
434 
435     auto space_check = [&SPACE](uintptr_t addr) -> bool {
436         if constexpr (SPACE == TargetSpace::YOUNG) {
437             return IsInYoung(addr);
438         } else if constexpr (SPACE == TargetSpace::TENURED_REGULAR) {
439             return !IsInYoung(addr);
440         } else if constexpr (SPACE == TargetSpace::TENURED_LARGE) {
441             return !IsInYoung(addr);
442         } else if constexpr (SPACE == TargetSpace::HUMONGOUS) {
443             return !IsInYoung(addr);
444         }
445         UNREACHABLE();
446     };
447 
448     if constexpr (SPACE == TargetSpace::YOUNG) {
449         // To prevent Young GC collection while we're allocating
450         max_size = std::min(young_size / (count * 6), max_size);
451     }
452 
453     if (IS_SINGLE) {
454         ObjVec ov1 = MakeAllocationsWithRepeats<decltype(space_check), 1, SPACE>(
455             min_size + 1, max_size, 1, &bytes, &raw_objects_size, space_check, check_oom);
456         report.allocated_count += 1;
457         report.allocated_bytes += bytes;
458         if constexpr (DO_SAVE) {
459             MakeObjectsAlive(ov1, 1);
460             report.saved_count = report.allocated_count;
461             report.saved_bytes = report.allocated_bytes;
462         }
463     } else {
464         ObjVec ov1 = MakeAllocationsWithRepeats<decltype(space_check), 3, SPACE>(
465             min_size, max_size, count, &bytes, &raw_objects_size, space_check, check_oom);
466         report.allocated_count += count * 3;
467         report.allocated_bytes += bytes;
468         ObjVec ov2 = MakeAllocationsWithRepeats<decltype(space_check), 3, SPACE>(
469             min_size, max_size, count, &bytes, &raw_objects_size, space_check, check_oom);
470         report.allocated_count += count * 3;
471         report.allocated_bytes += bytes;
472         if constexpr (DO_SAVE) {
473             MakeObjectsAlive(ov1, 3);
474             MakeObjectsAlive(ov2, 3);
475             report.saved_count = report.allocated_count / 3;
476             report.saved_bytes = report.allocated_bytes / 3;
477         }
478     }
479 
480     // We must not have uncounted GCs
481     ASSERT(gc_cnt == gccnt_->count);
482     return report;
483 }
484 
// Populates tenured space two ways — by promoting young survivors through a
// young GC, and by allocating directly into the tenured regular, large and
// humongous spaces — and returns the aggregated allocated/saved totals from
// the tenured perspective. Triggers exactly one young GC.
typename MemStatsGenGCTest::MemOpReport MemStatsGenGCTest::HelpAllocTenured()
{
    MemStatsGenGCTest::MemOpReport report;
    report.allocated_count = 0;
    report.allocated_bytes = 0;
    report.saved_count = 0;
    report.saved_bytes = 0;

    auto old_root_size = root_size_;

    // One way to get objects into tenured space - by promotion
    auto r = MakeAllocations<TargetSpace::YOUNG, true>();
    gc_->WaitForGCInManaged(GCTask(GCTaskCause::YOUNG_GC_CAUSE));
    // Half of the promoted survivors are turned back into garbage.
    MakeObjectsGarbage(old_root_size, old_root_size + (root_size_ - old_root_size) / 2);

    // Only the saved (promoted) part counts as tenured-allocated.
    report.allocated_count = r.saved_count;
    report.allocated_bytes = r.saved_bytes;
    report.saved_count = r.saved_count / 2;
    report.saved_bytes = r.saved_bytes / 2;

    // Another way - by direct allocation in tenured if possible
    auto r2 = MakeAllocations<TargetSpace::TENURED_REGULAR, true>();

    report.allocated_count += r2.allocated_count;
    report.allocated_bytes += r2.allocated_bytes;
    report.saved_count += r2.saved_count;
    report.saved_bytes += r2.saved_bytes;

    // Large objects are also tenured in terms of gen memstats
    auto r3 = MakeAllocations<TargetSpace::TENURED_LARGE, true>();

    report.allocated_count += r3.allocated_count;
    report.allocated_bytes += r3.allocated_bytes;
    report.saved_count += r3.saved_count;
    report.saved_bytes += r3.saved_bytes;

    auto r4 = MakeAllocations<TargetSpace::HUMONGOUS, true>();

    report.allocated_count += r4.allocated_count;
    report.allocated_bytes += r4.allocated_bytes;
    report.saved_count += r4.saved_count;
    report.saved_bytes += r4.saved_bytes;
    return report;
}
529 
530 template <typename T>
GetGenMemStatsDetails(T gms)531 MemStatsGenGCTest::RealStatsLocations MemStatsGenGCTest::GetGenMemStatsDetails(T gms)
532 {
533     RealStatsLocations loc;
534     loc.young_freed_objects_count_ = &gms->young_free_object_count_;
535     loc.young_freed_objects_size_ = &gms->young_free_object_size_;
536     loc.young_moved_objects_count_ = &gms->young_move_object_count_;
537     loc.young_moved_objects_size_ = &gms->young_move_object_size_;
538     loc.tenured_freed_objects_count_ = &gms->tenured_free_object_count_;
539     loc.tenured_freed_objects_size_ = &gms->tenured_free_object_size_;
540     return loc;
541 }
542 
// For each generational GC type, performs single-object allocations in every
// target space (young garbage, young survivor, tenured regular/large/humongous
// garbage) and checks that the per-collection memstats counters match exactly.
TEST_F(MemStatsGenGCTest, TrivialStatsGenGcTest)
{
    for (int gctype_idx = 0; static_cast<GCType>(gctype_idx) <= GCType::GCTYPE_LAST; ++gctype_idx) {
        GCType gc_type = static_cast<GCType>(gctype_idx);
        if (gc_type == GCType::INVALID_GC) {
            continue;
        }
        if (!IsGenerationalGCType(gc_type)) {
            continue;
        }
        std::string gctype = static_cast<std::string>(GCStringFromType(gc_type));
        SetupRuntime(gctype);

        {
            HandleScope<ObjectHeader *> scope(thread_);
            PrepareTest<panda::PandaAssemblyLanguageConfig>();
            auto *gen_ms = GetGenMemStats<panda::PandaAssemblyLanguageConfig>();
            RealStatsLocations loc = GetGenMemStatsDetails<decltype(gen_ms)>(gen_ms);

            gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));  // Heap doesn't have unexpected garbage now

            // Make a trivial allocation of unaligned size and make it garbage
            auto r = MakeAllocations<TargetSpace::YOUNG, false, true>();
            gc_->WaitForGCInManaged(GCTask(GCTaskCause::YOUNG_GC_CAUSE));
            // GC #1 was the initial full GC above, #2 is this young GC.
            ASSERT_EQ(2, gccnt_->count);
            ASSERT_EQ(*loc.young_freed_objects_count_, r.allocated_count);
            ASSERT_EQ(*loc.young_freed_objects_size_, r.allocated_bytes);
            ASSERT_EQ(*loc.young_moved_objects_count_, 0);
            ASSERT_EQ(*loc.young_moved_objects_size_, 0);
            ASSERT_EQ(*loc.tenured_freed_objects_count_, 0);
            ASSERT_EQ(*loc.tenured_freed_objects_size_, 0);
            ASSERT_EQ(gc_ms_->GetObjectsFreedCount(), r.allocated_count);
            ASSERT_EQ(gc_ms_->GetObjectsFreedBytes(), r.allocated_bytes);
            ASSERT_EQ(gc_ms_->GetLargeObjectsFreedCount(), 0);
            ASSERT_EQ(gc_ms_->GetLargeObjectsFreedBytes(), 0);

            // Make a trivial allocation of unaligned size and make it alive
            r = MakeAllocations<TargetSpace::YOUNG, true, true>();
            gc_->WaitForGCInManaged(GCTask(GCTaskCause::YOUNG_GC_CAUSE));
            ASSERT_EQ(3, gccnt_->count);
            // Nothing freed: the saved object must be moved (promoted) instead.
            ASSERT_EQ(*loc.young_freed_objects_count_, 0);
            ASSERT_EQ(*loc.young_freed_objects_size_, 0);
            ASSERT_EQ(*loc.young_moved_objects_count_, r.saved_count);
            ASSERT_EQ(*loc.young_moved_objects_size_, r.saved_bytes);
            ASSERT_EQ(*loc.tenured_freed_objects_count_, 0);
            ASSERT_EQ(*loc.tenured_freed_objects_size_, 0);

            // Expecting that r.saved_bytes/count have been promoted into tenured
            // Make them garbage
            MakeObjectsGarbage(0, root_size_);
            gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
            ASSERT_EQ(4, gccnt_->count);
            ASSERT_EQ(*loc.young_freed_objects_count_, 0);
            ASSERT_EQ(*loc.young_freed_objects_size_, 0);
            ASSERT_EQ(*loc.young_moved_objects_count_, 0);
            ASSERT_EQ(*loc.young_moved_objects_size_, 0);
            ASSERT_EQ(*loc.tenured_freed_objects_count_, r.saved_count);
            ASSERT_EQ(*loc.tenured_freed_objects_size_, r.saved_bytes);

            // Make a trivial allocation of unaligned size in tenured space and make it garbage
            r = MakeAllocations<TargetSpace::TENURED_REGULAR, false, true>();
            gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
            ASSERT_EQ(5, gccnt_->count);
            ASSERT_EQ(*loc.young_freed_objects_count_, 0);
            ASSERT_EQ(*loc.young_freed_objects_size_, 0);
            ASSERT_EQ(*loc.young_moved_objects_count_, 0);
            ASSERT_EQ(*loc.young_moved_objects_size_, 0);
            ASSERT_EQ(*loc.tenured_freed_objects_count_, r.allocated_count);
            ASSERT_EQ(*loc.tenured_freed_objects_size_, r.allocated_bytes);

            // Make a trivial allocation of unaligned size large object and make it garbage
            r = MakeAllocations<TargetSpace::TENURED_LARGE, false, true>();
            gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
            ASSERT_EQ(6, gccnt_->count);
            ASSERT_EQ(*loc.young_freed_objects_count_, 0);
            ASSERT_EQ(*loc.young_freed_objects_size_, 0);
            ASSERT_EQ(*loc.young_moved_objects_count_, 0);
            ASSERT_EQ(*loc.young_moved_objects_size_, 0);
            ASSERT_EQ(*loc.tenured_freed_objects_count_, r.allocated_count);
            ASSERT_EQ(*loc.tenured_freed_objects_size_, r.allocated_bytes);

            // Humongous garbage: no GC-count assertion here because
            // MakeAllocations may return early for some configurations.
            r = MakeAllocations<TargetSpace::HUMONGOUS, false, true>();
            gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
            ASSERT_EQ(*loc.young_freed_objects_count_, 0);
            ASSERT_EQ(*loc.young_freed_objects_size_, 0);
            ASSERT_EQ(*loc.young_moved_objects_count_, 0);
            ASSERT_EQ(*loc.young_moved_objects_size_, 0);
            ASSERT_EQ(*loc.tenured_freed_objects_count_, r.allocated_count);
            ASSERT_EQ(*loc.tenured_freed_objects_size_, r.allocated_bytes);
        }
        ResetRuntime();
    }
}
636 
// For each generational GC type: allocate in young space with a surviving
// subsequence, run one young GC, and check that the freed/moved counters match
// the allocation report exactly.
TEST_F(MemStatsGenGCTest, YoungStatsGenGcTest)
{
    for (int gctype_idx = 0; static_cast<GCType>(gctype_idx) <= GCType::GCTYPE_LAST; ++gctype_idx) {
        GCType gc_type = static_cast<GCType>(gctype_idx);
        if (gc_type == GCType::INVALID_GC) {
            continue;
        }
        if (!IsGenerationalGCType(gc_type)) {
            continue;
        }
        std::string gctype = static_cast<std::string>(GCStringFromType(gc_type));
        SetupRuntime(gctype);

        {
            HandleScope<ObjectHeader *> scope(thread_);
            PrepareTest<panda::PandaAssemblyLanguageConfig>();
            auto *gen_ms = GetGenMemStats<panda::PandaAssemblyLanguageConfig>();
            RealStatsLocations loc = GetGenMemStatsDetails<decltype(gen_ms)>(gen_ms);

            gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
            // Young shall be empty now.
            auto r = MakeAllocations<TargetSpace::YOUNG, true>();
            gc_->WaitForGCInManaged(GCTask(GCTaskCause::YOUNG_GC_CAUSE));

            // Non-saved objects are freed, saved ones are moved; tenured untouched.
            ASSERT_EQ(*loc.young_freed_objects_count_, r.allocated_count - r.saved_count);
            ASSERT_EQ(*loc.young_freed_objects_size_, r.allocated_bytes - r.saved_bytes);
            ASSERT_EQ(*loc.young_moved_objects_count_, r.saved_count);
            ASSERT_EQ(*loc.young_moved_objects_size_, r.saved_bytes);
            ASSERT_EQ(*loc.tenured_freed_objects_count_, 0);
            ASSERT_EQ(*loc.tenured_freed_objects_size_, 0);
        }

        ResetRuntime();
    }
}
671 
// Verifies the tenured-space statistics reported after *full* collections for
// every generational GC type: objects promoted to tenured and later made
// garbage must be reflected exactly in the freed-count/freed-bytes counters,
// while the (empty) young space must report all-zero stats.
TEST_F(MemStatsGenGCTest, TenuredStatsFullGenGcTest)
{
    // Iterate over every GC type the runtime knows about and test each
    // generational one in a freshly set-up runtime.
    for (int gctype_idx = 0; static_cast<GCType>(gctype_idx) <= GCType::GCTYPE_LAST; ++gctype_idx) {
        if (static_cast<GCType>(gctype_idx) == GCType::INVALID_GC) {
            continue;
        }
        // Young/tenured counters only exist for generational collectors.
        if (!IsGenerationalGCType(static_cast<GCType>(gctype_idx))) {
            continue;
        }
        std::string gctype = static_cast<std::string>(GCStringFromType(static_cast<GCType>(gctype_idx)));
        SetupRuntime(gctype);

        {
            // Scope keeps all handles alive only for the duration of this run.
            HandleScope<ObjectHeader *> scope(thread_);
            PrepareTest<panda::PandaAssemblyLanguageConfig>();
            auto *gen_ms = GetGenMemStats<panda::PandaAssemblyLanguageConfig>();
            // loc holds pointers into the live stats counters; each completed
            // GC overwrites them with that collection's numbers.
            RealStatsLocations loc = GetGenMemStatsDetails<decltype(gen_ms)>(gen_ms);

            gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
            // Young shall be empty now.

            // Running totals of objects/bytes that survive into tenured space.
            uint32_t t_count = 0;
            uint64_t t_bytes = 0;

            for (int i = 0; i < FULL_TEST_ALLOC_TIMES; ++i) {
                [[maybe_unused]] int gc_cnt = gccnt_->count;
                auto r = HelpAllocTenured();
                // HelpAllocTenured shall trigger young gc, which is allowed to be mixed
                ASSERT(gc_cnt + 1 == gccnt_->count);
                // Remember tenured frees already attributed to that young GC,
                // so they can be added to the full GC's numbers below.
                auto tfoc_y = *loc.tenured_freed_objects_count_;
                auto tfos_y = *loc.tenured_freed_objects_size_;
                ASSERT(r.allocated_count > 0);
                gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
                // Young must be empty: nothing freed or moved there.
                ASSERT_EQ(*loc.young_freed_objects_count_, 0);
                ASSERT_EQ(*loc.young_freed_objects_size_, 0);
                ASSERT_EQ(*loc.young_moved_objects_count_, 0);
                ASSERT_EQ(*loc.young_moved_objects_size_, 0);
                // All non-surviving tenured allocations must be accounted for,
                // across both the implicit young GC and this full GC.
                ASSERT_EQ(*loc.tenured_freed_objects_count_ + tfoc_y, r.allocated_count - r.saved_count);
                ASSERT_EQ(*loc.tenured_freed_objects_size_ + tfos_y, r.allocated_bytes - r.saved_bytes);
                t_count += r.saved_count;
                t_bytes += r.saved_bytes;
            }

            // Empty everything
            auto ry = MakeAllocations<TargetSpace::YOUNG, false>();
            MakeObjectsGarbage(0, root_size_);

            gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
            // The dropped young allocations are freed in young; every object
            // previously promoted to tenured is freed there.
            ASSERT_EQ(*loc.young_freed_objects_count_, ry.allocated_count);
            ASSERT_EQ(*loc.young_freed_objects_size_, ry.allocated_bytes);
            ASSERT_EQ(*loc.young_moved_objects_count_, 0);
            ASSERT_EQ(*loc.young_moved_objects_size_, 0);
            ASSERT_EQ(*loc.tenured_freed_objects_count_, t_count);
            ASSERT_EQ(*loc.tenured_freed_objects_size_, t_bytes);

            // A further full GC on an already-empty heap must report zeros.
            gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
            ASSERT_EQ(*loc.young_freed_objects_count_, 0);
            ASSERT_EQ(*loc.young_freed_objects_size_, 0);
            ASSERT_EQ(*loc.young_moved_objects_count_, 0);
            ASSERT_EQ(*loc.young_moved_objects_size_, 0);
            ASSERT_EQ(*loc.tenured_freed_objects_count_, 0);
            ASSERT_EQ(*loc.tenured_freed_objects_size_, 0);
        }

        ResetRuntime();
    }
}
739 
// Verifies the tenured-space statistics reported by *mixed* collections
// (currently G1 only). A mixed GC may reclaim only part of the tenured
// garbage per cycle, so each phase loops until a collection frees nothing
// more, accumulating the freed counts, and then checks the totals against
// the expected amount of garbage.
TEST_F(MemStatsGenGCTest, TenuredStatsMixGenGcTest)
{
    for (int gctype_idx = 0; static_cast<GCType>(gctype_idx) <= GCType::GCTYPE_LAST; ++gctype_idx) {
        if (static_cast<GCType>(gctype_idx) == GCType::INVALID_GC) {
            continue;
        }
        if (!IsGenerationalGCType(static_cast<GCType>(gctype_idx))) {
            continue;
        }
        if (static_cast<GCType>(gctype_idx) == GCType::GEN_GC) {
            // Doesn't have mixed GC collection
            continue;
        }
        std::string gctype = static_cast<std::string>(GCStringFromType(static_cast<GCType>(gctype_idx)));
        SetupRuntime(gctype);

        {
            HandleScope<ObjectHeader *> scope(thread_);
            PrepareTest<panda::PandaAssemblyLanguageConfig>();
            // Pick the task cause that triggers a mixed collection for the
            // GC type actually running; anything else is a test bug.
            GCTaskCause mixed_cause;
            switch (gc_type_) {
                case GCType::GEN_GC: {
                    UNREACHABLE();  // Doesn't have mixed GC collection
                }
                case GCType::G1_GC: {
                    mixed_cause = MIXED_G1_GC_CAUSE;
                    break;
                }
                default:
                    UNREACHABLE();  // NIY
            }
            auto *gen_ms = GetGenMemStats<panda::PandaAssemblyLanguageConfig>();
            // loc holds pointers into the live stats counters; each completed
            // GC overwrites them with that collection's numbers.
            RealStatsLocations loc = GetGenMemStatsDetails<decltype(gen_ms)>(gen_ms);

            gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
            // Young shall be empty now.

            // Running totals of objects/bytes that survive into tenured space.
            uint32_t t_count = 0;
            uint64_t t_bytes = 0;

            {
                // Accumulated garbage actually reclaimed vs. what the
                // allocation helper says should have died.
                uint32_t dead_count = 0;
                uint64_t dead_bytes = 0;
                uint32_t expected_dead_count = 0;
                uint64_t expected_dead_bytes = 0;
                for (int i = 0; i < MIX_TEST_ALLOC_TIMES; ++i) {
                    [[maybe_unused]] int gc_cnt = gccnt_->count;
                    auto r = HelpAllocTenured();
                    // HelpAllocTenured shall trigger young gc, which is allowed to be mixed
                    ASSERT(gc_cnt + 1 == gccnt_->count);
                    // Count tenured frees attributed to that implicit young GC.
                    dead_count += *loc.tenured_freed_objects_count_;
                    dead_bytes += *loc.tenured_freed_objects_size_;
                    // Mixed can free not all the tenured garbage, so run it until it stalls
                    do {
                        gc_->WaitForGCInManaged(GCTask(mixed_cause));
                        // Young stays empty throughout: nothing freed or moved.
                        ASSERT_EQ(*loc.young_freed_objects_count_, 0);
                        ASSERT_EQ(*loc.young_freed_objects_size_, 0);
                        ASSERT_EQ(*loc.young_moved_objects_count_, 0);
                        ASSERT_EQ(*loc.young_moved_objects_size_, 0);
                        dead_count += *loc.tenured_freed_objects_count_;
                        dead_bytes += *loc.tenured_freed_objects_size_;
                    } while (*loc.tenured_freed_objects_count_ != 0);
                    t_count += r.saved_count;
                    t_bytes += r.saved_bytes;
                    expected_dead_count += r.allocated_count - r.saved_count;
                    expected_dead_bytes += r.allocated_bytes - r.saved_bytes;
                }
                // A final full GC mops up whatever the mixed cycles left behind.
                gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
                ASSERT_EQ(*loc.young_freed_objects_count_, 0);
                ASSERT_EQ(*loc.young_freed_objects_size_, 0);
                ASSERT_EQ(*loc.young_moved_objects_count_, 0);
                ASSERT_EQ(*loc.young_moved_objects_size_, 0);
                dead_count += *loc.tenured_freed_objects_count_;
                dead_bytes += *loc.tenured_freed_objects_size_;
                // Everything that didn't survive must now be accounted for.
                ASSERT_EQ(dead_count, expected_dead_count);
                ASSERT_EQ(dead_bytes, expected_dead_bytes);
            }

            // Empty everything
            auto ry = MakeAllocations<TargetSpace::YOUNG, false>();
            MakeObjectsGarbage(0, root_size_);
            {
                uint32_t dead_count = 0;
                uint64_t dead_bytes = 0;
                do {
                    gc_->WaitForGCInManaged(GCTask(mixed_cause));
                    // Only the first mixed GC sees the young garbage; ry is
                    // zeroed below so later iterations expect empty young.
                    ASSERT_EQ(*loc.young_freed_objects_count_, ry.allocated_count);
                    ASSERT_EQ(*loc.young_freed_objects_size_, ry.allocated_bytes);
                    ASSERT_EQ(*loc.young_moved_objects_count_, 0);
                    ASSERT_EQ(*loc.young_moved_objects_size_, 0);
                    dead_count += *loc.tenured_freed_objects_count_;
                    dead_bytes += *loc.tenured_freed_objects_size_;
                    ry.allocated_count = 0;
                    ry.allocated_bytes = 0;
                } while (*loc.tenured_freed_objects_count_ != 0);
                gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
                ASSERT_EQ(*loc.young_freed_objects_count_, 0);
                ASSERT_EQ(*loc.young_freed_objects_size_, 0);
                ASSERT_EQ(*loc.young_moved_objects_count_, 0);
                ASSERT_EQ(*loc.young_moved_objects_size_, 0);
                dead_count += *loc.tenured_freed_objects_count_;
                dead_bytes += *loc.tenured_freed_objects_size_;
                // All previously promoted objects must have been reclaimed.
                ASSERT_EQ(dead_count, t_count);
                ASSERT_EQ(dead_bytes, t_bytes);
            }
            // A further full GC on an already-empty heap must report zeros.
            gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
            ASSERT_EQ(*loc.young_freed_objects_count_, 0);
            ASSERT_EQ(*loc.young_freed_objects_size_, 0);
            ASSERT_EQ(*loc.young_moved_objects_count_, 0);
            ASSERT_EQ(*loc.young_moved_objects_size_, 0);
            ASSERT_EQ(*loc.tenured_freed_objects_count_, 0);
            ASSERT_EQ(*loc.tenured_freed_objects_size_, 0);
        }

        ResetRuntime();
    }
}
857 }  // namespace panda::mem::test
858