1 /*
2 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15 #include <array>
16 #include <atomic>
17 #include <chrono>
18
19 #include "gtest/gtest.h"
20 #include "iostream"
21 #include "runtime/include/coretypes/string.h"
22 #include "runtime/include/runtime.h"
23 #include "runtime/include/panda_vm.h"
24 #include "runtime/handle_scope-inl.h"
25 #include "runtime/mem/gc/g1/g1-allocator.h"
26 #include "runtime/mem/gc/generational-gc-base.h"
27 #include "runtime/mem/malloc-proxy-allocator-inl.h"
28 #include "runtime/mem/mem_stats.h"
29 #include "runtime/mem/mem_stats_default.h"
30 #include "runtime/mem/runslots_allocator-inl.h"
31
32 namespace panda::mem::test {
33 class MemStatsGenGCTest : public testing::Test {
34 public:
35 using ObjVec = PandaVector<ObjectHeader *>;
36 using HanVec = PandaVector<VMHandle<ObjectHeader *> *>;
37 static constexpr size_t ROOT_MAX_SIZE = 100000U;
38 static constexpr int MIX_TEST_ALLOC_TIMES = 5;
39 static constexpr int FULL_TEST_ALLOC_TIMES = 2;
40
41 static constexpr GCTaskCause MIXED_G1_GC_CAUSE = GCTaskCause::YOUNG_GC_CAUSE;
42 static constexpr GCTaskCause FULL_GC_CAUSE = GCTaskCause::EXPLICIT_CAUSE;
43
44 enum class TargetSpace {
45 YOUNG,
46 TENURED_REGULAR,
47 /*
48 * Some allocators have Large objects, it's not the same as Humongous. Objects can be less than Humongous but be
49 * allocated directly in the tenured space for example.
50 */
51 TENURED_LARGE,
52 HUMONGOUS
53 };
54
55 class GCCounter : public GCListener {
56 public:
GCStarted(size_t heap_size)57 void GCStarted([[maybe_unused]] size_t heap_size) override
58 {
59 count++;
60 }
61
GCFinished(const GCTask & task,size_t heap_size_before_gc,size_t heap_size)62 void GCFinished([[maybe_unused]] const GCTask &task, [[maybe_unused]] size_t heap_size_before_gc,
63 [[maybe_unused]] size_t heap_size) override
64 {
65 }
66
67 int count = 0;
68 };
69
70 struct MemOpReport {
71 size_t allocated_count;
72 size_t allocated_bytes;
73 size_t saved_count;
74 size_t saved_bytes;
75 };
76
77 struct RealStatsLocations {
78 uint32_t *young_freed_objects_count_;
79 uint64_t *young_freed_objects_size_;
80 uint32_t *young_moved_objects_count_;
81 uint64_t *young_moved_objects_size_;
82 uint32_t *tenured_freed_objects_count_;
83 uint64_t *tenured_freed_objects_size_;
84 };
85
SetupRuntime(const std::string & gc_type)86 void SetupRuntime(const std::string &gc_type)
87 {
88 RuntimeOptions options;
89 options.SetShouldLoadBootPandaFiles(false);
90 options.SetShouldInitializeIntrinsics(false);
91 options.SetUseTlabForAllocations(false);
92 options.SetGcType(gc_type);
93 options.SetGcTriggerType("debug-never");
94 options.SetRunGcInPlace(true);
95 options.SetCompilerEnableJit(false);
96 [[maybe_unused]] bool success = Runtime::Create(options);
97 ASSERT(success);
98
99 thread_ = panda::MTManagedThread::GetCurrent();
100 gc_type_ = Runtime::GetGCType(options, plugins::RuntimeTypeToLang(options.GetRuntimeType()));
101 [[maybe_unused]] auto gc = thread_->GetVM()->GetGC();
102 ASSERT(gc->GetType() == panda::mem::GCTypeFromString(gc_type));
103 ASSERT(gc->IsGenerational());
104 thread_->ManagedCodeBegin();
105 }
106
ResetRuntime()107 void ResetRuntime()
108 {
109 DeleteHandles();
110 internal_allocator_->Delete(gccnt_);
111 thread_->ManagedCodeEnd();
112 bool success = Runtime::Destroy();
113 ASSERT_TRUE(success) << "Cannot destroy Runtime";
114 }
115
116 template <typename F, size_t repeat, MemStatsGenGCTest::TargetSpace SPACE>
117 ObjVec MakeAllocationsWithRepeats(size_t min_size, size_t max_size, size_t count, size_t *allocated,
118 size_t *requested, F space_checker, bool check_oom_in_tenured);
119
120 void InitRoot();
121 void MakeObjectsAlive(ObjVec objects, int every = 1);
122 void MakeObjectsPermAlive(ObjVec objects, int every = 1);
123 void MakeObjectsGarbage(size_t start_idx, size_t after_end_idx, int every = 1);
124 void DumpHandles();
125 void DumpAliveObjects();
126 void DeleteHandles();
127 bool IsInYoung(uintptr_t addr);
128 MemOpReport HelpAllocTenured();
129
130 template <class LanguageConfig>
131 void PrepareTest();
132
133 template <class LanguageConfig>
134 typename GenerationalGC<LanguageConfig>::MemStats *GetGenMemStats();
135
136 // Allocate a series of objects in a specific space. If DO_SAVE is true, a subsequence of objects
137 // is going to be kept alive and put into the roots array this->root_
138 // If IS_SINGLE is true, then only 1 object is allocated of unaligned size
139 // If IS_SINGLE is false, then an array of objects of different sizes is allocated in triplets twice
140 // Saved subsequence contains 2 equal subsequences of objects (2 of 3 objs in each triplets are garbage)
141 template <MemStatsGenGCTest::TargetSpace SPACE, bool DO_SAVE = false, bool IS_SIMPLE = false>
142 typename MemStatsGenGCTest::MemOpReport MakeAllocations();
143
144 template <typename T>
145 RealStatsLocations GetGenMemStatsDetails(T gms);
146
TearDown()147 void TearDown() override {}
148
149 panda::MTManagedThread *thread_;
150 GCType gc_type_;
151
152 LanguageContext ctx_ {nullptr};
153 ObjectAllocatorBase *object_allocator_;
154 mem::InternalAllocatorPtr internal_allocator_;
155 PandaVM *vm_;
156 GC *gc_;
157 std::vector<HanVec> handles_;
158 MemStatsType *ms_;
159 GCStats *gc_ms_;
160 coretypes::Array *root_ = nullptr;
161 size_t root_size_ = 0;
162 GCCounter *gccnt_;
163 };
164
165 template <typename F, size_t repeat, MemStatsGenGCTest::TargetSpace SPACE>
MakeAllocationsWithRepeats(size_t min_size,size_t max_size,size_t count,size_t * allocated,size_t * requested,F space_checker,bool check_oom_in_tenured)166 MemStatsGenGCTest::ObjVec MemStatsGenGCTest::MakeAllocationsWithRepeats(size_t min_size, size_t max_size, size_t count,
167 size_t *allocated, size_t *requested,
168 [[maybe_unused]] F space_checker,
169 bool check_oom_in_tenured)
170 {
171 ASSERT(min_size <= max_size);
172 *allocated = 0;
173 *requested = 0;
174 // Create array of object templates based on count and max size
175 PandaVector<PandaString> obj_templates(count);
176 size_t obj_size = sizeof(coretypes::String) + min_size;
177 for (size_t i = 0; i < count; ++i) {
178 PandaString simple_string;
179 simple_string.resize(obj_size - sizeof(coretypes::String));
180 obj_templates[i] = std::move(simple_string);
181 obj_size += (max_size / count + i); // +i to mess with the alignment
182 if (obj_size > max_size) {
183 obj_size = max_size;
184 }
185 }
186 ObjVec result;
187 result.reserve(count * repeat);
188 for (size_t j = 0; j < count; ++j) {
189 size_t size = obj_templates[j].length() + sizeof(coretypes::String);
190 if (check_oom_in_tenured) {
191 // Leaving 5MB in tenured seems OK
192 auto free =
193 reinterpret_cast<GenerationalSpaces *>(object_allocator_->GetHeapSpace())->GetCurrentFreeTenuredSize();
194 if (size + 5000000 > free) {
195 return result;
196 }
197 }
198 for (size_t i = 0; i < repeat; ++i) {
199 coretypes::String *string_obj = coretypes::String::CreateFromMUtf8(
200 reinterpret_cast<const uint8_t *>(&obj_templates[j][0]), obj_templates[j].length(), ctx_, vm_);
201 ASSERT(string_obj != nullptr);
202 ASSERT(space_checker(ToUintPtr(string_obj)) == true);
203 if (gc_type_ == GCType::G1_GC && SPACE == TargetSpace::HUMONGOUS) {
204 // for humongous objects in G1 we calculate size of the region instead of just alignment size
205 Region *region = AddrToRegion(string_obj);
206 *allocated += region->Size();
207 } else {
208 *allocated += GetAlignedObjectSize(size);
209 }
210 *requested += size;
211 result.push_back(string_obj);
212 }
213 }
214 return result;
215 }
216
InitRoot()217 void MemStatsGenGCTest::InitRoot()
218 {
219 ClassLinker *class_linker = Runtime::GetCurrent()->GetClassLinker();
220 Class *klass = class_linker->GetExtension(
221 panda_file::SourceLang::PANDA_ASSEMBLY)->GetClass(ctx_.GetStringArrayClassDescriptor());
222 ASSERT_NE(klass, nullptr);
223 root_ = coretypes::Array::Create(klass, ROOT_MAX_SIZE);
224 root_size_ = 0;
225 MakeObjectsPermAlive({root_});
226 }
227
MakeObjectsAlive(ObjVec objects,int every)228 void MemStatsGenGCTest::MakeObjectsAlive(ObjVec objects, int every)
229 {
230 int cnt = every;
231 for (auto *obj : objects) {
232 cnt--;
233 if (cnt != 0) {
234 continue;
235 }
236 root_->Set(root_size_, obj);
237 root_size_++;
238 ASSERT(root_size_ < ROOT_MAX_SIZE);
239 cnt = every;
240 }
241 }
242
MakeObjectsGarbage(size_t start_idx,size_t after_end_idx,int every)243 void MemStatsGenGCTest::MakeObjectsGarbage(size_t start_idx, size_t after_end_idx, int every)
244 {
245 int cnt = every;
246 for (size_t i = start_idx; i < after_end_idx; ++i) {
247 cnt--;
248 if (cnt != 0) {
249 continue;
250 }
251 root_->Set(i, 0);
252 cnt = every;
253 }
254 }
255
MakeObjectsPermAlive(ObjVec objects,int every)256 void MemStatsGenGCTest::MakeObjectsPermAlive(ObjVec objects, int every)
257 {
258 HanVec result;
259 result.reserve(objects.size() / every);
260 int cnt = every;
261 for (auto *obj : objects) {
262 cnt--;
263 if (cnt != 0) {
264 continue;
265 }
266 result.push_back(internal_allocator_->New<VMHandle<ObjectHeader *>>(thread_, obj));
267 cnt = every;
268 }
269 handles_.push_back(result);
270 }
271
DumpHandles()272 void MemStatsGenGCTest::DumpHandles()
273 {
274 for (auto &hv : handles_) {
275 for (auto *handle : hv) {
276 std::cout << "vector " << (void *)&hv << " handle " << (void *)handle << " obj " << handle->GetPtr()
277 << std::endl;
278 }
279 }
280 }
281
DumpAliveObjects()282 void MemStatsGenGCTest::DumpAliveObjects()
283 {
284 std::cout << "Alive root array : " << handles_[0][0]->GetPtr() << std::endl;
285 for (size_t i = 0; i < root_size_; ++i) {
286 if (root_->Get<ObjectHeader *>(i) != nullptr) {
287 std::cout << "Alive idx " << i << " : " << root_->Get<ObjectHeader *>(i) << std::endl;
288 }
289 }
290 }
291
DeleteHandles()292 void MemStatsGenGCTest::DeleteHandles()
293 {
294 for (auto &hv : handles_) {
295 for (auto *handle : hv) {
296 internal_allocator_->Delete(handle);
297 }
298 }
299 handles_.clear();
300 }
301
// Caches per-VM pointers (allocator, GC, stats), registers the GC-counting
// listener and creates the root array. Only the Panda Assembly language
// configuration is supported.
template <class LanguageConfig>
void MemStatsGenGCTest::PrepareTest()
{
    if constexpr (std::is_same<LanguageConfig, panda::PandaAssemblyLanguageConfig>::value) {
        // Drop handles from any previous iteration before re-wiring the new runtime
        DeleteHandles();
        ctx_ = Runtime::GetCurrent()->GetLanguageContext(panda_file::SourceLang::PANDA_ASSEMBLY);
        object_allocator_ = thread_->GetVM()->GetHeapManager()->GetObjectAllocator().AsObjectAllocator();
        vm_ = Runtime::GetCurrent()->GetPandaVM();
        internal_allocator_ = Runtime::GetCurrent()->GetClassLinker()->GetAllocator();
        gc_ = vm_->GetGC();
        ms_ = vm_->GetMemStats();
        gc_ms_ = vm_->GetGCStats();
        gccnt_ = internal_allocator_->New<GCCounter>();
        gc_->AddListener(gccnt_);
        InitRoot();
    } else {
        UNREACHABLE();
    }
}
321
322 template <class LanguageConfig>
GetGenMemStats()323 typename GenerationalGC<LanguageConfig>::MemStats *MemStatsGenGCTest::GetGenMemStats()
324 {
325 // An explicit getter, because the typename has to be template-specialized
326 return &reinterpret_cast<GenerationalGC<LanguageConfig> *>(gc_)->mem_stats_;
327 }
328
IsInYoung(uintptr_t addr)329 bool MemStatsGenGCTest::IsInYoung(uintptr_t addr)
330 {
331 switch (gc_type_) {
332 case GCType::GEN_GC: {
333 return object_allocator_->IsAddressInYoungSpace(addr);
334 }
335 case GCType::G1_GC: {
336 auto mem_pool = PoolManager::GetMmapMemPool();
337 if (mem_pool->GetSpaceTypeForAddr(reinterpret_cast<ObjectHeader *>(addr)) != SpaceType::SPACE_TYPE_OBJECT) {
338 return false;
339 }
340 return Region::AddrToRegion<false>(reinterpret_cast<ObjectHeader *>(addr))->HasFlag(RegionFlag::IS_EDEN);
341 }
342 default:
343 UNREACHABLE(); // NYI
344 }
345 return false;
346 }
347
348 template <MemStatsGenGCTest::TargetSpace SPACE, bool DO_SAVE, bool IS_SINGLE>
MakeAllocations()349 typename MemStatsGenGCTest::MemOpReport MemStatsGenGCTest::MakeAllocations()
350 {
351 [[maybe_unused]] int gc_cnt = gccnt_->count;
352 MemStatsGenGCTest::MemOpReport report;
353 report.allocated_count = 0;
354 report.allocated_bytes = 0;
355 report.saved_count = 0;
356 report.saved_bytes = 0;
357 size_t bytes = 0;
358 [[maybe_unused]] size_t raw_objects_size; // currently not tracked by memstats
359 size_t count = 0;
360 size_t min_size = 0;
361 size_t max_size = 0;
362 bool check_oom = false;
363 size_t young_size = reinterpret_cast<GenerationalSpaces *>(reinterpret_cast<ObjectAllocatorGenBase *>(
364 object_allocator_)->GetHeapSpace())->GetCurrentMaxYoungSize();
365 switch (gc_type_) {
366 case GCType::GEN_GC: {
367 auto gen_alloc = reinterpret_cast<ObjectAllocatorGen<MT_MODE_MULTI> *>(object_allocator_);
368 count = 15;
369 if constexpr (SPACE == TargetSpace::YOUNG) {
370 min_size = 0;
371 max_size = gen_alloc->GetYoungAllocMaxSize();
372 } else if constexpr (SPACE == TargetSpace::TENURED_REGULAR) {
373 min_size = gen_alloc->GetYoungAllocMaxSize() + 1;
374 max_size = gen_alloc->GetRegularObjectMaxSize();
375 if (min_size >= max_size) {
376 // Allocator configuration disallows allocating directly in this space
377 return report;
378 }
379 } else if constexpr (SPACE == TargetSpace::TENURED_LARGE) {
380 min_size = gen_alloc->GetYoungAllocMaxSize() + 1;
381 min_size = std::max(min_size, gen_alloc->GetRegularObjectMaxSize() + 1);
382 max_size = gen_alloc->GetLargeObjectMaxSize();
383 if (min_size >= max_size) {
384 // Allocator configuration disallows allocating directly in this space
385 return report;
386 }
387 } else {
388 ASSERT(SPACE == TargetSpace::HUMONGOUS);
389 count = 3;
390 min_size = gen_alloc->GetYoungAllocMaxSize() + 1;
391 min_size = std::max(min_size, gen_alloc->GetRegularObjectMaxSize() + 1);
392 min_size = std::max(min_size, gen_alloc->GetLargeObjectMaxSize() + 1);
393 max_size = min_size * 3;
394 check_oom = true;
395 }
396 break;
397 }
398 case GCType::G1_GC: {
399 auto g1_alloc = reinterpret_cast<ObjectAllocatorG1<MT_MODE_MULTI> *>(object_allocator_);
400 count = 15;
401 if constexpr (SPACE == TargetSpace::YOUNG) {
402 min_size = 0;
403 max_size = g1_alloc->GetYoungAllocMaxSize();
404 } else if constexpr (SPACE == TargetSpace::TENURED_REGULAR) {
405 min_size = g1_alloc->GetYoungAllocMaxSize() + 1;
406 max_size = g1_alloc->GetRegularObjectMaxSize();
407 if (min_size >= max_size) {
408 // Allocator configuration disallows allocating directly in this space
409 return report;
410 }
411 } else if constexpr (SPACE == TargetSpace::TENURED_LARGE) {
412 min_size = g1_alloc->GetYoungAllocMaxSize() + 1;
413 min_size = std::max(min_size, g1_alloc->GetRegularObjectMaxSize() + 1);
414 max_size = g1_alloc->GetLargeObjectMaxSize();
415 if (min_size >= max_size) {
416 // Allocator configuration disallows allocating directly in this space
417 return report;
418 }
419 } else {
420 ASSERT(SPACE == TargetSpace::HUMONGOUS);
421 count = 3;
422 min_size = g1_alloc->GetYoungAllocMaxSize() + 1;
423 min_size = std::max(min_size, g1_alloc->GetRegularObjectMaxSize() + 1);
424 min_size = std::max(min_size, g1_alloc->GetLargeObjectMaxSize() + 1);
425 max_size = min_size * 3;
426 check_oom = true;
427 }
428 break;
429 }
430 default:
431 UNREACHABLE();
432 }
433
434 auto space_check = [&SPACE](uintptr_t addr) -> bool {
435 if constexpr (SPACE == TargetSpace::YOUNG) {
436 return IsInYoung(addr);
437 } else if constexpr (SPACE == TargetSpace::TENURED_REGULAR) {
438 return !IsInYoung(addr);
439 } else if constexpr (SPACE == TargetSpace::TENURED_LARGE) {
440 return !IsInYoung(addr);
441 } else if constexpr (SPACE == TargetSpace::HUMONGOUS) {
442 return !IsInYoung(addr);
443 }
444 UNREACHABLE();
445 };
446
447 if constexpr (SPACE == TargetSpace::YOUNG) {
448 // To prevent Young GC collection while we're allocating
449 max_size = std::min(young_size / (count * 6), max_size);
450 }
451
452 if (IS_SINGLE) {
453 ObjVec ov1 = MakeAllocationsWithRepeats<decltype(space_check), 1, SPACE>(
454 min_size + 1, max_size, 1, &bytes, &raw_objects_size, space_check, check_oom);
455 report.allocated_count += 1;
456 report.allocated_bytes += bytes;
457 if constexpr (DO_SAVE) {
458 MakeObjectsAlive(ov1, 1);
459 report.saved_count = report.allocated_count;
460 report.saved_bytes = report.allocated_bytes;
461 }
462 } else {
463 ObjVec ov1 = MakeAllocationsWithRepeats<decltype(space_check), 3, SPACE>(
464 min_size, max_size, count, &bytes, &raw_objects_size, space_check, check_oom);
465 report.allocated_count += count * 3;
466 report.allocated_bytes += bytes;
467 ObjVec ov2 = MakeAllocationsWithRepeats<decltype(space_check), 3, SPACE>(
468 min_size, max_size, count, &bytes, &raw_objects_size, space_check, check_oom);
469 report.allocated_count += count * 3;
470 report.allocated_bytes += bytes;
471 if constexpr (DO_SAVE) {
472 MakeObjectsAlive(ov1, 3);
473 MakeObjectsAlive(ov2, 3);
474 report.saved_count = report.allocated_count / 3;
475 report.saved_bytes = report.allocated_bytes / 3;
476 }
477 }
478
479 // We must not have uncounted GCs
480 ASSERT(gc_cnt == gccnt_->count);
481 return report;
482 }
483
HelpAllocTenured()484 typename MemStatsGenGCTest::MemOpReport MemStatsGenGCTest::HelpAllocTenured()
485 {
486 MemStatsGenGCTest::MemOpReport report;
487 report.allocated_count = 0;
488 report.allocated_bytes = 0;
489 report.saved_count = 0;
490 report.saved_bytes = 0;
491
492 auto old_root_size = root_size_;
493
494 // One way to get objects into tenured space - by promotion
495 auto r = MakeAllocations<TargetSpace::YOUNG, true>();
496 gc_->WaitForGCInManaged(GCTask(GCTaskCause::YOUNG_GC_CAUSE));
497 MakeObjectsGarbage(old_root_size, old_root_size + (root_size_ - old_root_size) / 2);
498
499 report.allocated_count = r.saved_count;
500 report.allocated_bytes = r.saved_bytes;
501 report.saved_count = r.saved_count / 2;
502 report.saved_bytes = r.saved_bytes / 2;
503
504 // Another way - by direct allocation in tenured if possible
505 auto r2 = MakeAllocations<TargetSpace::TENURED_REGULAR, true>();
506
507 report.allocated_count += r2.allocated_count;
508 report.allocated_bytes += r2.allocated_bytes;
509 report.saved_count += r2.saved_count;
510 report.saved_bytes += r2.saved_bytes;
511
512 // Large objects are also tenured in terms of gen memstats
513 auto r3 = MakeAllocations<TargetSpace::TENURED_LARGE, true>();
514
515 report.allocated_count += r3.allocated_count;
516 report.allocated_bytes += r3.allocated_bytes;
517 report.saved_count += r3.saved_count;
518 report.saved_bytes += r3.saved_bytes;
519
520 auto r4 = MakeAllocations<TargetSpace::HUMONGOUS, true>();
521
522 report.allocated_count += r4.allocated_count;
523 report.allocated_bytes += r4.allocated_bytes;
524 report.saved_count += r4.saved_count;
525 report.saved_bytes += r4.saved_bytes;
526 return report;
527 }
528
529 template <typename T>
GetGenMemStatsDetails(T gms)530 MemStatsGenGCTest::RealStatsLocations MemStatsGenGCTest::GetGenMemStatsDetails(T gms)
531 {
532 RealStatsLocations loc;
533 loc.young_freed_objects_count_ = &gms->young_free_object_count_;
534 loc.young_freed_objects_size_ = &gms->young_free_object_size_;
535 loc.young_moved_objects_count_ = &gms->young_move_object_count_;
536 loc.young_moved_objects_size_ = &gms->young_move_object_size_;
537 loc.tenured_freed_objects_count_ = &gms->tenured_free_object_count_;
538 loc.tenured_freed_objects_size_ = &gms->tenured_free_object_size_;
539 return loc;
540 }
541
// For every generational GC type: allocate single objects in each target space,
// run a GC, and check the per-generation freed/moved counters exactly.
TEST_F(MemStatsGenGCTest, TrivialStatsGenGcTest)
{
    for (int gctype_idx = 0; static_cast<GCType>(gctype_idx) <= GCType::GCTYPE_LAST; ++gctype_idx) {
        GCType gc_type = static_cast<GCType>(gctype_idx);
        if (gc_type == GCType::INVALID_GC) {
            continue;
        }
        if (!IsGenerationalGCType(gc_type)) {
            continue;
        }
        std::string gctype = static_cast<std::string>(GCStringFromType(gc_type));
        SetupRuntime(gctype);

        {
            HandleScope<ObjectHeader *> scope(thread_);
            PrepareTest<panda::PandaAssemblyLanguageConfig>();
            auto *gen_ms = GetGenMemStats<panda::PandaAssemblyLanguageConfig>();
            RealStatsLocations loc = GetGenMemStatsDetails<decltype(gen_ms)>(gen_ms);

            gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE)); // Heap doesn't have unexpected garbage now

            // Make a trivial allocation of unaligned size and make it garbage
            auto r = MakeAllocations<TargetSpace::YOUNG, false, true>();
            gc_->WaitForGCInManaged(GCTask(GCTaskCause::YOUNG_GC_CAUSE));
            // gccnt_ counts the initial full GC plus this young GC
            ASSERT_EQ(2, gccnt_->count);
            ASSERT_EQ(*loc.young_freed_objects_count_, r.allocated_count);
            ASSERT_EQ(*loc.young_freed_objects_size_, r.allocated_bytes);
            ASSERT_EQ(*loc.young_moved_objects_count_, 0);
            ASSERT_EQ(*loc.young_moved_objects_size_, 0);
            ASSERT_EQ(*loc.tenured_freed_objects_count_, 0);
            ASSERT_EQ(*loc.tenured_freed_objects_size_, 0);
            ASSERT_EQ(gc_ms_->GetObjectsFreedCount(), r.allocated_count);
            ASSERT_EQ(gc_ms_->GetObjectsFreedBytes(), r.allocated_bytes);
            ASSERT_EQ(gc_ms_->GetLargeObjectsFreedCount(), 0);
            ASSERT_EQ(gc_ms_->GetLargeObjectsFreedBytes(), 0);

            // Make a trivial allocation of unaligned size and make it alive
            r = MakeAllocations<TargetSpace::YOUNG, true, true>();
            gc_->WaitForGCInManaged(GCTask(GCTaskCause::YOUNG_GC_CAUSE));
            ASSERT_EQ(3, gccnt_->count);
            ASSERT_EQ(*loc.young_freed_objects_count_, 0);
            ASSERT_EQ(*loc.young_freed_objects_size_, 0);
            ASSERT_EQ(*loc.young_moved_objects_count_, r.saved_count);
            ASSERT_EQ(*loc.young_moved_objects_size_, r.saved_bytes);
            ASSERT_EQ(*loc.tenured_freed_objects_count_, 0);
            ASSERT_EQ(*loc.tenured_freed_objects_size_, 0);

            // Expecting that r.saved_bytes/count have been promoted into tenured
            // Make them garbage
            MakeObjectsGarbage(0, root_size_);
            gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
            ASSERT_EQ(4, gccnt_->count);
            ASSERT_EQ(*loc.young_freed_objects_count_, 0);
            ASSERT_EQ(*loc.young_freed_objects_size_, 0);
            ASSERT_EQ(*loc.young_moved_objects_count_, 0);
            ASSERT_EQ(*loc.young_moved_objects_size_, 0);
            ASSERT_EQ(*loc.tenured_freed_objects_count_, r.saved_count);
            ASSERT_EQ(*loc.tenured_freed_objects_size_, r.saved_bytes);

            // Make a trivial allocation of unaligned size in tenured space and make it garbage
            r = MakeAllocations<TargetSpace::TENURED_REGULAR, false, true>();
            gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
            ASSERT_EQ(5, gccnt_->count);
            ASSERT_EQ(*loc.young_freed_objects_count_, 0);
            ASSERT_EQ(*loc.young_freed_objects_size_, 0);
            ASSERT_EQ(*loc.young_moved_objects_count_, 0);
            ASSERT_EQ(*loc.young_moved_objects_size_, 0);
            ASSERT_EQ(*loc.tenured_freed_objects_count_, r.allocated_count);
            ASSERT_EQ(*loc.tenured_freed_objects_size_, r.allocated_bytes);

            // Make a trivial allocation of unaligned size large object and make it garbage
            r = MakeAllocations<TargetSpace::TENURED_LARGE, false, true>();
            gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
            ASSERT_EQ(6, gccnt_->count);
            ASSERT_EQ(*loc.young_freed_objects_count_, 0);
            ASSERT_EQ(*loc.young_freed_objects_size_, 0);
            ASSERT_EQ(*loc.young_moved_objects_count_, 0);
            ASSERT_EQ(*loc.young_moved_objects_size_, 0);
            ASSERT_EQ(*loc.tenured_freed_objects_count_, r.allocated_count);
            ASSERT_EQ(*loc.tenured_freed_objects_size_, r.allocated_bytes);

            // Humongous allocation: also accounted as tenured garbage
            r = MakeAllocations<TargetSpace::HUMONGOUS, false, true>();
            gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
            ASSERT_EQ(*loc.young_freed_objects_count_, 0);
            ASSERT_EQ(*loc.young_freed_objects_size_, 0);
            ASSERT_EQ(*loc.young_moved_objects_count_, 0);
            ASSERT_EQ(*loc.young_moved_objects_size_, 0);
            ASSERT_EQ(*loc.tenured_freed_objects_count_, r.allocated_count);
            ASSERT_EQ(*loc.tenured_freed_objects_size_, r.allocated_bytes);
        }
        ResetRuntime();
    }
}
635
// For every generational GC type: allocate into young space with a saved
// subsequence, trigger a young GC and verify freed vs moved counters.
TEST_F(MemStatsGenGCTest, YoungStatsGenGcTest)
{
    for (int gctype_idx = 0; static_cast<GCType>(gctype_idx) <= GCType::GCTYPE_LAST; ++gctype_idx) {
        // Cache the cast once, consistent with TrivialStatsGenGcTest
        GCType gc_type = static_cast<GCType>(gctype_idx);
        if (gc_type == GCType::INVALID_GC) {
            continue;
        }
        if (!IsGenerationalGCType(gc_type)) {
            continue;
        }
        std::string gctype = static_cast<std::string>(GCStringFromType(gc_type));
        SetupRuntime(gctype);

        {
            HandleScope<ObjectHeader *> scope(thread_);
            PrepareTest<panda::PandaAssemblyLanguageConfig>();
            auto *gen_ms = GetGenMemStats<panda::PandaAssemblyLanguageConfig>();
            RealStatsLocations loc = GetGenMemStatsDetails<decltype(gen_ms)>(gen_ms);

            gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
            // Young shall be empty now.
            auto r = MakeAllocations<TargetSpace::YOUNG, true>();
            gc_->WaitForGCInManaged(GCTask(GCTaskCause::YOUNG_GC_CAUSE));

            // Unsaved objects are freed; saved ones are moved (promoted)
            ASSERT_EQ(*loc.young_freed_objects_count_, r.allocated_count - r.saved_count);
            ASSERT_EQ(*loc.young_freed_objects_size_, r.allocated_bytes - r.saved_bytes);
            ASSERT_EQ(*loc.young_moved_objects_count_, r.saved_count);
            ASSERT_EQ(*loc.young_moved_objects_size_, r.saved_bytes);
            ASSERT_EQ(*loc.tenured_freed_objects_count_, 0);
            ASSERT_EQ(*loc.tenured_freed_objects_size_, 0);
        }

        ResetRuntime();
    }
}
670
// For every generational GC type: fill tenured space via HelpAllocTenured(),
// run full GCs and verify tenured freed counters, accounting for the stats of
// the young GC that HelpAllocTenured() triggers internally.
TEST_F(MemStatsGenGCTest, TenuredStatsFullGenGcTest)
{
    for (int gctype_idx = 0; static_cast<GCType>(gctype_idx) <= GCType::GCTYPE_LAST; ++gctype_idx) {
        if (static_cast<GCType>(gctype_idx) == GCType::INVALID_GC) {
            continue;
        }
        if (!IsGenerationalGCType(static_cast<GCType>(gctype_idx))) {
            continue;
        }
        std::string gctype = static_cast<std::string>(GCStringFromType(static_cast<GCType>(gctype_idx)));
        SetupRuntime(gctype);

        {
            HandleScope<ObjectHeader *> scope(thread_);
            PrepareTest<panda::PandaAssemblyLanguageConfig>();
            auto *gen_ms = GetGenMemStats<panda::PandaAssemblyLanguageConfig>();
            RealStatsLocations loc = GetGenMemStatsDetails<decltype(gen_ms)>(gen_ms);

            gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
            // Young shall be empty now.

            // Accumulated totals of objects still alive in tenured
            uint32_t t_count = 0;
            uint64_t t_bytes = 0;

            for (int i = 0; i < FULL_TEST_ALLOC_TIMES; ++i) {
                [[maybe_unused]] int gc_cnt = gccnt_->count;
                auto r = HelpAllocTenured();
                // HelpAllocTenured shall trigger young gc, which is allowed to be mixed
                ASSERT(gc_cnt + 1 == gccnt_->count);
                // Tenured stats produced by that inner young (possibly mixed) GC
                auto tfoc_y = *loc.tenured_freed_objects_count_;
                auto tfos_y = *loc.tenured_freed_objects_size_;
                ASSERT(r.allocated_count > 0);
                gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
                ASSERT_EQ(*loc.young_freed_objects_count_, 0);
                ASSERT_EQ(*loc.young_freed_objects_size_, 0);
                ASSERT_EQ(*loc.young_moved_objects_count_, 0);
                ASSERT_EQ(*loc.young_moved_objects_size_, 0);
                // Full-GC tenured frees plus the inner GC's frees cover all garbage
                ASSERT_EQ(*loc.tenured_freed_objects_count_ + tfoc_y, r.allocated_count - r.saved_count);
                ASSERT_EQ(*loc.tenured_freed_objects_size_ + tfos_y, r.allocated_bytes - r.saved_bytes);
                t_count += r.saved_count;
                t_bytes += r.saved_bytes;
            }

            // Empty everything
            auto ry = MakeAllocations<TargetSpace::YOUNG, false>();
            MakeObjectsGarbage(0, root_size_);

            gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
            ASSERT_EQ(*loc.young_freed_objects_count_, ry.allocated_count);
            ASSERT_EQ(*loc.young_freed_objects_size_, ry.allocated_bytes);
            ASSERT_EQ(*loc.young_moved_objects_count_, 0);
            ASSERT_EQ(*loc.young_moved_objects_size_, 0);
            ASSERT_EQ(*loc.tenured_freed_objects_count_, t_count);
            ASSERT_EQ(*loc.tenured_freed_objects_size_, t_bytes);

            // A second full GC over an empty heap must report all-zero stats
            gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
            ASSERT_EQ(*loc.young_freed_objects_count_, 0);
            ASSERT_EQ(*loc.young_freed_objects_size_, 0);
            ASSERT_EQ(*loc.young_moved_objects_count_, 0);
            ASSERT_EQ(*loc.young_moved_objects_size_, 0);
            ASSERT_EQ(*loc.tenured_freed_objects_count_, 0);
            ASSERT_EQ(*loc.tenured_freed_objects_size_, 0);
        }

        ResetRuntime();
    }
}
738
// Verifies tenured statistics when garbage is reclaimed by mixed GCs.
// Only G1 is exercised: GenGC has no mixed collection and is skipped.
// A mixed GC may reclaim only part of the tenured garbage, so each phase
// repeats the mixed GC until it stops freeing anything, then finishes with
// a full GC and checks the accumulated totals.
TEST_F(MemStatsGenGCTest, TenuredStatsMixGenGcTest)
{
    for (int gctype_idx = 0; static_cast<GCType>(gctype_idx) <= GCType::GCTYPE_LAST; ++gctype_idx) {
        if (static_cast<GCType>(gctype_idx) == GCType::INVALID_GC) {
            continue;
        }
        if (!IsGenerationalGCType(static_cast<GCType>(gctype_idx))) {
            continue;
        }
        if (static_cast<GCType>(gctype_idx) == GCType::GEN_GC) {
            // Doesn't have mixed GC collection
            continue;
        }
        std::string gctype = static_cast<std::string>(GCStringFromType(static_cast<GCType>(gctype_idx)));
        SetupRuntime(gctype);

        {
            HandleScope<ObjectHeader *> scope(thread_);
            PrepareTest<panda::PandaAssemblyLanguageConfig>();
            // Pick the GC cause that produces a mixed collection for this GC type
            GCTaskCause mixed_cause;
            switch (gc_type_) {
                case GCType::GEN_GC: {
                    UNREACHABLE(); // Doesn't have mixed GC collection
                }
                case GCType::G1_GC: {
                    mixed_cause = MIXED_G1_GC_CAUSE;
                    break;
                }
                default:
                    UNREACHABLE(); // NIY
            }
            auto *gen_ms = GetGenMemStats<panda::PandaAssemblyLanguageConfig>();
            RealStatsLocations loc = GetGenMemStatsDetails<decltype(gen_ms)>(gen_ms);

            gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
            // Young shall be empty now.

            // Accumulated totals of objects still alive in tenured
            uint32_t t_count = 0;
            uint64_t t_bytes = 0;

            {
                uint32_t dead_count = 0;
                uint64_t dead_bytes = 0;
                uint32_t expected_dead_count = 0;
                uint64_t expected_dead_bytes = 0;
                for (int i = 0; i < MIX_TEST_ALLOC_TIMES; ++i) {
                    [[maybe_unused]] int gc_cnt = gccnt_->count;
                    auto r = HelpAllocTenured();
                    // HelpAllocTenured shall trigger young gc, which is allowed to be mixed
                    ASSERT(gc_cnt + 1 == gccnt_->count);
                    dead_count += *loc.tenured_freed_objects_count_;
                    dead_bytes += *loc.tenured_freed_objects_size_;
                    // Mixed can free not all the tenured garbage, so run it until it stalls
                    do {
                        gc_->WaitForGCInManaged(GCTask(mixed_cause));
                        ASSERT_EQ(*loc.young_freed_objects_count_, 0);
                        ASSERT_EQ(*loc.young_freed_objects_size_, 0);
                        ASSERT_EQ(*loc.young_moved_objects_count_, 0);
                        ASSERT_EQ(*loc.young_moved_objects_size_, 0);
                        dead_count += *loc.tenured_freed_objects_count_;
                        dead_bytes += *loc.tenured_freed_objects_size_;
                    } while (*loc.tenured_freed_objects_count_ != 0);
                    t_count += r.saved_count;
                    t_bytes += r.saved_bytes;
                    expected_dead_count += r.allocated_count - r.saved_count;
                    expected_dead_bytes += r.allocated_bytes - r.saved_bytes;
                }
                // Full GC collects whatever the mixed GCs left behind
                gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
                ASSERT_EQ(*loc.young_freed_objects_count_, 0);
                ASSERT_EQ(*loc.young_freed_objects_size_, 0);
                ASSERT_EQ(*loc.young_moved_objects_count_, 0);
                ASSERT_EQ(*loc.young_moved_objects_size_, 0);
                dead_count += *loc.tenured_freed_objects_count_;
                dead_bytes += *loc.tenured_freed_objects_size_;
                ASSERT_EQ(dead_count, expected_dead_count);
                ASSERT_EQ(dead_bytes, expected_dead_bytes);
            }

            // Empty everything
            auto ry = MakeAllocations<TargetSpace::YOUNG, false>();
            MakeObjectsGarbage(0, root_size_);
            {
                uint32_t dead_count = 0;
                uint64_t dead_bytes = 0;
                do {
                    gc_->WaitForGCInManaged(GCTask(mixed_cause));
                    ASSERT_EQ(*loc.young_freed_objects_count_, ry.allocated_count);
                    ASSERT_EQ(*loc.young_freed_objects_size_, ry.allocated_bytes);
                    ASSERT_EQ(*loc.young_moved_objects_count_, 0);
                    ASSERT_EQ(*loc.young_moved_objects_size_, 0);
                    dead_count += *loc.tenured_freed_objects_count_;
                    dead_bytes += *loc.tenured_freed_objects_size_;
                    // The young garbage is gone after the first iteration
                    ry.allocated_count = 0;
                    ry.allocated_bytes = 0;
                } while (*loc.tenured_freed_objects_count_ != 0);
                gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
                ASSERT_EQ(*loc.young_freed_objects_count_, 0);
                ASSERT_EQ(*loc.young_freed_objects_size_, 0);
                ASSERT_EQ(*loc.young_moved_objects_count_, 0);
                ASSERT_EQ(*loc.young_moved_objects_size_, 0);
                dead_count += *loc.tenured_freed_objects_count_;
                dead_bytes += *loc.tenured_freed_objects_size_;
                // Everything previously saved in tenured must now be reclaimed
                ASSERT_EQ(dead_count, t_count);
                ASSERT_EQ(dead_bytes, t_bytes);
            }
            // Final GC over an empty heap must report all-zero stats
            gc_->WaitForGCInManaged(GCTask(FULL_GC_CAUSE));
            ASSERT_EQ(*loc.young_freed_objects_count_, 0);
            ASSERT_EQ(*loc.young_freed_objects_size_, 0);
            ASSERT_EQ(*loc.young_moved_objects_count_, 0);
            ASSERT_EQ(*loc.young_moved_objects_size_, 0);
            ASSERT_EQ(*loc.tenured_freed_objects_count_, 0);
            ASSERT_EQ(*loc.tenured_freed_objects_size_, 0);
        }

        ResetRuntime();
    }
}
856 } // namespace panda::mem::test
857