1 /**
2 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include <sys/mman.h>
17 #include <algorithm>
18
19 #include "libpandabase/mem/mem.h"
20 #include "libpandabase/os/mem.h"
21 #include "libpandabase/utils/asan_interface.h"
22 #include "libpandabase/utils/logger.h"
23 #include "libpandabase/utils/math_helpers.h"
24 #include "runtime/include/runtime.h"
25 #include "runtime/mem/alloc_config.h"
26 #include "runtime/mem/tlab.h"
27 #include "runtime/tests/allocator_test_base.h"
28 #include "runtime/mem/region_allocator-inl.h"
29
30 namespace panda::mem::test {
// Region allocator without a real object config: crossing-map bookkeeping only,
// so tests can allocate raw memory chunks that are not fully-formed objects.
using NonObjectRegionAllocator = RegionAllocator<EmptyAllocConfigWithCrossingMap>;

// Young-generation size used both for runtime options and as the test region space size.
static constexpr size_t YOUNG_SPACE_SIZE = 128_MB;
34
// Common fixture for region-allocator tests: boots a minimal Panda runtime
// (no boot files, no intrinsics, epsilon GC), initializes generational spaces,
// and creates a dummy class used to make allocated memory look like objects.
// RegularSpace=false disables the young space (non-movable/humongous tests).
template <typename ObjectAllocator, bool RegularSpace = true>
class RegionAllocatorTestBase : public AllocatorTest<ObjectAllocator> {
public:
    RegionAllocatorTestBase()
    {
        options_.SetShouldLoadBootPandaFiles(false);
        options_.SetShouldInitializeIntrinsics(false);
        options_.SetYoungSpaceSize(YOUNG_SPACE_SIZE);
        options_.SetHeapSizeLimit(320_MB);
        options_.SetGcType("epsilon");
        Runtime::Create(options_);
        // For tests we don't limit spaces
        size_t space_size = options_.GetHeapSizeLimit();
        size_t young_size = space_size;
        if (!RegularSpace) {
            // we don't need young space for non-movable or humongous allocator tests
            young_size = 0;
        }
        spaces_.young_space_.Initialize(young_size, young_size);
        spaces_.mem_space_.Initialize(space_size, space_size);
        spaces_.InitializePercentages(0, 100);
        spaces_.is_initialized_ = true;
        // Tests run as managed code on the current MT thread.
        thread_ = panda::MTManagedThread::GetCurrent();
        thread_->ManagedCodeBegin();
        class_linker_ = Runtime::GetCurrent()->GetClassLinker();
        auto lang = Runtime::GetCurrent()->GetLanguageContext(panda_file::SourceLang::PANDA_ASSEMBLY);
        auto *class_linker_ext = Runtime::GetCurrent()->GetClassLinker()->GetExtension(lang);
        // Dummy class: only its object size matters for allocation/iteration tests.
        test_class_ = class_linker_ext->CreateClass(nullptr, 0, 0, sizeof(panda::Class));
        test_class_->SetObjectSize(OBJECT_SIZE);
    }
    virtual ~RegionAllocatorTestBase()
    {
        thread_->ManagedCodeEnd();
        Runtime::Destroy();
    }

protected:
    static constexpr size_t OBJECT_SIZE = 128;
    // Pools are managed by the runtime's pool manager here, so the
    // AllocatorTest hooks for adding memory pools are intentionally no-ops.
    void AddMemoryPoolToAllocator([[maybe_unused]] ObjectAllocator &allocator) final {}

    void AddMemoryPoolToAllocatorProtected([[maybe_unused]] ObjectAllocator &allocator) final {}

    bool AllocatedByThisAllocator([[maybe_unused]] ObjectAllocator &allocator, [[maybe_unused]] void *mem)
    {
        return allocator.ContainObject(reinterpret_cast<ObjectHeader *>(mem));
    }

    // Stamps the dummy class into a freshly allocated header so region
    // iteration can treat the memory as a live object.
    void InitializeObjectAtMem(ObjectHeader *object)
    {
        object->SetClass(test_class_);
    }

    panda::MTManagedThread *thread_;
    ClassLinker *class_linker_;
    Class *test_class_;
    RuntimeOptions options_;
    GenerationalSpaces spaces_;
};
93
94 class RegionAllocatorTest : public RegionAllocatorTestBase<NonObjectRegionAllocator> {
95 public:
96 static constexpr size_t TEST_REGION_SPACE_SIZE = YOUNG_SPACE_SIZE;
97
GetNumFreeRegions(NonObjectRegionAllocator & allocator)98 size_t GetNumFreeRegions(NonObjectRegionAllocator &allocator)
99 {
100 return allocator.GetSpace()->GetPool()->GetFreeRegionsNumInRegionBlock();
101 }
102
RegionSize()103 size_t static constexpr RegionSize()
104 {
105 return NonObjectRegionAllocator::REGION_SIZE;
106 }
107
GetRegionsNumber()108 size_t static constexpr GetRegionsNumber()
109 {
110 return TEST_REGION_SPACE_SIZE / NonObjectRegionAllocator::REGION_SIZE;
111 }
112
113 template <RegionFlag alloc_type>
AllocateObjectWithClass(NonObjectRegionAllocator & allocator)114 void *AllocateObjectWithClass(NonObjectRegionAllocator &allocator)
115 {
116 void *mem = allocator.Alloc<alloc_type>(OBJECT_SIZE);
117 if (mem == nullptr) {
118 return nullptr;
119 }
120 InitializeObjectAtMem(static_cast<ObjectHeader *>(mem));
121 return mem;
122 }
123
AllocateRegularObject(NonObjectRegionAllocator & allocator,size_t & free_regions,size_t & free_bytes_for_cur_reg,size_t size)124 void AllocateRegularObject(NonObjectRegionAllocator &allocator, size_t &free_regions,
125 size_t &free_bytes_for_cur_reg, size_t size)
126 {
127 ASSERT_EQ(GetNumFreeRegions(allocator), free_regions);
128 size_t align_size = AlignUp(size, GetAlignmentInBytes(DEFAULT_ALIGNMENT));
129 if (free_bytes_for_cur_reg >= align_size) {
130 ASSERT_TRUE(allocator.Alloc(size) != nullptr)
131 << "fail allocate object with size " << align_size << " with free size " << free_bytes_for_cur_reg;
132 free_bytes_for_cur_reg -= align_size;
133 } else if (free_regions > 0) {
134 ASSERT_TRUE(allocator.Alloc(size) != nullptr);
135 free_regions -= 1;
136 free_bytes_for_cur_reg = NonObjectRegionAllocator::GetMaxRegularObjectSize() - align_size;
137 } else {
138 ASSERT_TRUE(allocator.Alloc(align_size) == nullptr);
139 align_size = free_bytes_for_cur_reg;
140 ASSERT(free_bytes_for_cur_reg % GetAlignmentInBytes(DEFAULT_ALIGNMENT) == 0);
141 ASSERT_TRUE(allocator.Alloc(align_size) != nullptr);
142 free_bytes_for_cur_reg = 0;
143 }
144 auto reg = allocator.GetCurrentRegion<true, RegionFlag::IS_EDEN>();
145 ASSERT_EQ(GetNumFreeRegions(allocator), free_regions);
146 ASSERT_EQ(reg->End() - reg->Top(), free_bytes_for_cur_reg);
147 }
148
AllocateLargeObject(NonObjectRegionAllocator & allocator,size_t & free_regions,size_t size)149 void AllocateLargeObject(NonObjectRegionAllocator &allocator, size_t &free_regions, size_t size)
150 {
151 ASSERT_EQ(GetNumFreeRegions(allocator), free_regions);
152 size_t alloc_size = AlignUp(size, GetAlignmentInBytes(DEFAULT_ALIGNMENT));
153 if (alloc_size + Region::HeadSize() > free_regions * RegionSize()) {
154 ASSERT_TRUE(allocator.Alloc(alloc_size) == nullptr);
155 alloc_size = std::min(alloc_size, free_regions * NonObjectRegionAllocator::GetMaxRegularObjectSize());
156 }
157 ASSERT_TRUE(allocator.Alloc(alloc_size) != nullptr);
158 free_regions -= (alloc_size + Region::HeadSize() + RegionSize() - 1) / RegionSize();
159 ASSERT_EQ(GetNumFreeRegions(allocator), free_regions);
160 }
161
162 static const int LOOP_COUNT = 100;
163 };
164
TEST_F(RegionAllocatorTest, AllocateTooMuchRegularObject)
{
    // Stack-allocated stats: the previous heap object was deleted BEFORE the
    // final Alloc call below, which still reads it (use-after-free). With
    // automatic storage, mem_stats is destroyed after the allocator.
    mem::MemStatsType mem_stats;
    NonObjectRegionAllocator allocator(&mem_stats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, TEST_REGION_SPACE_SIZE,
                                       false);
    // Each allocation is just over half a region, so it consumes a whole region.
    size_t alloc_times = GetRegionsNumber();
    for (size_t i = 0; i < alloc_times; i++) {
        ASSERT_TRUE(allocator.Alloc(allocator.GetMaxRegularObjectSize() / 2 + 1) != nullptr);
    }
    // All regions are exhausted now.
    ASSERT_TRUE(allocator.Alloc(allocator.GetMaxRegularObjectSize() / 2 + 1) == nullptr);
}
177
TEST_F(RegionAllocatorTest, AllocateTooMuchRandomRegularObject)
{
    // RAII stats object shared across all runs (replaces raw new/delete).
    mem::MemStatsType mem_stats;
    for (int i = 0; i < RegionAllocatorTest::LOOP_COUNT; i++) {
        NonObjectRegionAllocator allocator(&mem_stats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, TEST_REGION_SPACE_SIZE,
                                           false);
        size_t free_regions = GetRegionsNumber();
        size_t free_bytes_for_cur_reg = 0;
        // Fill the whole space with random-sized regular objects, tracking the
        // expected allocator state in free_regions / free_bytes_for_cur_reg.
        while (free_regions != 0 || free_bytes_for_cur_reg != 0) {
            size_t size = RandFromRange(1, allocator.GetMaxRegularObjectSize());
            AllocateRegularObject(allocator, free_regions, free_bytes_for_cur_reg, size);
        }
        // Space is full: even a one-byte allocation must fail.
        ASSERT_TRUE(allocator.Alloc(1) == nullptr);
    }
}
194
TEST_F(RegionAllocatorTest, AllocateTooMuchLargeObject)
{
    // Stack-allocated stats: previously the heap object was deleted while the
    // allocator was still alive, so the allocator's destructor could touch
    // freed memory. Automatic storage gives the correct destruction order.
    mem::MemStatsType mem_stats;
    NonObjectRegionAllocator allocator(&mem_stats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, TEST_REGION_SPACE_SIZE,
                                       false);
    // One region goes to a max-size regular object.
    ASSERT_TRUE(allocator.Alloc(allocator.GetMaxRegularObjectSize()) != nullptr);
    // Each large object (max regular size + 1) occupies two regions.
    size_t alloc_times = (GetRegionsNumber() - 1) / 2;
    for (size_t i = 0; i < alloc_times; i++) {
        ASSERT_TRUE(allocator.Alloc(allocator.GetMaxRegularObjectSize() + 1) != nullptr);
    }
    // No room for another two-region object.
    ASSERT_TRUE(allocator.Alloc(allocator.GetMaxRegularObjectSize() + 1) == nullptr);
    // The remaining single region can still hold a regular object; after that
    // the space is completely full.
    allocator.Alloc(allocator.GetMaxRegularObjectSize());
    ASSERT_TRUE(allocator.Alloc(1) == nullptr);
}
210
TEST_F(RegionAllocatorTest, AllocateTooMuchRandomLargeObject)
{
    // RAII stats object shared across all runs (replaces raw new/delete).
    mem::MemStatsType mem_stats;
    for (int i = 0; i < RegionAllocatorTest::LOOP_COUNT; i++) {
        NonObjectRegionAllocator allocator(&mem_stats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, TEST_REGION_SPACE_SIZE,
                                           false);
        // One region goes to a max-size regular object.
        ASSERT_TRUE(allocator.Alloc(allocator.GetMaxRegularObjectSize()) != nullptr);
        size_t free_regions = GetRegionsNumber() - 1;
        // Fill the remaining space with random-sized large objects.
        while (free_regions > 1) {
            size_t size =
                RandFromRange(allocator.GetMaxRegularObjectSize() + 1, 3 * allocator.GetMaxRegularObjectSize());
            AllocateLargeObject(allocator, free_regions, size);
        }
        if (free_regions == 1) {
            // A single region left can still serve one regular object.
            ASSERT_TRUE(allocator.Alloc(allocator.GetMaxRegularObjectSize()) != nullptr);
        }
        ASSERT_TRUE(allocator.Alloc(1) == nullptr);
    }
}
231
TEST_F(RegionAllocatorTest, AllocateTooMuchRandomRegularAndLargeObjectTest)
{
    // RAII stats object shared across all runs (replaces raw new/delete).
    mem::MemStatsType mem_stats;
    for (int i = 0; i < RegionAllocatorTest::LOOP_COUNT; i++) {
        NonObjectRegionAllocator allocator(&mem_stats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, TEST_REGION_SPACE_SIZE,
                                           false);
        size_t free_regions = GetRegionsNumber();
        size_t free_bytes_for_cur_reg = 0;
        // Mix regular and large allocations until the whole space is consumed.
        while (free_regions != 0 || free_bytes_for_cur_reg != 0) {
            ASSERT(free_bytes_for_cur_reg % GetAlignmentInBytes(DEFAULT_ALIGNMENT) == 0);
            size_t size = RandFromRange(1, 3 * allocator.GetMaxRegularObjectSize());
            size_t align_size = AlignUp(size, GetAlignmentInBytes(DEFAULT_ALIGNMENT));
            if (align_size <= NonObjectRegionAllocator::GetMaxRegularObjectSize()) {
                AllocateRegularObject(allocator, free_regions, free_bytes_for_cur_reg, align_size);
            } else if (free_regions > 1) {
                AllocateLargeObject(allocator, free_regions, align_size);
            }
        }
        // Space is full: even a one-byte allocation must fail.
        ASSERT_TRUE(allocator.Alloc(1) == nullptr);
    }
}
254
TEST_F(RegionAllocatorTest, AllocatedByRegionAllocatorTest)
{
    // Run the shared ContainObject/ownership scenario from AllocatorTest.
    mem::MemStatsType stats;
    NonObjectRegionAllocator region_allocator(&stats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, TEST_REGION_SPACE_SIZE,
                                              false);
    AllocatedByThisAllocatorTest(region_allocator);
}
262
TEST_F(RegionAllocatorTest, OneAlignmentAllocTest)
{
    // Exercise sizes straddling the regular-object limit with one alignment.
    static constexpr size_t DELTA = 128;
    static constexpr size_t MIN_SIZE = NonObjectRegionAllocator::GetMaxRegularObjectSize() - DELTA;
    static constexpr size_t MAX_SIZE = NonObjectRegionAllocator::GetMaxRegularObjectSize() + DELTA;
    OneAlignedAllocFreeTest<MIN_SIZE, MAX_SIZE, DEFAULT_ALIGNMENT>(1, &spaces_);
}
268
TEST_F(RegionAllocatorTest, AllocateFreeDifferentSizesTest)
{
    static constexpr size_t ELEMENTS_COUNT = 256;
    static constexpr size_t POOLS_COUNT = 1;
    // Cover both regular and large allocations around the regular-object limit.
    static constexpr size_t DELTA = 128;
    static constexpr size_t MIN_SIZE = NonObjectRegionAllocator::GetMaxRegularObjectSize() - DELTA;
    static constexpr size_t MAX_SIZE = NonObjectRegionAllocator::GetMaxRegularObjectSize() + DELTA;
    AllocateFreeDifferentSizesTest<MIN_SIZE, MAX_SIZE>(ELEMENTS_COUNT, POOLS_COUNT, &spaces_);
}
277
TEST_F(RegionAllocatorTest, RegionTLABAllocTest)
{
    static constexpr size_t ALLOC_SIZE = 512;
    static constexpr size_t ALLOC_COUNT = 5000000;
    auto thread = ManagedThread::GetCurrent();
    // Stack-allocated stats: the previous heap object was deleted while the
    // allocator was still alive (its destructor could read freed memory).
    // Declaring mem_stats before the allocator yields the right destruction order.
    mem::MemStatsType mem_stats;
    NonObjectRegionAllocator allocator(&mem_stats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, TEST_REGION_SPACE_SIZE,
                                       false);
    bool is_oom = false;
    TLAB *tlab = allocator.CreateNewTLAB(thread);
    for (size_t i = 0; i < ALLOC_COUNT; i++) {
        auto old_start_pointer = tlab->GetStartAddr();
        auto mem = tlab->Alloc(ALLOC_SIZE);
        // Current TLAB exhausted: request a fresh one and verify it is distinct.
        if (mem == nullptr) {
            auto new_tlab = allocator.CreateNewTLAB(thread);
            if (new_tlab != nullptr) {
                auto new_start_pointer = new_tlab->GetStartAddr();
                ASSERT_NE(new_start_pointer, nullptr);
                ASSERT_NE(new_start_pointer, old_start_pointer);
                ASSERT_NE(new_tlab, tlab);
                tlab = new_tlab;
                mem = tlab->Alloc(ALLOC_SIZE);
            }
        }
        if (mem == nullptr) {
            // OOM is only acceptable once every region has been handed out.
            ASSERT_EQ(GetNumFreeRegions(allocator), 0);
            is_oom = true;
            break;
        }
        ASSERT_NE(mem, nullptr);
    }
    ASSERT_EQ(is_oom, true) << "Increase the size of alloc_count to get OOM";
}
313
// Verifies exact region-level layout: co-location of small objects in one
// region, placement of large objects in pool regions, and region reuse.
TEST_F(RegionAllocatorTest, RegionPoolTest)
{
    mem::MemStatsType mem_stats;
    NonObjectRegionAllocator allocator(&mem_stats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, RegionSize() * 2, true);

    // alloc two small objects in a region
    ASSERT_EQ(GetNumFreeRegions(allocator), 2);
    auto *obj1 = reinterpret_cast<ObjectHeader *>(allocator.Alloc(1)); // one byte
    ASSERT_TRUE(obj1 != nullptr);
    ASSERT_EQ(GetNumFreeRegions(allocator), 1);
    // rounds up to two alignment units
    auto *obj2 = reinterpret_cast<ObjectHeader *>(allocator.Alloc(DEFAULT_ALIGNMENT_IN_BYTES + 2)); // two byte
    ASSERT_TRUE(obj2 != nullptr);
    ASSERT_EQ(GetNumFreeRegions(allocator), 1);

    // check that the two objects should be in a region
    ASSERT_EQ(ToUintPtr(obj2), ToUintPtr(obj1) + DEFAULT_ALIGNMENT_IN_BYTES);
    auto *region1 = allocator.GetRegion(obj1);
    ASSERT_TRUE(region1 != nullptr);
    auto *region2 = allocator.GetRegion(obj2);
    ASSERT_TRUE(region2 != nullptr);
    ASSERT_EQ(region1, region2);
    // 1 unit for obj1 + 2 units for obj2 = 3 alignment units consumed
    ASSERT_EQ(region1->Top() - region1->Begin(), 3 * DEFAULT_ALIGNMENT_IN_BYTES);

    // allocate a large object in pool(not in initial block)
    ASSERT_EQ(GetNumFreeRegions(allocator), 1);
    auto *obj3 = reinterpret_cast<ObjectHeader *>(allocator.Alloc(allocator.GetMaxRegularObjectSize() + 200));
    ASSERT_TRUE(obj3 != nullptr);
    ASSERT_EQ(GetNumFreeRegions(allocator), 1);
    auto *region3 = allocator.GetRegion(obj3);
    ASSERT_TRUE(region3 != nullptr);
    ASSERT_NE(region2, region3);
    ASSERT_TRUE(region3->HasFlag(RegionFlag::IS_LARGE_OBJECT));

    // allocate a regular object which can't be allocated in current region
    auto *obj4 = reinterpret_cast<ObjectHeader *>(
        allocator.Alloc(allocator.GetMaxRegularObjectSize() - DEFAULT_ALIGNMENT_IN_BYTES));
    ASSERT_TRUE(obj4 != nullptr);
    ASSERT_EQ(GetNumFreeRegions(allocator), 0);
    auto *region4 = allocator.GetRegion(obj4);
    ASSERT_TRUE(region4 != nullptr);
    // the new region follows region2 in the initial block
    ASSERT_EQ(ToUintPtr(region4), ToUintPtr(region2) + RegionSize());

    // the tail of region4 can still serve one alignment-unit object
    auto *obj5 = reinterpret_cast<ObjectHeader *>(allocator.Alloc(DEFAULT_ALIGNMENT_IN_BYTES));
    ASSERT_TRUE(obj5 != nullptr);
    auto *region5 = allocator.GetRegion(obj5);
    ASSERT_EQ(region4, region5);
}
361
// Checks region object iteration and the mutual exclusion between iterating
// a region and allocating into it (debug-only assertions).
TEST_F(RegionAllocatorTest, IterateOverObjectsTest)
{
    mem::MemStatsType mem_stats;
    NonObjectRegionAllocator allocator(&mem_stats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, 0, true);
    auto *obj1 = reinterpret_cast<ObjectHeader *>(allocator.Alloc(test_class_->GetObjectSize()));
    obj1->SetClass(test_class_);
    auto *obj2 = reinterpret_cast<ObjectHeader *>(allocator.Alloc(test_class_->GetObjectSize()));
    obj2->SetClass(test_class_);
    auto *region = allocator.GetRegion(obj1);
    size_t obj1_num = 0;
    size_t obj2_num = 0;
    // Iteration must visit exactly obj1 and obj2, each once, in this region.
    region->IterateOverObjects([this, obj1, obj2, region, &obj1_num, &obj2_num, &allocator](ObjectHeader *object) {
        ASSERT_TRUE(object == obj1 || object == obj2);
        ASSERT_EQ(allocator.GetRegion(object), region);
        ASSERT_EQ(object->ClassAddr<Class>(), test_class_);
        if (object == obj1) {
            obj1_num++;
        } else if (object == obj2) {
            obj2_num++;
        }

#ifndef NDEBUG
        // can't allocate an object while iterating the region
        ASSERT_DEATH(allocator.Alloc(test_class_->GetObjectSize()), "");
#endif
    });
    ASSERT_EQ(obj1_num, 1);
    ASSERT_EQ(obj2_num, 1);

#ifndef NDEBUG
    ASSERT_TRUE(region->SetAllocating(true));
    // can't iterate the region while allocating
    ASSERT_DEATH(region->IterateOverObjects([]([[maybe_unused]] ObjectHeader *object) {});, "");
    ASSERT_TRUE(region->SetAllocating(false));
#endif
}
398
TEST_F(RegionAllocatorTest, AllocateAndMoveYoungObjectsToTenured)
{
    static constexpr size_t ALLOCATION_COUNT = 10000;
    static constexpr size_t TENURED_OBJECTS_CREATION_RATE = 4;
    mem::MemStatsType mem_stats;
    NonObjectRegionAllocator allocator(&mem_stats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, TEST_REGION_SPACE_SIZE,
                                       false);
    // Populate the heap: every fourth object goes straight to tenured,
    // the rest are allocated in the young (eden) space.
    for (size_t i = 0; i < ALLOCATION_COUNT; i++) {
        void *mem = (i % TENURED_OBJECTS_CREATION_RATE == 0)
                        ? AllocateObjectWithClass<RegionFlag::IS_OLD>(allocator)
                        : AllocateObjectWithClass<RegionFlag::IS_EDEN>(allocator);
        ASSERT_TRUE(mem != nullptr);
    }
    // Promote all young objects into tenured, treating everything as alive.
    allocator.CompactAllSpecificRegions<RegionFlag::IS_EDEN, RegionFlag::IS_OLD>(
        []([[maybe_unused]] ObjectHeader *object) { return ObjectStatus::ALIVE_OBJECT; },
        []([[maybe_unused]] ObjectHeader *src, [[maybe_unused]] ObjectHeader *dst) {});
    allocator.ResetAllSpecificRegions<RegionFlag::IS_EDEN>();
    // After the move, every object must still be reachable via iteration.
    size_t object_found = 0;
    allocator.IterateOverObjects([&object_found]([[maybe_unused]] ObjectHeader *object) { object_found++; });
    ASSERT_EQ(object_found, ALLOCATION_COUNT);
}
431
// Compacts a set of tenured regions into fresh tenured regions and verifies
// object counts before/after compaction and after resetting the old regions.
TEST_F(RegionAllocatorTest, AllocateAndCompactTenuredObjects)
{
    static constexpr size_t ALLOCATION_COUNT = 7000;
    static constexpr size_t YOUNG_OBJECTS_CREATION_RATE = 100;
    mem::MemStatsType mem_stats;
    NonObjectRegionAllocator allocator(&mem_stats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, TEST_REGION_SPACE_SIZE,
                                       false);
    PandaVector<Region *> regions_vector;
    size_t tenured_object_count = 0;
    // Allocate some objects (young and tenured) in allocator
    for (size_t i = 0; i < ALLOCATION_COUNT; i++) {
        void *mem = nullptr;
        if (i % YOUNG_OBJECTS_CREATION_RATE != 0) {
            mem = AllocateObjectWithClass<RegionFlag::IS_OLD>(allocator);
            tenured_object_count++;
            // Record every distinct tenured region exactly once.
            Region *region = allocator.GetRegion(static_cast<ObjectHeader *>(mem));
            if (std::find(regions_vector.begin(), regions_vector.end(), region) == regions_vector.end()) {
                regions_vector.insert(regions_vector.begin(), region);
            }
        } else {
            mem = AllocateObjectWithClass<RegionFlag::IS_EDEN>(allocator);
        }
        ASSERT_TRUE(mem != nullptr);
    }
    ASSERT_TRUE(regions_vector.size() > 1);
    ASSERT_EQ(allocator.GetAllSpecificRegions<RegionFlag::IS_OLD>().size(), regions_vector.size());
    // Iterate over some tenured regions and compact them:
    allocator.ClearCurrentRegion<RegionFlag::IS_OLD>();
    size_t object_found = 0;
    allocator.CompactSeveralSpecificRegions<RegionFlag::IS_OLD, RegionFlag::IS_OLD>(
        regions_vector,
        [&](ObjectHeader *object) {
            (void)object;
            object_found++;
            return ObjectStatus::ALIVE_OBJECT;
        },
        []([[maybe_unused]] ObjectHeader *from, [[maybe_unused]] ObjectHeader *to) {
            // no need anything here
        });
    // The death visitor must have seen each tenured object exactly once.
    ASSERT_EQ(object_found, tenured_object_count);
    object_found = 0;
    allocator.IterateOverObjects([&](ObjectHeader *object) {
        (void)object;
        object_found++;
    });
    // Before resetting, objects exist in both source and destination regions,
    // so tenured objects are counted twice.
    ASSERT_EQ(object_found, ALLOCATION_COUNT + tenured_object_count);
    allocator.ResetSeveralSpecificRegions<RegionFlag::IS_OLD>(regions_vector);
    // Check that we have the same object amount.
    object_found = 0;
    allocator.IterateOverObjects([&](ObjectHeader *object) {
        (void)object;
        object_found++;
    });
    ASSERT_EQ(object_found, ALLOCATION_COUNT);
    // Check that we can still correctly allocate smth in tenured:
    ASSERT_TRUE(AllocateObjectWithClass<RegionFlag::IS_OLD>(allocator) != nullptr);
    // Reset tenured regions:
    allocator.ResetAllSpecificRegions<RegionFlag::IS_OLD>();
    // Check that we can still correctly allocate smth in tenured:
    ASSERT_TRUE(AllocateObjectWithClass<RegionFlag::IS_OLD>(allocator) != nullptr);
}
493
// Same as AllocateAndCompactTenuredObjects, but only mark-bitmap-marked
// objects survive compaction.
TEST_F(RegionAllocatorTest, AllocateAndCompactTenuredObjectsViaMarkedBitmap)
{
    static constexpr size_t ALLOCATION_COUNT = 7000;
    static constexpr size_t MARKED_OBJECTS_RATE = 2;
    mem::MemStatsType mem_stats;
    NonObjectRegionAllocator allocator(&mem_stats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, TEST_REGION_SPACE_SIZE,
                                       false);
    PandaVector<Region *> regions_vector;
    size_t marked_tenured_object_count = 0;
    // Allocate tenured objects, marking every second one in the region's bitmap.
    for (size_t i = 0; i < ALLOCATION_COUNT; i++) {
        void *mem = AllocateObjectWithClass<RegionFlag::IS_OLD>(allocator);
        // Check for OOM before touching mem (previously this assert came after
        // GetRegion/SetMarkBit had already dereferenced the pointer).
        ASSERT_TRUE(mem != nullptr);
        Region *region = allocator.GetRegion(static_cast<ObjectHeader *>(mem));
        if (std::find(regions_vector.begin(), regions_vector.end(), region) == regions_vector.end()) {
            regions_vector.insert(regions_vector.begin(), region);
        }
        if (i % MARKED_OBJECTS_RATE != 0) {
            region->SetMarkBit(static_cast<ObjectHeader *>(mem));
            marked_tenured_object_count++;
        }
    }
    ASSERT_TRUE(regions_vector.size() > 1);
    ASSERT_EQ(allocator.GetAllSpecificRegions<RegionFlag::IS_OLD>().size(), regions_vector.size());
    // Compact the recorded tenured regions using the mark bitmap (template arg `true`).
    allocator.ClearCurrentRegion<RegionFlag::IS_OLD>();
    size_t object_found = 0;
    allocator.CompactSeveralSpecificRegions<RegionFlag::IS_OLD, RegionFlag::IS_OLD, true>(
        regions_vector,
        [&]([[maybe_unused]] ObjectHeader *object) {
            object_found++;
            return ObjectStatus::ALIVE_OBJECT;
        },
        []([[maybe_unused]] ObjectHeader *from, [[maybe_unused]] ObjectHeader *to) {
            // stub
        });
    // Only marked objects are visited/moved.
    ASSERT_EQ(object_found, marked_tenured_object_count);
    object_found = 0;
    allocator.IterateOverObjects([&]([[maybe_unused]] ObjectHeader *object) { object_found++; });
    // Until the source regions are reset, moved objects are counted twice.
    ASSERT_EQ(object_found, ALLOCATION_COUNT + marked_tenured_object_count);
    allocator.ResetSeveralSpecificRegions<RegionFlag::IS_OLD>(regions_vector);
    // Check that we have the same object amount.
    object_found = 0;
    allocator.IterateOverObjects([&]([[maybe_unused]] ObjectHeader *object) { object_found++; });
    ASSERT_EQ(object_found, marked_tenured_object_count);
    // Check that we can still correctly allocate smth in tenured:
    ASSERT_TRUE(AllocateObjectWithClass<RegionFlag::IS_OLD>(allocator) != nullptr);
    // Reset tenured regions:
    allocator.ResetAllSpecificRegions<RegionFlag::IS_OLD>();
    // Check that we can still correctly allocate smth in tenured:
    ASSERT_TRUE(AllocateObjectWithClass<RegionFlag::IS_OLD>(allocator) != nullptr);
}
553
// Under ASAN, writing to memory of reset regions must be detected; without
// ASAN the loops are no-ops. Verifies regions are poisoned after reset.
TEST_F(RegionAllocatorTest, AsanTest)
{
    static constexpr size_t ALLOCATION_COUNT = 100;
    static constexpr size_t TENURED_OBJECTS_CREATION_RATE = 4;
    mem::MemStatsType mem_stats;
    NonObjectRegionAllocator allocator(&mem_stats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, TEST_REGION_SPACE_SIZE,
                                       false);
    std::vector<void *> young_objects;
    std::vector<void *> old_objects;
    // Allocate some objects (young and tenured) in allocator
    for (size_t i = 0; i < ALLOCATION_COUNT; i++) {
        if (i % TENURED_OBJECTS_CREATION_RATE == 0) {
            old_objects.push_back(AllocateObjectWithClass<RegionFlag::IS_OLD>(allocator));
        } else {
            young_objects.push_back(AllocateObjectWithClass<RegionFlag::IS_EDEN>(allocator));
        }
    }
    // Iterate over young objects and move them into tenured:
    allocator.CompactAllSpecificRegions<RegionFlag::IS_EDEN, RegionFlag::IS_OLD>(
        [&](ObjectHeader *object) {
            (void)object;
            return ObjectStatus::ALIVE_OBJECT;
        },
        []([[maybe_unused]] ObjectHeader *src, [[maybe_unused]] ObjectHeader *dst) {});
    allocator.ResetAllSpecificRegions<RegionFlag::IS_EDEN>();
    // Old young-space addresses must now be poisoned.
    for (auto i : young_objects) {
#ifdef PANDA_ASAN_ON
        EXPECT_DEATH(DeathWriteUint64(i), "") << "Write " << sizeof(uint64_t) << " bytes at address " << std::hex << i;
#else
        (void)i;
        continue;
#endif  // PANDA_ASAN_ON
    }
    allocator.ResetAllSpecificRegions<RegionFlag::IS_OLD>();
    // After resetting tenured regions, old addresses must be poisoned too.
    for (auto i : old_objects) {
#ifdef PANDA_ASAN_ON
        EXPECT_DEATH(DeathWriteUint64(i), "") << "Write " << sizeof(uint64_t) << " bytes at address " << std::hex << i;
#else
        (void)i;
        continue;
#endif  // PANDA_ASAN_ON
    }
}
597
TEST_F(RegionAllocatorTest, MTAllocTest)
{
#if defined(PANDA_TARGET_ARM64) || defined(PANDA_TARGET_32)
    // We have an issue with QEMU during MT tests. Issue 2852
    static constexpr size_t THREADS_COUNT = 1;
#else
    static constexpr size_t THREADS_COUNT = 10;
#endif
    static constexpr size_t MIN_MT_ALLOC_SIZE = 16;
    static constexpr size_t MAX_MT_ALLOC_SIZE = 256;
    static constexpr size_t MIN_ELEMENTS_COUNT = 500;
    static constexpr size_t MAX_ELEMENTS_COUNT = 1000;
    static constexpr size_t MT_TEST_RUN_COUNT = 20;
    static constexpr size_t REGIONS_IN_SPACE = 128;
    // Repeat the multi-threaded scenario several times to raise the odds of
    // hitting races between concurrently allocating threads.
    for (size_t run = 0; run < MT_TEST_RUN_COUNT; ++run) {
        mem::MemStatsType stats;
        NonObjectRegionAllocator allocator(&stats, &spaces_, SpaceType::SPACE_TYPE_OBJECT,
                                           RegionSize() * REGIONS_IN_SPACE, true);
        MT_AllocTest<MIN_MT_ALLOC_SIZE, MAX_MT_ALLOC_SIZE, THREADS_COUNT>(&allocator, MIN_ELEMENTS_COUNT,
                                                                          MAX_ELEMENTS_COUNT);
    }
}
619
TEST_F(RegionAllocatorTest, MTAllocLargeTest)
{
#if defined(PANDA_TARGET_ARM64) || defined(PANDA_TARGET_32)
    // We have an issue with QEMU during MT tests. Issue 2852
    static constexpr size_t THREADS_COUNT = 1;
#else
    static constexpr size_t THREADS_COUNT = 10;
#endif
    static constexpr size_t MIN_MT_ALLOC_SIZE = 128;
    static constexpr size_t MAX_MT_ALLOC_SIZE = NonObjectRegionAllocator::GetMaxRegularObjectSize() * 3;
    static constexpr size_t MIN_ELEMENTS_COUNT = 10;
    static constexpr size_t MAX_ELEMENTS_COUNT = 30;
    static constexpr size_t MT_TEST_RUN_COUNT = 20;
    static constexpr size_t REGIONS_IN_SPACE = 256;
    // Same as MTAllocTest, but with sizes that cross into multi-region
    // (large-object) allocations.
    for (size_t run = 0; run < MT_TEST_RUN_COUNT; ++run) {
        mem::MemStatsType stats;
        NonObjectRegionAllocator allocator(&stats, &spaces_, SpaceType::SPACE_TYPE_OBJECT,
                                           RegionSize() * REGIONS_IN_SPACE, true);
        MT_AllocTest<MIN_MT_ALLOC_SIZE, MAX_MT_ALLOC_SIZE, THREADS_COUNT>(&allocator, MIN_ELEMENTS_COUNT,
                                                                          MAX_ELEMENTS_COUNT);
    }
}
641
// RunSlots-backed region allocator for small non-movable objects.
using RegionNonmovableObjectAllocator =
    RegionRunslotsAllocator<ObjectAllocConfigWithCrossingMap, RegionAllocatorLockConfig::CommonLock>;
// RegularSpace=false: non-movable tests do not need a young space.
class RegionNonmovableObjectAllocatorTest : public RegionAllocatorTestBase<RegionNonmovableObjectAllocator, false> {
};
646
// Free-list-backed region allocator for non-movable objects too large for RunSlots.
using RegionNonmovableLargeObjectAllocator =
    RegionFreeListAllocator<ObjectAllocConfigWithCrossingMap, RegionAllocatorLockConfig::CommonLock>;
// RegularSpace=false: non-movable tests do not need a young space.
class RegionNonmovableLargeObjectAllocatorTest
    : public RegionAllocatorTestBase<RegionNonmovableLargeObjectAllocator, false> {
};
652
TEST_F(RegionNonmovableObjectAllocatorTest, AllocatorTest)
{
    mem::MemStatsType stats;
    RegionNonmovableObjectAllocator allocator(&stats, &spaces_, SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT);
    // Allocate every supported size once, from the smallest slot up to the
    // RunSlots limit; each must succeed.
    for (uint32_t size = 8; size <= RegionNonmovableObjectAllocator::GetMaxSize(); ++size) {
        ASSERT_TRUE(allocator.Alloc(size) != nullptr);
    }
}
661
TEST_F(RegionNonmovableObjectAllocatorTest, MTAllocatorTest)
{
#if defined(PANDA_TARGET_ARM64) || defined(PANDA_TARGET_32)
    // We have an issue with QEMU during MT tests. Issue 2852
    static constexpr size_t THREADS_COUNT = 1;
#else
    static constexpr size_t THREADS_COUNT = 10;
#endif
    static constexpr size_t MIN_MT_ALLOC_SIZE = 8;
    static constexpr size_t MAX_MT_ALLOC_SIZE = RegionNonmovableObjectAllocator::GetMaxSize();
    static constexpr size_t MIN_ELEMENTS_COUNT = 200;
    static constexpr size_t MAX_ELEMENTS_COUNT = 300;
    static constexpr size_t MT_TEST_RUN_COUNT = 20;
    // Repeat to raise the odds of hitting races between allocating threads.
    for (size_t run = 0; run < MT_TEST_RUN_COUNT; ++run) {
        mem::MemStatsType stats;
        RegionNonmovableObjectAllocator allocator(&stats, &spaces_, SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT);
        MT_AllocTest<MIN_MT_ALLOC_SIZE, MAX_MT_ALLOC_SIZE, THREADS_COUNT>(&allocator, MIN_ELEMENTS_COUNT,
                                                                          MAX_ELEMENTS_COUNT);
        // region is allocated in allocator, so don't free it explicitly
        allocator.VisitAndRemoveAllPools([]([[maybe_unused]] void *mem, [[maybe_unused]] size_t size) {});
    }
}
684
TEST_F(RegionNonmovableLargeObjectAllocatorTest, AllocatorTest)
{
    mem::MemStatsType stats;
    RegionNonmovableLargeObjectAllocator allocator(&stats, &spaces_, SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT);
    // Sizes just above the RunSlots limit must be served by the free-list allocator.
    size_t first_size = RegionNonmovableObjectAllocator::GetMaxSize() + 1;
    static constexpr size_t SIZES_TO_CHECK = 200;
    for (uint32_t size = first_size; size <= first_size + SIZES_TO_CHECK; ++size) {
        ASSERT_TRUE(allocator.Alloc(size) != nullptr);
    }
    // Boundary cases at this allocator's own maximum size.
    ASSERT_TRUE(allocator.Alloc(RegionNonmovableLargeObjectAllocator::GetMaxSize() - 1) != nullptr);
    ASSERT_TRUE(allocator.Alloc(RegionNonmovableLargeObjectAllocator::GetMaxSize()) != nullptr);
}
696
TEST_F(RegionNonmovableLargeObjectAllocatorTest, MTAllocatorTest)
{
#if defined(PANDA_TARGET_ARM64) || defined(PANDA_TARGET_32)
    // We have an issue with QEMU during MT tests. Issue 2852
    static constexpr size_t THREADS_COUNT = 1;
#else
    static constexpr size_t THREADS_COUNT = 10;
#endif
    static constexpr size_t MIN_MT_ALLOC_SIZE = RegionNonmovableObjectAllocator::GetMaxSize() + 1;
    static constexpr size_t MAX_MT_ALLOC_SIZE = RegionNonmovableLargeObjectAllocator::GetMaxSize();
    static constexpr size_t MIN_ELEMENTS_COUNT = 10;
    static constexpr size_t MAX_ELEMENTS_COUNT = 20;
    static constexpr size_t MT_TEST_RUN_COUNT = 20;
    // Repeat to raise the odds of hitting races between allocating threads.
    for (size_t run = 0; run < MT_TEST_RUN_COUNT; ++run) {
        mem::MemStatsType stats;
        RegionNonmovableLargeObjectAllocator allocator(&stats, &spaces_, SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT);
        MT_AllocTest<MIN_MT_ALLOC_SIZE, MAX_MT_ALLOC_SIZE, THREADS_COUNT>(&allocator, MIN_ELEMENTS_COUNT,
                                                                          MAX_ELEMENTS_COUNT);
        // region is allocated in allocator, so don't free it explicitly
        allocator.VisitAndRemoveAllPools([]([[maybe_unused]] void *mem, [[maybe_unused]] size_t size) {});
    }
}
719
// Checks that the allocator keeps MemStats in sync: object count increments
// per allocation and the allocated-bytes counter grows proportionally.
TEST_F(RegionNonmovableLargeObjectAllocatorTest, MemStatsAllocatorTest)
{
    static constexpr size_t ALLOC_SIZE = 128;
    mem::MemStatsType mem_stats;
    RegionNonmovableLargeObjectAllocator allocator(&mem_stats, &spaces_, SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT);

    // Nothing has been allocated yet.
    ASSERT_TRUE(mem_stats.GetAllocated(SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT) == 0);

    // First allocation: both counters must reflect it.
    void *first = allocator.Alloc(ALLOC_SIZE);
    ASSERT_TRUE(first != nullptr);
    auto allocated_after_first = mem_stats.GetAllocated(SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT);
    ASSERT_EQ(mem_stats.GetTotalObjectsAllocated(), 1);
    ASSERT_TRUE(allocated_after_first != 0);

    // Second allocation of the same size doubles the allocated byte count.
    void *second = allocator.Alloc(ALLOC_SIZE);
    ASSERT_TRUE(second != nullptr);
    auto allocated_after_second = mem_stats.GetAllocated(SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT);
    ASSERT_EQ(mem_stats.GetTotalObjectsAllocated(), 2);
    ASSERT_EQ(allocated_after_second, allocated_after_first + allocated_after_first);
}
741
// Humongous-object allocator under test, guarded by the common lock config.
using RegionHumongousObjectAllocator =
    RegionHumongousAllocator<ObjectAllocConfig, RegionAllocatorLockConfig::CommonLock>;
// Fixture for humongous-object tests; the `false` template argument disables
// the young space (see RegionAllocatorTestBase ctor), which these tests don't need.
class RegionHumongousObjectAllocatorTest : public RegionAllocatorTestBase<RegionHumongousObjectAllocator, false> {
protected:
    // No-op region visitor passed to CollectAndRemoveFreeRegions in the tests below.
    static constexpr auto RegionVisitor = []([[maybe_unused]] void *mem, [[maybe_unused]] size_t size) {};
};
748
// Single-threaded smoke test for the humongous allocator: allocates sizes in
// ALLOC_COUNT even steps up to MAX_ALLOC_SIZE and expects every request to succeed.
TEST_F(RegionHumongousObjectAllocatorTest, AllocatorTest)
{
    static constexpr size_t MAX_ALLOC_SIZE = 5_MB;
    static constexpr size_t ALLOC_COUNT = 20;
    // Hoisted loop-invariant step (was recomputed in both loop clauses).
    static constexpr size_t ALLOC_STEP = MAX_ALLOC_SIZE / ALLOC_COUNT;
    mem::MemStatsType mem_stats;
    RegionHumongousObjectAllocator allocator(&mem_stats, &spaces_, SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT);
    // NOTE: the counter is size_t (was uint32_t) to match the size_t constants
    // and avoid an implicit narrowing conversion.
    for (size_t i = ALLOC_STEP; i <= MAX_ALLOC_SIZE; i += ALLOC_STEP) {
        ASSERT_TRUE(allocator.Alloc(i) != nullptr);
    }
}
759
// Multi-threaded test for the humongous allocator, run twice with two fixed
// allocation sizes (the template MIN == MAX pins the size per sub-test).
TEST_F(RegionHumongousObjectAllocatorTest, MTAllocatorTest)
{
#if defined(PANDA_TARGET_ARM64) || defined(PANDA_TARGET_32)
    // We have an issue with QEMU during MT tests. Issue 2852
    static constexpr size_t THREADS_COUNT = 1;
#else
    static constexpr size_t THREADS_COUNT = 5;
#endif
    static constexpr size_t MIN_MT_ALLOC_SIZE = DEFAULT_REGION_SIZE;
    static constexpr size_t MAX_MT_ALLOC_SIZE = 1_MB;
    static constexpr size_t MIN_ELEMENTS_COUNT = 20;
    static constexpr size_t MAX_ELEMENTS_COUNT = 30;
    // Test with DEFAULT_REGION_SIZE
    {
        mem::MemStatsType mem_stats;
        RegionHumongousObjectAllocator allocator(&mem_stats, &spaces_, SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT);
        MT_AllocTest<MIN_MT_ALLOC_SIZE, MIN_MT_ALLOC_SIZE, THREADS_COUNT>(&allocator, MIN_ELEMENTS_COUNT,
                                                                          MAX_ELEMENTS_COUNT);
        // Regions live inside the allocator's pools; dropping the pools is the cleanup.
        allocator.VisitAndRemoveAllPools([]([[maybe_unused]] void *mem, [[maybe_unused]] size_t size) {});
    }
    // Test with 1Mb
    {
        mem::MemStatsType mem_stats;
        RegionHumongousObjectAllocator allocator(&mem_stats, &spaces_, SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT);
        MT_AllocTest<MAX_MT_ALLOC_SIZE, MAX_MT_ALLOC_SIZE, THREADS_COUNT>(&allocator, MIN_ELEMENTS_COUNT,
                                                                          MAX_ELEMENTS_COUNT);
        allocator.VisitAndRemoveAllPools([]([[maybe_unused]] void *mem, [[maybe_unused]] size_t size) {});
    }
}
789
// Allocates ALLOCATION_COUNT humongous objects of growing size, then verifies
// that CollectAndRemoveFreeRegions visits exactly the allocated objects
// (every object is reported alive, so no region may be removed).
TEST_F(RegionHumongousObjectAllocatorTest, CollectTest)
{
    static constexpr size_t MIN_ALLOC_SIZE = 1_MB;
    static constexpr size_t MAX_ALLOC_SIZE = 9_MB;
    static constexpr size_t ALLOCATION_COUNT = 50;
    std::vector<void *> allocated_elements;
    mem::MemStatsType mem_stats;
    RegionHumongousObjectAllocator allocator(&mem_stats, &spaces_, SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT);
    size_t current_alloc_size = MIN_ALLOC_SIZE;
    auto lang = Runtime::GetCurrent()->GetLanguageContext(panda_file::SourceLang::PANDA_ASSEMBLY);
    auto *class_linker_ext = Runtime::GetCurrent()->GetClassLinker()->GetExtension(lang);
    for (size_t i = 0; i < ALLOCATION_COUNT; i++) {
        // Each object gets its own class whose object size matches the allocation,
        // so the collector can compute the object's footprint.
        auto test_class = class_linker_ext->CreateClass(nullptr, 0, 0, sizeof(panda::Class));
        test_class->SetObjectSize(current_alloc_size);
        void *mem = allocator.Alloc(current_alloc_size);
        ASSERT_TRUE(mem != nullptr);
        allocated_elements.push_back(mem);
        auto object = static_cast<ObjectHeader *>(mem);
        object->SetClass(test_class);
        // Grow linearly from MIN_ALLOC_SIZE towards MAX_ALLOC_SIZE.
        current_alloc_size += ((MAX_ALLOC_SIZE - MIN_ALLOC_SIZE) / ALLOCATION_COUNT);
    }
    // Static storage is required here: `delete_all` must stay captureless (the
    // visitor appears to be taken by a non-capturing-compatible interface), so
    // the set it records into cannot be a capture. NOTE(review): being static,
    // the set would carry state across repeated runs in one process — the final
    // emptiness check below keeps it clean on the happy path.
    static std::set<void *> founded_elements;
    static auto delete_all = [](ObjectHeader *object) {
        founded_elements.insert(object);
        return ObjectStatus::ALIVE_OBJECT;
    };
    // Collect all objects into the set via allocator's method
    allocator.CollectAndRemoveFreeRegions(RegionVisitor, delete_all);
    // Every allocated element must have been visited exactly once, and nothing else.
    for (auto i : allocated_elements) {
        auto element = founded_elements.find(i);
        ASSERT_TRUE(element != founded_elements.end());
        founded_elements.erase(element);
    }
    ASSERT_TRUE(founded_elements.empty());
}
825
// Verifies that a region holding an object reported ALIVE survives
// CollectAndRemoveFreeRegions: the region stays in the space and is not freed.
TEST_F(RegionHumongousObjectAllocatorTest, TestCollectAliveObject)
{
    mem::MemStatsType mem_stats;
    RegionHumongousObjectAllocator allocator(&mem_stats, &spaces_, SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT);
    auto lang = Runtime::GetCurrent()->GetLanguageContext(panda_file::SourceLang::PANDA_ASSEMBLY);
    auto *class_linker_ext = Runtime::GetCurrent()->GetClassLinker()->GetExtension(lang);
    auto test_class = class_linker_ext->CreateClass(nullptr, 0, 0, sizeof(panda::Class));
    // One byte over a region guarantees the object is humongous.
    size_t object_size = DEFAULT_REGION_SIZE + 1;
    test_class->SetObjectSize(object_size);
    void *mem = allocator.Alloc(object_size);
    ASSERT_TRUE(mem != nullptr);
    auto object = static_cast<ObjectHeader *>(mem);
    object->SetClass(test_class);
    Region *region = ObjectToRegion(object);

    allocator.CollectAndRemoveFreeRegions(RegionVisitor, [](ObjectHeader *) { return ObjectStatus::ALIVE_OBJECT; });
    bool has_region = false;
    allocator.GetSpace()->IterateRegions([region, &has_region](Region *r) { has_region |= region == r; });
    ASSERT_TRUE(has_region);
    // Was the runtime's plain ASSERT() macro, which is compiled out in release
    // builds — use a gtest assertion so the check always runs.
    ASSERT_FALSE(region->HasFlag(RegionFlag::IS_FREE));
}
847
// Verifies that a region holding an object reported DEAD is removed from the
// space by CollectAndRemoveFreeRegions.
TEST_F(RegionHumongousObjectAllocatorTest, TestCollectDeadObject)
{
    mem::MemStatsType mem_stats;
    RegionHumongousObjectAllocator allocator(&mem_stats, &spaces_, SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT);
    auto lang = Runtime::GetCurrent()->GetLanguageContext(panda_file::SourceLang::PANDA_ASSEMBLY);
    auto *class_linker_ext = Runtime::GetCurrent()->GetClassLinker()->GetExtension(lang);
    auto test_class = class_linker_ext->CreateClass(nullptr, 0, 0, sizeof(panda::Class));
    // One byte over a region guarantees the object is humongous.
    size_t object_size = DEFAULT_REGION_SIZE + 1;
    test_class->SetObjectSize(object_size);
    void *mem = allocator.Alloc(object_size);
    ASSERT_TRUE(mem != nullptr);
    auto object = static_cast<ObjectHeader *>(mem);
    object->SetClass(test_class);
    Region *region = ObjectToRegion(object);

    allocator.CollectAndRemoveFreeRegions(RegionVisitor, [](ObjectHeader *) { return ObjectStatus::DEAD_OBJECT; });
    bool has_region = false;
    allocator.GetSpace()->IterateRegions([region, &has_region](Region *r) { has_region |= region == r; });
    // Idiomatic gtest form (was ASSERT_TRUE(!has_region)), consistent with
    // the alive-object test's positive check.
    ASSERT_FALSE(has_region);
}
868
869 } // namespace panda::mem::test
870