/**
 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <sys/mman.h>
#include <algorithm>
#include <thread>

#include "libpandabase/mem/mem.h"
#include "libpandabase/os/mem.h"
#include "libpandabase/utils/asan_interface.h"
#include "libpandabase/utils/logger.h"
#include "libpandabase/utils/math_helpers.h"
#include "runtime/include/runtime.h"
#include "runtime/mem/alloc_config.h"
#include "runtime/mem/tlab.h"
#include "runtime/tests/allocator_test_base.h"
#include "runtime/mem/region_allocator-inl.h"

namespace ark::mem::test {
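// Region allocator instantiated with an empty allocation config (crossing-map support only),
// so the tests below exercise raw region bookkeeping without GC-specific hooks.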
using NonObjectRegionAllocator = RegionAllocator<EmptyAllocConfigWithCrossingMap>;

static constexpr size_t YOUNG_SPACE_SIZE = 128_MB;

template <typename ObjectAllocator, bool REGULAR_SPACE = true>
class RegionAllocatorTestBase : public AllocatorTest<ObjectAllocator> {
public:
    RegionAllocatorTestBase()
    {
        options_.SetShouldLoadBootPandaFiles(false);
        options_.SetShouldInitializeIntrinsics(false);
        options_.SetYoungSpaceSize(YOUNG_SPACE_SIZE);
        options_.SetHeapSizeLimit(320_MB);  // NOLINT(readability-magic-numbers)
        options_.SetGcType("epsilon");
        Runtime::Create(options_);
        // For tests we don't limit spaces
        size_t spaceSize = options_.GetHeapSizeLimit();
        size_t youngSize = spaceSize;
        if constexpr (!REGULAR_SPACE) {
            // we don't need young space for non-movable or humongous allocator tests
            youngSize = 0;
        }
        spaces_.youngSpace_.Initialize(youngSize, youngSize);
        spaces_.memSpace_.Initialize(spaceSize, spaceSize);
        spaces_.InitializePercentages(0, PERCENT_100_D);
        spaces_.isInitialized_ = true;
        thread_ = ark::MTManagedThread::GetCurrent();
        thread_->ManagedCodeBegin();
        classLinker_ = Runtime::GetCurrent()->GetClassLinker();
        auto lang = Runtime::GetCurrent()->GetLanguageContext(panda_file::SourceLang::PANDA_ASSEMBLY);
        auto *classLinkerExt = Runtime::GetCurrent()->GetClassLinker()->GetExtension(lang);
        testClass_ = classLinkerExt->CreateClass(nullptr, 0, 0, sizeof(ark::Class));
        testClass_->SetObjectSize(OBJECT_SIZE);
    }

    ~RegionAllocatorTestBase() override
    {
        thread_->ManagedCodeEnd();
        Runtime::Destroy();
    }

    NO_COPY_SEMANTIC(RegionAllocatorTestBase);
    NO_MOVE_SEMANTIC(RegionAllocatorTestBase);

protected:
    static constexpr size_t OBJECT_SIZE = 128;

    void AddMemoryPoolToAllocator([[maybe_unused]] ObjectAllocator &allocator) final {}

    void AddMemoryPoolToAllocatorProtected([[maybe_unused]] ObjectAllocator &allocator) final {}

    bool AllocatedByThisAllocator([[maybe_unused]] ObjectAllocator &allocator, [[maybe_unused]] void *mem) override
    {
        return allocator.ContainObject(reinterpret_cast<ObjectHeader *>(mem));
    }

    void InitializeObjectAtMem(ObjectHeader *object)
    {
        object->SetClass(testClass_);
    }

    Class *testClass_;           // NOLINT(misc-non-private-member-variables-in-classes)
    GenerationalSpaces spaces_;  // NOLINT(misc-non-private-member-variables-in-classes)

private:
    ark::MTManagedThread *thread_;
    ClassLinker *classLinker_;
    RuntimeOptions options_;
};

class RegionAllocatorTest : public RegionAllocatorTestBase<NonObjectRegionAllocator> {
public:
    static constexpr size_t TEST_REGION_SPACE_SIZE = YOUNG_SPACE_SIZE;

    size_t GetNumFreeRegions(NonObjectRegionAllocator &allocator)
    {
        return allocator.GetSpace()->GetPool()->GetFreeRegionsNumInRegionBlock();
    }

    static size_t constexpr RegionSize()
    {
        return NonObjectRegionAllocator::REGION_SIZE;
    }

    static size_t constexpr GetRegionsNumber()
    {
        return TEST_REGION_SPACE_SIZE / NonObjectRegionAllocator::REGION_SIZE;
    }

    template <RegionFlag ALLOC_TYPE>
    void *AllocateObjectWithClass(NonObjectRegionAllocator &allocator)
    {
        void *mem = allocator.Alloc<ALLOC_TYPE>(OBJECT_SIZE);
        if (mem == nullptr) {
            return nullptr;
        }
        InitializeObjectAtMem(static_cast<ObjectHeader *>(mem));
        return mem;
    }

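    // Allocates `size` bytes as a regular object and mirrors the allocator's expected state in the
    // caller-provided counters: the number of untouched regions and the bytes left in the current region.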
    void AllocateRegularObject(NonObjectRegionAllocator &allocator, size_t &freeRegions, size_t &freeBytesForCurReg,
                               size_t size)
    {
        ASSERT_EQ(GetNumFreeRegions(allocator), freeRegions);
        size_t alignSize = AlignUp(size, GetAlignmentInBytes(DEFAULT_ALIGNMENT));
        if (freeBytesForCurReg >= alignSize) {
            ASSERT_TRUE(allocator.Alloc(size) != nullptr)
                << "failed to allocate an object of size " << alignSize << " with " << freeBytesForCurReg
                << " free bytes";
            freeBytesForCurReg -= alignSize;
        } else if (freeRegions > 0) {
            ASSERT_TRUE(allocator.Alloc(size) != nullptr);
            freeRegions -= 1;
            freeBytesForCurReg = NonObjectRegionAllocator::GetMaxRegularObjectSize() - alignSize;
        } else {
            ASSERT_TRUE(allocator.Alloc(alignSize) == nullptr);
            alignSize = freeBytesForCurReg;
            ASSERT(freeBytesForCurReg % GetAlignmentInBytes(DEFAULT_ALIGNMENT) == 0);
            ASSERT_TRUE(allocator.Alloc(alignSize) != nullptr);
            freeBytesForCurReg = 0;
        }
        auto reg = allocator.GetCurrentRegion<true, RegionFlag::IS_EDEN>();
        ASSERT_EQ(GetNumFreeRegions(allocator), freeRegions);
        ASSERT_EQ(reg->End() - reg->Top(), freeBytesForCurReg);
    }

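    // Allocates `size` bytes as a large (non-regular) object and updates the caller's free-region count.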
    void AllocateLargeObject(NonObjectRegionAllocator &allocator, size_t &freeRegions, size_t size)
    {
        ASSERT_EQ(GetNumFreeRegions(allocator), freeRegions);
        size_t allocSize = AlignUp(size, GetAlignmentInBytes(DEFAULT_ALIGNMENT));
        if (allocSize + Region::HeadSize() > freeRegions * RegionSize()) {
            ASSERT_TRUE(allocator.Alloc(allocSize) == nullptr);
            allocSize = std::min(allocSize, freeRegions * NonObjectRegionAllocator::GetMaxRegularObjectSize());
        }
        ASSERT_TRUE(allocator.Alloc(allocSize) != nullptr);
        freeRegions -= (allocSize + Region::HeadSize() + RegionSize() - 1) / RegionSize();
        ASSERT_EQ(GetNumFreeRegions(allocator), freeRegions);
    }

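    // Allocates an eden (young) regular object of the aligned size, bypassing the large-object path.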
    void *AllocateYoungRegular(NonObjectRegionAllocator &allocator, size_t size)
    {
        auto alignSize = AlignUp(size, GetAlignmentInBytes(DEFAULT_ALIGNMENT));
        return allocator.AllocRegular<RegionFlag::IS_EDEN>(alignSize);
    }

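    // Returns a visitor that counts every object it sees and reports it as alive.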
    auto ObjectChecker(size_t &objectFound)
    {
        auto ptrObjectFound = &objectFound;
        auto objectChecker = [ptrObjectFound](ObjectHeader *object) {
            (void)object;
            (*ptrObjectFound)++;
            return ObjectStatus::ALIVE_OBJECT;
        };
        return objectChecker;
    }

    static const int LOOP_COUNT = 100;
};

TEST_F(RegionAllocatorTest, AllocateTooMuchRegularObject)
{
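    // Each object is bigger than half a region's payload, so every allocation claims a fresh region
    // and exactly GetRegionsNumber() allocations exhaust the space.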
    auto *memStats = new mem::MemStatsType();
    NonObjectRegionAllocator allocator(memStats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, TEST_REGION_SPACE_SIZE, false);
    size_t allocTimes = GetRegionsNumber();
    for (size_t i = 0; i < allocTimes; i++) {
        ASSERT_TRUE(allocator.Alloc(allocator.GetMaxRegularObjectSize() / 2UL + 1UL) != nullptr);
    }
    ASSERT_TRUE(allocator.Alloc(allocator.GetMaxRegularObjectSize() / 2UL + 1UL) == nullptr);
    delete memStats;
}

TEST_F(RegionAllocatorTest, AllocateTooMuchRandomRegularObject)
{
    auto *memStats = new mem::MemStatsType();
    for (int i = 0; i < RegionAllocatorTest::LOOP_COUNT; i++) {
        NonObjectRegionAllocator allocator(memStats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, TEST_REGION_SPACE_SIZE,
                                           false);
        size_t freeRegions = GetRegionsNumber();
        size_t freeBytesForCurReg = 0;
        while (freeRegions != 0 || freeBytesForCurReg != 0) {
            size_t size = RandFromRange(1, allocator.GetMaxRegularObjectSize());
            AllocateRegularObject(allocator, freeRegions, freeBytesForCurReg, size);
        }
        ASSERT_TRUE(allocator.Alloc(1) == nullptr);
    }
    delete memStats;
}

TEST_F(RegionAllocatorTest, AllocateTooMuchLargeObject)
{
    auto *memStats = new mem::MemStatsType();
    NonObjectRegionAllocator allocator(memStats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, TEST_REGION_SPACE_SIZE, false);
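    // The first allocation fills one region; each following large object (> max regular size) takes two.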
    ASSERT_TRUE(allocator.Alloc(allocator.GetMaxRegularObjectSize()) != nullptr);
    size_t allocTimes = (GetRegionsNumber() - 1) / 2;
    for (size_t i = 0; i < allocTimes; i++) {
        ASSERT_TRUE(allocator.Alloc(allocator.GetMaxRegularObjectSize() + 1) != nullptr);
    }
    ASSERT_TRUE(allocator.Alloc(allocator.GetMaxRegularObjectSize() + 1) == nullptr);
    // try to fill up the last free region (if any)
    allocator.Alloc(allocator.GetMaxRegularObjectSize());
    ASSERT_TRUE(allocator.Alloc(1) == nullptr);
    delete memStats;
}

TEST_F(RegionAllocatorTest, AllocateTooMuchRandomLargeObject)
{
    auto *memStats = new mem::MemStatsType();
    for (int i = 0; i < RegionAllocatorTest::LOOP_COUNT; i++) {
        NonObjectRegionAllocator allocator(memStats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, TEST_REGION_SPACE_SIZE,
                                           false);
        ASSERT_TRUE(allocator.Alloc(allocator.GetMaxRegularObjectSize()) != nullptr);
        size_t freeRegions = GetRegionsNumber() - 1;
        while (freeRegions > 1) {
            size_t size =
                RandFromRange(allocator.GetMaxRegularObjectSize() + 1, 3 * allocator.GetMaxRegularObjectSize());
            AllocateLargeObject(allocator, freeRegions, size);
        }
        if (freeRegions == 1) {
            ASSERT_TRUE(allocator.Alloc(allocator.GetMaxRegularObjectSize()) != nullptr);
        }
        ASSERT_TRUE(allocator.Alloc(1) == nullptr);
    }
    delete memStats;
}

TEST_F(RegionAllocatorTest, AllocateTooMuchRandomRegularAndLargeObjectTest)
{
    auto *memStats = new mem::MemStatsType();
    for (int i = 0; i < RegionAllocatorTest::LOOP_COUNT; i++) {
        NonObjectRegionAllocator allocator(memStats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, TEST_REGION_SPACE_SIZE,
                                           false);
        size_t freeRegions = GetRegionsNumber();
        size_t freeBytesForCurReg = 0;
        while (freeRegions != 0 || freeBytesForCurReg != 0) {
            ASSERT(freeBytesForCurReg % GetAlignmentInBytes(DEFAULT_ALIGNMENT) == 0);
            size_t size = RandFromRange(1, 3 * allocator.GetMaxRegularObjectSize());
            size_t alignSize = AlignUp(size, GetAlignmentInBytes(DEFAULT_ALIGNMENT));
            if (alignSize <= NonObjectRegionAllocator::GetMaxRegularObjectSize()) {
                AllocateRegularObject(allocator, freeRegions, freeBytesForCurReg, alignSize);
            } else if (freeRegions > 1) {
                AllocateLargeObject(allocator, freeRegions, alignSize);
            }
        }
        ASSERT_TRUE(allocator.Alloc(1) == nullptr);
    }
    delete memStats;
}

TEST_F(RegionAllocatorTest, AllocatedByRegionAllocatorTest)
{
    mem::MemStatsType memStats;
    NonObjectRegionAllocator allocator(&memStats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, TEST_REGION_SPACE_SIZE,
                                       false);
    AllocatedByThisAllocatorTest(allocator);
}

TEST_F(RegionAllocatorTest, OneAlignmentAllocTest)
{
    // NOLINTNEXTLINE(readability-magic-numbers)
    OneAlignedAllocFreeTest<NonObjectRegionAllocator::GetMaxRegularObjectSize() - 128UL,
                            // NOLINTNEXTLINE(readability-magic-numbers)
                            NonObjectRegionAllocator::GetMaxRegularObjectSize() + 128UL, DEFAULT_ALIGNMENT>(1UL,
                                                                                                            &spaces_);
}

TEST_F(RegionAllocatorTest, AllocateFreeDifferentSizesTest)
{
    static constexpr size_t ELEMENTS_COUNT = 256;
    static constexpr size_t POOLS_COUNT = 1;
    // NOLINTNEXTLINE(readability-magic-numbers)
    AllocateFreeDifferentSizesTest<NonObjectRegionAllocator::GetMaxRegularObjectSize() - 128UL,
                                   // NOLINTNEXTLINE(readability-magic-numbers)
                                   NonObjectRegionAllocator::GetMaxRegularObjectSize() + 128UL>(ELEMENTS_COUNT,
                                                                                                POOLS_COUNT, &spaces_);
}

TEST_F(RegionAllocatorTest, RegionTLABAllocTest)
{
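    // Allocate through TLABs until the region space is exhausted; the test must end with an OOM.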
    static constexpr size_t TLAB_SIZE = 4_KB;
    static constexpr size_t ALLOC_SIZE = 512;
    static constexpr size_t ALLOC_COUNT = 5000000;
    auto *memStats = new mem::MemStatsType();
    NonObjectRegionAllocator allocator(memStats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, TEST_REGION_SPACE_SIZE, false);
    bool isOom = false;
    TLAB *tlab = allocator.CreateTLAB(TLAB_SIZE);
    for (size_t i = 0; i < ALLOC_COUNT; i++) {
        auto oldStartPointer = tlab->GetStartAddr();
        auto mem = tlab->Alloc(ALLOC_SIZE);
        // the current TLAB is full: create a new one and check its address
        if (mem == nullptr) {
            auto newTlab = allocator.CreateTLAB(TLAB_SIZE);
            if (newTlab != nullptr) {
                auto newStartPointer = newTlab->GetStartAddr();
                ASSERT_NE(newStartPointer, nullptr);
                ASSERT_NE(newStartPointer, oldStartPointer);
                ASSERT_NE(newTlab, tlab);
                tlab = newTlab;
                mem = tlab->Alloc(ALLOC_SIZE);
            }
        }
        if (mem == nullptr) {
            ASSERT_EQ(GetNumFreeRegions(allocator), 0);
            isOom = true;
            break;
        }
        ASSERT_NE(mem, nullptr);
    }
    ASSERT_EQ(isOom, true) << "Increase ALLOC_COUNT to get OOM";
    delete memStats;
}

TEST_F(RegionAllocatorTest, RegionPoolTest)
{
    mem::MemStatsType memStats;
    NonObjectRegionAllocator allocator(&memStats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, RegionSize() * 2U, true);

    // allocate two small objects in one region
    ASSERT_EQ(GetNumFreeRegions(allocator), 2U);
    auto *obj1 = reinterpret_cast<ObjectHeader *>(allocator.Alloc(1UL));  // one byte
    ASSERT_TRUE(obj1 != nullptr);
    ASSERT_EQ(GetNumFreeRegions(allocator), 1UL);
    auto *obj2 = reinterpret_cast<ObjectHeader *>(
        allocator.Alloc(DEFAULT_ALIGNMENT_IN_BYTES + 2U));  // two bytes more than the alignment
    ASSERT_TRUE(obj2 != nullptr);
    ASSERT_EQ(GetNumFreeRegions(allocator), 1UL);

    // check that the two objects are in the same region
    ASSERT_EQ(ToUintPtr(obj2), ToUintPtr(obj1) + DEFAULT_ALIGNMENT_IN_BYTES);
    auto *region1 = allocator.GetRegion(obj1);
    ASSERT_TRUE(region1 != nullptr);
    auto *region2 = allocator.GetRegion(obj2);
    ASSERT_TRUE(region2 != nullptr);
    ASSERT_EQ(region1, region2);
    ASSERT_EQ(region1->Top() - region1->Begin(), 3U * DEFAULT_ALIGNMENT_IN_BYTES);

    // allocate a large object in the pool (not in the initial block)
    ASSERT_EQ(GetNumFreeRegions(allocator), 1);
    // NOLINTNEXTLINE(readability-magic-numbers)
    auto *obj3 = reinterpret_cast<ObjectHeader *>(allocator.Alloc(allocator.GetMaxRegularObjectSize() + 200U));
    ASSERT_TRUE(obj3 != nullptr);
    ASSERT_EQ(GetNumFreeRegions(allocator), 1UL);
    auto *region3 = allocator.GetRegion(obj3);
    ASSERT_TRUE(region3 != nullptr);
    ASSERT_NE(region2, region3);
    ASSERT_TRUE(region3->HasFlag(RegionFlag::IS_LARGE_OBJECT));

    // allocate a regular object which can't fit into the current region
    auto *obj4 = reinterpret_cast<ObjectHeader *>(
        allocator.Alloc(allocator.GetMaxRegularObjectSize() - DEFAULT_ALIGNMENT_IN_BYTES));
    ASSERT_TRUE(obj4 != nullptr);
    ASSERT_EQ(GetNumFreeRegions(allocator), 0);
    auto *region4 = allocator.GetRegion(obj4);
    ASSERT_TRUE(region4 != nullptr);
    ASSERT_EQ(ToUintPtr(region4), ToUintPtr(region2) + RegionSize());

    auto *obj5 = reinterpret_cast<ObjectHeader *>(allocator.Alloc(DEFAULT_ALIGNMENT_IN_BYTES));
    ASSERT_TRUE(obj5 != nullptr);
    auto *region5 = allocator.GetRegion(obj5);
    ASSERT_EQ(region4, region5);
}

TEST_F(RegionAllocatorTest, IterateOverObjectsTest)
{
    mem::MemStatsType memStats;
    NonObjectRegionAllocator allocator(&memStats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, 0, true);
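    // Allocate two objects in the same region and check that the iterator visits each exactly once.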
    auto *obj1 = reinterpret_cast<ObjectHeader *>(allocator.Alloc(testClass_->GetObjectSize()));
    obj1->SetClass(testClass_);
    auto *obj2 = reinterpret_cast<ObjectHeader *>(allocator.Alloc(testClass_->GetObjectSize()));
    obj2->SetClass(testClass_);
    auto *region = allocator.GetRegion(obj1);
    size_t obj1Num = 0;
    size_t obj2Num = 0;
    region->IterateOverObjects([this, obj1, obj2, region, &obj1Num, &obj2Num, &allocator](ObjectHeader *object) {
        ASSERT_TRUE(object == obj1 || object == obj2);
        ASSERT_EQ(allocator.GetRegion(object), region);
        ASSERT_EQ(object->ClassAddr<Class>(), testClass_);
        if (object == obj1) {
            obj1Num++;
        } else if (object == obj2) {
            obj2Num++;
        }

#ifndef NDEBUG
        // can't allocate an object while iterating over the region
        ASSERT_DEATH(allocator.Alloc(testClass_->GetObjectSize()), "");
#endif
    });
    ASSERT_EQ(obj1Num, 1);
    ASSERT_EQ(obj2Num, 1);

#ifndef NDEBUG
    ASSERT_TRUE(region->SetAllocating(true));
    // can't iterate over the region while allocating
    ASSERT_DEATH(region->IterateOverObjects([]([[maybe_unused]] ObjectHeader *object) {}), "");
    ASSERT_TRUE(region->SetAllocating(false));
#endif
}

TEST_F(RegionAllocatorTest, AllocateAndMoveYoungObjectsToTenured)
{
    static constexpr size_t ALLOCATION_COUNT = 10000;
    static constexpr size_t TENURED_OBJECTS_CREATION_RATE = 4;
    mem::MemStatsType memStats;
    NonObjectRegionAllocator allocator(&memStats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, TEST_REGION_SPACE_SIZE,
                                       false);
    // Allocate some objects (young and tenured) in allocator
    for (size_t i = 0; i < ALLOCATION_COUNT; i++) {
        void *mem = nullptr;
        if (i % TENURED_OBJECTS_CREATION_RATE == 0) {
            mem = AllocateObjectWithClass<RegionFlag::IS_OLD>(allocator);
        } else {
            mem = AllocateObjectWithClass<RegionFlag::IS_EDEN>(allocator);
        }
        ASSERT_TRUE(mem != nullptr);
    }
    // Iterate over young objects and move them into tenured:
    allocator.CompactAllSpecificRegions<RegionFlag::IS_EDEN, RegionFlag::IS_OLD>(
        [&](ObjectHeader *object) {
            (void)object;
            return ObjectStatus::ALIVE_OBJECT;
        },
        []([[maybe_unused]] ObjectHeader *src, [[maybe_unused]] ObjectHeader *dst) {});
    allocator.ResetAllSpecificRegions<RegionFlag::IS_EDEN>();
    size_t objectFound = 0;
    allocator.IterateOverObjects([&objectFound](ObjectHeader *object) {
        (void)object;
        objectFound++;
    });
    ASSERT_EQ(objectFound, ALLOCATION_COUNT);
}

TEST_F(RegionAllocatorTest, AllocateAndCompactTenuredObjects)
{
    static constexpr size_t ALLOCATION_COUNT = 7000;
    static constexpr size_t YOUNG_OBJECTS_CREATION_RATE = 100;
    mem::MemStatsType memStats;
    NonObjectRegionAllocator allocator(&memStats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, TEST_REGION_SPACE_SIZE,
                                       false);
    PandaVector<Region *> regionsVector;
    size_t tenuredObjectCount = 0;
    // Allocate some objects (young and tenured) in allocator
    for (size_t i = 0; i < ALLOCATION_COUNT; i++) {
        void *mem = nullptr;
        if (i % YOUNG_OBJECTS_CREATION_RATE != 0) {
            mem = AllocateObjectWithClass<RegionFlag::IS_OLD>(allocator);
            tenuredObjectCount++;
            Region *region = allocator.GetRegion(static_cast<ObjectHeader *>(mem));
            if (std::find(regionsVector.begin(), regionsVector.end(), region) == regionsVector.end()) {
                regionsVector.insert(regionsVector.begin(), region);
            }
        } else {
            mem = AllocateObjectWithClass<RegionFlag::IS_EDEN>(allocator);
        }
        ASSERT_TRUE(mem != nullptr);
    }
    ASSERT_TRUE(regionsVector.size() > 1);
    ASSERT_EQ(allocator.GetAllSpecificRegions<RegionFlag::IS_OLD>().size(), regionsVector.size());
    // Iterate over some tenured regions and compact them:
    allocator.ClearCurrentRegion<RegionFlag::IS_OLD>();
    size_t objectFound = 0;
    allocator.CompactSeveralSpecificRegions<RegionFlag::IS_OLD, RegionFlag::IS_OLD>(
        regionsVector, ObjectChecker(objectFound),
        []([[maybe_unused]] ObjectHeader *from, [[maybe_unused]] ObjectHeader *to) {
            // nothing to do here
        });
    ASSERT_EQ(objectFound, tenuredObjectCount);
    objectFound = 0;
    allocator.IterateOverObjects([&objectFound](ObjectHeader *object) {
        (void)object;
        objectFound++;
    });
    ASSERT_EQ(objectFound, ALLOCATION_COUNT + tenuredObjectCount);
    allocator.ResetSeveralSpecificRegions<RegionFlag::IS_OLD, RegionSpace::ReleaseRegionsPolicy::NoRelease,
                                          OSPagesPolicy::IMMEDIATE_RETURN, false>(regionsVector);
    // Check that we have the same object amount as we allocated.
    objectFound = 0;
    allocator.IterateOverObjects([&objectFound](ObjectHeader *object) {
        (void)object;
        objectFound++;
    });
    ASSERT_EQ(objectFound, ALLOCATION_COUNT);
    // Check that we can still correctly allocate something in tenured:
    ASSERT_TRUE(AllocateObjectWithClass<RegionFlag::IS_OLD>(allocator) != nullptr);
    // Reset tenured regions:
    allocator.ResetAllSpecificRegions<RegionFlag::IS_OLD>();
    // Check that we can still correctly allocate something in tenured:
    ASSERT_TRUE(AllocateObjectWithClass<RegionFlag::IS_OLD>(allocator) != nullptr);
}

TEST_F(RegionAllocatorTest, AllocateAndCompactTenuredObjectsViaMarkedBitmap)
{
    static constexpr size_t ALLOCATION_COUNT = 7000;
    static constexpr size_t MARKED_OBJECTS_RATE = 2;
    mem::MemStatsType memStats;
    NonObjectRegionAllocator allocator(&memStats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, TEST_REGION_SPACE_SIZE,
                                       false);
    PandaVector<Region *> regionsVector;
    size_t markedTenuredObjectCount = 0;
    // Allocate some tenured objects in allocator and mark a subset of them
    for (size_t i = 0; i < ALLOCATION_COUNT; i++) {
        void *mem = AllocateObjectWithClass<RegionFlag::IS_OLD>(allocator);
        ASSERT_TRUE(mem != nullptr);
        Region *region = allocator.GetRegion(static_cast<ObjectHeader *>(mem));
        if (std::find(regionsVector.begin(), regionsVector.end(), region) == regionsVector.end()) {
            regionsVector.insert(regionsVector.begin(), region);
        }
        if (i % MARKED_OBJECTS_RATE != 0) {
            region->SetMarkBit(static_cast<ObjectHeader *>(mem));
            markedTenuredObjectCount++;
        }
    }
    ASSERT_TRUE(regionsVector.size() > 1);
    ASSERT_EQ(allocator.GetAllSpecificRegions<RegionFlag::IS_OLD>().size(), regionsVector.size());
    // Iterate over some tenured regions and compact them:
    allocator.ClearCurrentRegion<RegionFlag::IS_OLD>();
    size_t objectFound = 0;
    allocator.CompactSeveralSpecificRegions<RegionFlag::IS_OLD, RegionFlag::IS_OLD, true>(
        regionsVector, []([[maybe_unused]] ObjectHeader *object) { return ObjectStatus::ALIVE_OBJECT; },
        [&objectFound]([[maybe_unused]] ObjectHeader *from, [[maybe_unused]] ObjectHeader *to) { ++objectFound; });
    ASSERT_EQ(objectFound, markedTenuredObjectCount);
    objectFound = 0;
    allocator.IterateOverObjects([&objectFound](ObjectHeader *object) {
        (void)object;
        objectFound++;
    });
    ASSERT_EQ(objectFound, ALLOCATION_COUNT + markedTenuredObjectCount);
    allocator.ResetSeveralSpecificRegions<RegionFlag::IS_OLD, RegionSpace::ReleaseRegionsPolicy::NoRelease,
                                          OSPagesPolicy::IMMEDIATE_RETURN, false>(regionsVector);
    // Check that only the marked (compacted) objects remain.
    objectFound = 0;
    allocator.IterateOverObjects([&objectFound](ObjectHeader *object) {
        (void)object;
        objectFound++;
    });
    ASSERT_EQ(objectFound, markedTenuredObjectCount);
    // Check that we can still correctly allocate something in tenured:
    ASSERT_TRUE(AllocateObjectWithClass<RegionFlag::IS_OLD>(allocator) != nullptr);
    // Reset tenured regions:
    allocator.ResetAllSpecificRegions<RegionFlag::IS_OLD>();
    // Check that we can still correctly allocate something in tenured:
    ASSERT_TRUE(AllocateObjectWithClass<RegionFlag::IS_OLD>(allocator) != nullptr);
}

TEST_F(RegionAllocatorTest, AsanTest)
{
    static constexpr size_t ALLOCATION_COUNT = 100;
    static constexpr size_t TENURED_OBJECTS_CREATION_RATE = 4;
    mem::MemStatsType memStats;
    NonObjectRegionAllocator allocator(&memStats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, TEST_REGION_SPACE_SIZE,
                                       false);
    std::vector<void *> youngObjects;
    std::vector<void *> oldObjects;
    // Allocate some objects (young and tenured) in allocator
    for (size_t i = 0; i < ALLOCATION_COUNT; i++) {
        if (i % TENURED_OBJECTS_CREATION_RATE == 0) {
            oldObjects.push_back(AllocateObjectWithClass<RegionFlag::IS_OLD>(allocator));
        } else {
            youngObjects.push_back(AllocateObjectWithClass<RegionFlag::IS_EDEN>(allocator));
        }
    }
    // Iterate over young objects and move them into tenured:
    allocator.CompactAllSpecificRegions<RegionFlag::IS_EDEN, RegionFlag::IS_OLD>(
        [&](ObjectHeader *object) {
            (void)object;
            return ObjectStatus::ALIVE_OBJECT;
        },
        []([[maybe_unused]] ObjectHeader *src, [[maybe_unused]] ObjectHeader *dst) {});
    allocator.ResetAllSpecificRegions<RegionFlag::IS_EDEN>();
    for (auto i : youngObjects) {
#ifdef PANDA_ASAN_ON
        EXPECT_DEATH(DeathWriteUint64(i), "") << "Write " << sizeof(uint64_t) << " bytes at address " << std::hex << i;
#else
        (void)i;
#endif  // PANDA_ASAN_ON
    }
    allocator.ResetAllSpecificRegions<RegionFlag::IS_OLD>();
    for (auto i : oldObjects) {
#ifdef PANDA_ASAN_ON
        EXPECT_DEATH(DeathWriteUint64(i), "") << "Write " << sizeof(uint64_t) << " bytes at address " << std::hex << i;
#else
        (void)i;
#endif  // PANDA_ASAN_ON
    }
}

TEST_F(RegionAllocatorTest, MTAllocTest)
{
#if defined(PANDA_TARGET_ARM64) || defined(PANDA_TARGET_32)
    // We have an issue with QEMU during MT tests. Issue 2852
    static constexpr size_t THREADS_COUNT = 1;
#else
    static constexpr size_t THREADS_COUNT = 10;
#endif
    static constexpr size_t MIN_MT_ALLOC_SIZE = 16;
    static constexpr size_t MAX_MT_ALLOC_SIZE = 256;
    static constexpr size_t MIN_ELEMENTS_COUNT = 500;
    static constexpr size_t MAX_ELEMENTS_COUNT = 1000;
    static constexpr size_t MT_TEST_RUN_COUNT = 20;
    for (size_t i = 0; i < MT_TEST_RUN_COUNT; i++) {
        mem::MemStatsType memStats;
        // NOLINTNEXTLINE(readability-magic-numbers)
        NonObjectRegionAllocator allocator(&memStats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, RegionSize() * 128U,
                                           true);
        MtAllocTest<MIN_MT_ALLOC_SIZE, MAX_MT_ALLOC_SIZE, THREADS_COUNT>(&allocator, MIN_ELEMENTS_COUNT,
                                                                         MAX_ELEMENTS_COUNT);
    }
}

TEST_F(RegionAllocatorTest, MTAllocLargeTest)
{
#if defined(PANDA_TARGET_ARM64) || defined(PANDA_TARGET_32)
    // We have an issue with QEMU during MT tests. Issue 2852
    static constexpr size_t THREADS_COUNT = 1;
#else
    static constexpr size_t THREADS_COUNT = 10;
#endif
    static constexpr size_t MIN_MT_ALLOC_SIZE = 128;
    static constexpr size_t MAX_MT_ALLOC_SIZE = NonObjectRegionAllocator::GetMaxRegularObjectSize() * 3U;
    static constexpr size_t MIN_ELEMENTS_COUNT = 10;
    static constexpr size_t MAX_ELEMENTS_COUNT = 30;
    static constexpr size_t MT_TEST_RUN_COUNT = 20;
    for (size_t i = 0; i < MT_TEST_RUN_COUNT; i++) {
        mem::MemStatsType memStats;
        // NOLINTNEXTLINE(readability-magic-numbers)
        NonObjectRegionAllocator allocator(&memStats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, RegionSize() * 256U,
                                           true);
        MtAllocTest<MIN_MT_ALLOC_SIZE, MAX_MT_ALLOC_SIZE, THREADS_COUNT>(&allocator, MIN_ELEMENTS_COUNT,
                                                                         MAX_ELEMENTS_COUNT);
    }
}

TEST_F(RegionAllocatorTest, ConcurrentAllocRegular)
{
    mem::MemStatsType memStats;

    constexpr size_t SPACE_SIZE = RegionSize() * 1024U;
    NonObjectRegionAllocator allocator(&memStats, &spaces_, SpaceType::SPACE_TYPE_OBJECT, SPACE_SIZE, true);

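    // Two threads allocate region-sized young objects concurrently; afterwards we verify that no
    // address was handed out twice.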
    auto allocateObjects = [&allocator, this](std::vector<void *> &vec) {
        constexpr size_t ITERATIONS = 100'500;
        constexpr size_t OBJ_SIZE = DEFAULT_REGION_SIZE - Region::HeadSize();
        vec.reserve(ITERATIONS);
        [[maybe_unused]] volatile size_t cnt = 0;
        for (size_t i = 0; i < ITERATIONS; ++i) {
            // We need this loop to make the test longer, to split the threads across physical cores,
            // and to increase the probability of the test failure
            for (size_t j = 0; j < ITERATIONS; ++j) {
                cnt += j;
            }
            void *mem = AllocateYoungRegular(allocator, OBJ_SIZE);
            if (mem == nullptr) {
                break;
            }
            vec.push_back(mem);
        }
    };

    std::vector<void *> vec1;
    std::vector<void *> vec2;

    std::thread worker([&allocateObjects, &vec1] {
        os::CpuAffinityManager::SetAffinityForCurrentThread(os::CpuPower::WEAK);
        allocateObjects(vec1);
    });

    os::CpuAffinityManager::SetAffinityForCurrentThread(os::CpuPower::BEST);
    allocateObjects(vec2);

    worker.join();

    for (auto &elem1 : vec1) {
        for (auto &elem2 : vec2) {
            ASSERT_TRUE(elem1 != elem2);
        }
    }
}

using RegionNonmovableObjectAllocator =
    RegionRunslotsAllocator<ObjectAllocConfigWithCrossingMap, RegionAllocatorLockConfig::CommonLock>;
class RegionNonmovableObjectAllocatorTest : public RegionAllocatorTestBase<RegionNonmovableObjectAllocator, false> {};

using RegionNonmovableLargeObjectAllocator =
    RegionFreeListAllocator<ObjectAllocConfigWithCrossingMap, RegionAllocatorLockConfig::CommonLock>;
class RegionNonmovableLargeObjectAllocatorTest
    : public RegionAllocatorTestBase<RegionNonmovableLargeObjectAllocator, false> {};

TEST_F(RegionNonmovableObjectAllocatorTest, AllocatorTest)
{
    mem::MemStatsType memStats;
    RegionNonmovableObjectAllocator allocator(&memStats, &spaces_, SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT);
    for (uint32_t i = 8; i <= RegionNonmovableObjectAllocator::GetMaxSize(); i++) {
        ASSERT_TRUE(allocator.Alloc(i) != nullptr);
    }
}

TEST_F(RegionNonmovableObjectAllocatorTest, MTAllocatorTest)
{
#if defined(PANDA_TARGET_ARM64) || defined(PANDA_TARGET_32)
    // We have an issue with QEMU during MT tests. Issue 2852
    static constexpr size_t THREADS_COUNT = 1;
#else
    static constexpr size_t THREADS_COUNT = 10;
#endif
    static constexpr size_t MIN_MT_ALLOC_SIZE = 8;
    static constexpr size_t MAX_MT_ALLOC_SIZE = RegionNonmovableObjectAllocator::GetMaxSize();
    static constexpr size_t MIN_ELEMENTS_COUNT = 200;
    static constexpr size_t MAX_ELEMENTS_COUNT = 300;
    static constexpr size_t MT_TEST_RUN_COUNT = 20;
    for (size_t i = 0; i < MT_TEST_RUN_COUNT; i++) {
        mem::MemStatsType memStats;
        RegionNonmovableObjectAllocator allocator(&memStats, &spaces_, SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT);
        MtAllocTest<MIN_MT_ALLOC_SIZE, MAX_MT_ALLOC_SIZE, THREADS_COUNT>(&allocator, MIN_ELEMENTS_COUNT,
                                                                         MAX_ELEMENTS_COUNT);
        // the regions are allocated inside the allocator, so don't free them explicitly
        allocator.VisitAndRemoveAllPools([]([[maybe_unused]] void *mem, [[maybe_unused]] size_t size) {});
    }
}

TEST_F(RegionNonmovableLargeObjectAllocatorTest, AllocatorTest)
{
    mem::MemStatsType memStats;
    RegionNonmovableLargeObjectAllocator allocator(&memStats, &spaces_, SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT);
    size_t startObjectSize = RegionNonmovableObjectAllocator::GetMaxSize() + 1;
    // NOLINTNEXTLINE(readability-magic-numbers)
    for (uint32_t i = startObjectSize; i <= startObjectSize + 200U; i++) {
        ASSERT_TRUE(allocator.Alloc(i) != nullptr);
    }
    ASSERT_TRUE(allocator.Alloc(RegionNonmovableLargeObjectAllocator::GetMaxSize() - 1) != nullptr);
    ASSERT_TRUE(allocator.Alloc(RegionNonmovableLargeObjectAllocator::GetMaxSize()) != nullptr);
}

TEST_F(RegionNonmovableLargeObjectAllocatorTest, MTAllocatorTest)
{
#if defined(PANDA_TARGET_ARM64) || defined(PANDA_TARGET_32)
    // We have an issue with QEMU during MT tests. Issue 2852
    static constexpr size_t THREADS_COUNT = 1;
#else
    static constexpr size_t THREADS_COUNT = 10;
#endif
    static constexpr size_t MIN_MT_ALLOC_SIZE = RegionNonmovableObjectAllocator::GetMaxSize() + 1;
    static constexpr size_t MAX_MT_ALLOC_SIZE = RegionNonmovableLargeObjectAllocator::GetMaxSize();
    static constexpr size_t MIN_ELEMENTS_COUNT = 10;
    static constexpr size_t MAX_ELEMENTS_COUNT = 20;
    static constexpr size_t MT_TEST_RUN_COUNT = 20;
    for (size_t i = 0; i < MT_TEST_RUN_COUNT; i++) {
        mem::MemStatsType memStats;
        RegionNonmovableLargeObjectAllocator allocator(&memStats, &spaces_, SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT);
        MtAllocTest<MIN_MT_ALLOC_SIZE, MAX_MT_ALLOC_SIZE, THREADS_COUNT>(&allocator, MIN_ELEMENTS_COUNT,
                                                                         MAX_ELEMENTS_COUNT);
        // the regions are allocated inside the allocator, so don't free them explicitly
        allocator.VisitAndRemoveAllPools([]([[maybe_unused]] void *mem, [[maybe_unused]] size_t size) {});
    }
}

TEST_F(RegionNonmovableLargeObjectAllocatorTest, MemStatsAllocatorTest)
{
    mem::MemStatsType memStats;
    RegionNonmovableLargeObjectAllocator allocator(&memStats, &spaces_, SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT);
    static constexpr size_t ALLOC_SIZE = 128;
    void *mem = nullptr;

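    // Each allocation must bump both the total object count and the allocated-bytes counter,
    // and two equal allocations must account for exactly twice the bytes of one.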
    auto objectAllocatedSize = memStats.GetAllocated(SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT);
    ASSERT_TRUE(objectAllocatedSize == 0);
    mem = allocator.Alloc(ALLOC_SIZE);
    ASSERT_TRUE(mem != nullptr);
    auto objectAllocatedSize1 = memStats.GetAllocated(SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT);
    ASSERT_EQ(memStats.GetTotalObjectsAllocated(), 1);
    ASSERT_TRUE(objectAllocatedSize1 != 0);

    mem = allocator.Alloc(ALLOC_SIZE);
    ASSERT_TRUE(mem != nullptr);
    auto objectAllocatedSize2 = memStats.GetAllocated(SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT);
    ASSERT_EQ(memStats.GetTotalObjectsAllocated(), 2U);
    ASSERT_EQ(objectAllocatedSize2, objectAllocatedSize1 + objectAllocatedSize1);
}

using RegionHumongousObjectAllocator =
    RegionHumongousAllocator<ObjectAllocConfig, RegionAllocatorLockConfig::CommonLock>;
class RegionHumongousObjectAllocatorTest : public RegionAllocatorTestBase<RegionHumongousObjectAllocator, false> {
protected:
    static constexpr auto REGION_VISITOR = []([[maybe_unused]] PandaVector<Region *> &regions) {};
};

TEST_F(RegionHumongousObjectAllocatorTest, AllocatorTest)
{
    static constexpr size_t MAX_ALLOC_SIZE = 5_MB;
    static constexpr size_t ALLOC_COUNT = 20;
    mem::MemStatsType memStats;
    RegionHumongousObjectAllocator allocator(&memStats, &spaces_, SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT);
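    // Allocate humongous objects of increasing size, stepping by MAX_ALLOC_SIZE / ALLOC_COUNT up to MAX_ALLOC_SIZE.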
    for (uint32_t i = MAX_ALLOC_SIZE / ALLOC_COUNT; i <= MAX_ALLOC_SIZE; i += MAX_ALLOC_SIZE / ALLOC_COUNT) {
        ASSERT_TRUE(allocator.Alloc(i) != nullptr);
    }
}

TEST_F(RegionHumongousObjectAllocatorTest, MTAllocatorTest)
{
#if defined(PANDA_TARGET_ARM64) || defined(PANDA_TARGET_32)
    // We have an issue with QEMU during MT tests. Issue 2852
    static constexpr size_t THREADS_COUNT = 1;
#else
    static constexpr size_t THREADS_COUNT = 5;
#endif
    static constexpr size_t MIN_MT_ALLOC_SIZE = DEFAULT_REGION_SIZE;
    static constexpr size_t MAX_MT_ALLOC_SIZE = 1_MB;
    static constexpr size_t MIN_ELEMENTS_COUNT = 20;
    static constexpr size_t MAX_ELEMENTS_COUNT = 30;
    // Test with DEFAULT_REGION_SIZE
    {
        mem::MemStatsType memStats;
        RegionHumongousObjectAllocator allocator(&memStats, &spaces_, SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT);
        MtAllocTest<MIN_MT_ALLOC_SIZE, MIN_MT_ALLOC_SIZE, THREADS_COUNT>(&allocator, MIN_ELEMENTS_COUNT,
                                                                         MAX_ELEMENTS_COUNT);
        allocator.VisitAndRemoveAllPools([]([[maybe_unused]] void *mem, [[maybe_unused]] size_t size) {});
    }
    // Test with 1MB
    {
        mem::MemStatsType memStats;
        RegionHumongousObjectAllocator allocator(&memStats, &spaces_, SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT);
        MtAllocTest<MAX_MT_ALLOC_SIZE, MAX_MT_ALLOC_SIZE, THREADS_COUNT>(&allocator, MIN_ELEMENTS_COUNT,
                                                                         MAX_ELEMENTS_COUNT);
        allocator.VisitAndRemoveAllPools([]([[maybe_unused]] void *mem, [[maybe_unused]] size_t size) {});
    }
}

TEST_F(RegionHumongousObjectAllocatorTest, CollectTest)
{
    static constexpr size_t MIN_ALLOC_SIZE = 1_MB;
    static constexpr size_t MAX_ALLOC_SIZE = 9_MB;
    static constexpr size_t ALLOCATION_COUNT = 50;
    std::vector<void *> allocatedElements;
    mem::MemStatsType memStats;
    RegionHumongousObjectAllocator allocator(&memStats, &spaces_, SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT);
    size_t currentAllocSize = MIN_ALLOC_SIZE;
    auto lang = Runtime::GetCurrent()->GetLanguageContext(panda_file::SourceLang::PANDA_ASSEMBLY);
    auto *classLinkerExt = Runtime::GetCurrent()->GetClassLinker()->GetExtension(lang);
    for (size_t i = 0; i < ALLOCATION_COUNT; i++) {
        auto testClass = classLinkerExt->CreateClass(nullptr, 0, 0, sizeof(ark::Class));
        testClass->SetObjectSize(currentAllocSize);
        void *mem = allocator.Alloc(currentAllocSize);
        ASSERT_TRUE(mem != nullptr);
        allocatedElements.push_back(mem);
        auto object = static_cast<ObjectHeader *>(mem);
        object->SetClass(testClass);
        currentAllocSize += ((MAX_ALLOC_SIZE - MIN_ALLOC_SIZE) / ALLOCATION_COUNT);
    }
    static std::set<void *> foundedElements;
    static auto deleteAll = [](ObjectHeader *object) {
        foundedElements.insert(object);
        return ObjectStatus::ALIVE_OBJECT;
    };
    // Collect all objects into the set via the allocator's method
    allocator.CollectAndRemoveFreeRegions(REGION_VISITOR, deleteAll);
    for (auto i : allocatedElements) {
        auto element = foundedElements.find(i);
        ASSERT_TRUE(element != foundedElements.end());
        foundedElements.erase(element);
    }
    ASSERT_TRUE(foundedElements.empty());
}

TEST_F(RegionHumongousObjectAllocatorTest, TestCollectAliveObject)
{
    mem::MemStatsType memStats;
    RegionHumongousObjectAllocator allocator(&memStats, &spaces_, SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT);
    auto lang = Runtime::GetCurrent()->GetLanguageContext(panda_file::SourceLang::PANDA_ASSEMBLY);
    auto *classLinkerExt = Runtime::GetCurrent()->GetClassLinker()->GetExtension(lang);
    auto testClass = classLinkerExt->CreateClass(nullptr, 0, 0, sizeof(ark::Class));
    size_t objectSize = DEFAULT_REGION_SIZE + 1;
    testClass->SetObjectSize(objectSize);
    void *mem = allocator.Alloc(objectSize);
    ASSERT_TRUE(mem != nullptr);
    auto object = static_cast<ObjectHeader *>(mem);
    object->SetClass(testClass);
    Region *region = ObjectToRegion(object);

    allocator.CollectAndRemoveFreeRegions(REGION_VISITOR, [](ObjectHeader *) { return ObjectStatus::ALIVE_OBJECT; });
    bool hasRegion = false;
    allocator.GetSpace()->IterateRegions([region, &hasRegion](Region *r) { hasRegion |= region == r; });
    ASSERT_TRUE(hasRegion);
    ASSERT(!region->HasFlag(RegionFlag::IS_FREE));
}

TEST_F(RegionHumongousObjectAllocatorTest, TestCollectDeadObject)
{
    mem::MemStatsType memStats;
    RegionHumongousObjectAllocator allocator(&memStats, &spaces_, SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT);
    auto lang = Runtime::GetCurrent()->GetLanguageContext(panda_file::SourceLang::PANDA_ASSEMBLY);
    auto *classLinkerExt = Runtime::GetCurrent()->GetClassLinker()->GetExtension(lang);
    auto testClass = classLinkerExt->CreateClass(nullptr, 0, 0, sizeof(ark::Class));
    size_t objectSize = DEFAULT_REGION_SIZE + 1;
    testClass->SetObjectSize(objectSize);
    void *mem = allocator.Alloc(objectSize);
    ASSERT_TRUE(mem != nullptr);
    auto object = static_cast<ObjectHeader *>(mem);
    object->SetClass(testClass);
    Region *region = ObjectToRegion(object);

    allocator.CollectAndRemoveFreeRegions(REGION_VISITOR, [](ObjectHeader *) { return ObjectStatus::DEAD_OBJECT; });
    bool hasRegion = false;
    allocator.GetSpace()->IterateRegions([region, &hasRegion](Region *r) { hasRegion |= region == r; });
    ASSERT_TRUE(!hasRegion);
}

}  // namespace ark::mem::test