/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef PANDA_RUNTIME_MEM_REGION_ALLOCATOR_H
#define PANDA_RUNTIME_MEM_REGION_ALLOCATOR_H

#include <atomic>
#include <cstdint>
#include <functional>

#include "runtime/mem/region_space.h"

namespace panda {
class ManagedThread;
struct GCTask;
} // namespace panda

namespace panda::mem {

class RegionAllocatorLockConfig {
public:
    using CommonLock = os::memory::Mutex;
    using DummyLock = os::memory::DummyLock;
};

using RegionsVisitor = std::function<void(PandaVector<Region *> &vector)>;

/**
 * Return the region which corresponds to the start of the object.
 */
static inline Region *ObjectToRegion(const ObjectHeader *object)
{
    auto *region = reinterpret_cast<Region *>(ToUintPtr(object) & ~DEFAULT_REGION_MASK);
    // Getting the region for an object is a bit operation, so TSAN doesn't
    // see the relation between region creation and region access.
    // This annotation tells TSAN that this code always executes after
    // the region has been created.
    // See the corresponding annotation in RegionAllocatorBase::CreateAndSetUpNewRegion.
    TSAN_ANNOTATE_HAPPENS_AFTER(region);
    return region;
}
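
// A worked example of the mask trick above (illustrative only; the concrete
// numbers assume a 256 KB region size, i.e. DEFAULT_REGION_MASK == 0x3FFFFU,
// which may differ from the actual DEFAULT_REGION_SIZE):
//
//   ObjectHeader *obj = ...;               // object at, say, 0x7F0000052A60
//   Region *region = ObjectToRegion(obj);  // 0x7F0000052A60 & ~0x3FFFF == 0x7F0000040000
//
// The region header sits at the region-aligned base address, so the lookup is
// a single AND with no locking, which makes it suitable for hot paths.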

/**
 * Return the region which corresponds to the address.
 */
static inline Region *AddrToRegion(const void *addr)
{
    auto mem_pool = PoolManager::GetMmapMemPool();
    auto space = mem_pool->GetSpaceTypeForAddr(addr);
    if (space == SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT) {
        return Region::AddrToRegion<true>(addr, DEFAULT_REGION_MASK);
    }
    return Region::AddrToRegion<false>(addr, DEFAULT_REGION_MASK);
}

template <typename LockConfigT>
class RegionAllocatorBase {
public:
    NO_MOVE_SEMANTIC(RegionAllocatorBase);
    NO_COPY_SEMANTIC(RegionAllocatorBase);

    explicit RegionAllocatorBase(MemStatsType *mem_stats, GenerationalSpaces *spaces, SpaceType space_type,
                                 AllocatorType allocator_type, size_t init_space_size, bool extend,
                                 size_t region_size);
    explicit RegionAllocatorBase(MemStatsType *mem_stats, GenerationalSpaces *spaces, SpaceType space_type,
                                 AllocatorType allocator_type, RegionPool *shared_region_pool);

    virtual ~RegionAllocatorBase()
    {
        ClearRegionsPool();
    }

    Region *GetRegion(const ObjectHeader *object) const
    {
        return region_space_.GetRegion(object);
    }

    RegionSpace *GetSpace()
    {
        return &region_space_;
    }

    const RegionSpace *GetSpace() const
    {
        return &region_space_;
    }

    PandaVector<Region *> GetAllRegions();

protected:
    void ClearRegionsPool()
    {
        region_space_.FreeAllRegions();
        if (init_block_.GetMem() != nullptr) {
            spaces_->FreeSharedPool(init_block_.GetMem(), init_block_.GetSize());
            init_block_ = NULLPOOL;
        }
    }

    Region *AllocRegion(size_t region_size, RegionFlag eden_or_old_or_nonmovable, RegionFlag properties)
    {
        return region_space_.NewRegion(region_size, eden_or_old_or_nonmovable, properties);
    }

    SpaceType GetSpaceType() const
    {
        return space_type_;
    }

    template <typename AllocConfigT>
    Region *CreateAndSetUpNewRegion(size_t region_size, RegionFlag region_type, RegionFlag properties = IS_UNUSED)
        REQUIRES(region_lock_);

    // NOLINTNEXTLINE(misc-non-private-member-variables-in-classes)
    LockConfigT region_lock_;
    // NOLINTNEXTLINE(misc-non-private-member-variables-in-classes)
    MemStatsType *mem_stats_;
    // NOLINTNEXTLINE(misc-non-private-member-variables-in-classes)
    SpaceType space_type_;
    // NOLINTNEXTLINE(misc-non-private-member-variables-in-classes)
    GenerationalSpaces *spaces_;
    // NOLINTNEXTLINE(misc-non-private-member-variables-in-classes)
    RegionPool region_pool_; // self-created pool, used only by this allocator
    // NOLINTNEXTLINE(misc-non-private-member-variables-in-classes)
    RegionSpace region_space_; // the target region space used by this allocator
    // NOLINTNEXTLINE(misc-non-private-member-variables-in-classes)
    Pool init_block_; // the initial memory block for region allocation
};

/**
 * \brief A region-based bump-pointer allocator.
 */
template <typename AllocConfigT, typename LockConfigT = RegionAllocatorLockConfig::CommonLock>
class RegionAllocator final : public RegionAllocatorBase<LockConfigT> {
public:
    static constexpr bool USE_PARTIAL_TLAB = true;
    static constexpr size_t REGION_SIZE = DEFAULT_REGION_SIZE;

    NO_MOVE_SEMANTIC(RegionAllocator);
    NO_COPY_SEMANTIC(RegionAllocator);

    /**
     * \brief Create a new region allocator.
     * @param mem_stats - memory statistics
     * @param spaces - generational spaces
     * @param space_type - space type
     * @param init_space_size - initial continuous space size; 0 means no initial space is needed
     * @param extend - if true, more regions are allocated from the mmap pool when the initial space is exhausted
     */
    explicit RegionAllocator(MemStatsType *mem_stats, GenerationalSpaces *spaces,
                             SpaceType space_type = SpaceType::SPACE_TYPE_OBJECT, size_t init_space_size = 0,
                             bool extend = true);

    /**
     * \brief Create a new region allocator with the specified shared region pool.
     * @param mem_stats - memory statistics
     * @param spaces - generational spaces
     * @param space_type - space type
     * @param shared_region_pool - a shared region pool that can be reused by multiple spaces
     */
    explicit RegionAllocator(MemStatsType *mem_stats, GenerationalSpaces *spaces, SpaceType space_type,
                             RegionPool *shared_region_pool);

    ~RegionAllocator() override = default;

    template <RegionFlag region_type = RegionFlag::IS_EDEN, bool update_memstats = true>
    void *Alloc(size_t size, Alignment align = DEFAULT_ALIGNMENT);

    template <typename T>
    T *AllocArray(size_t arr_length)
    {
        return static_cast<T *>(Alloc(sizeof(T) * arr_length));
    }
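
    // Usage sketch (illustrative only; `mem_stats` and `spaces` are assumed to
    // be set up by the caller, as required by the constructors above):
    //
    //   RegionAllocator<AllocConfig> allocator(mem_stats, spaces);
    //   void *mem = allocator.Alloc(size);  // bump-pointer allocation, eden by default
    //   auto *arr = allocator.AllocArray<uint64_t>(16U);
    //
    // Note that AllocArray does not check sizeof(T) * arr_length for overflow;
    // callers are expected to pass a sane length.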

    void Free([[maybe_unused]] void *mem) {}

    /**
     * \brief Create a new thread-local allocation buffer (TLAB) for the given thread.
     * @param thread - pointer to the thread
     * @param size - required size of the TLAB
     * @return the newly allocated TLAB; the TLAB is set to empty if the allocation failed
     */
    TLAB *CreateNewTLAB(panda::ManagedThread *thread, size_t size = GetMaxRegularObjectSize());
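
    // Usage sketch (illustrative): a mutator thread typically requests a TLAB
    // once and then bump-allocates from it without taking region_lock_:
    //
    //   TLAB *tlab = allocator.CreateNewTLAB(thread);
    //   if (tlab != nullptr) {
    //       void *mem = tlab->Alloc(size); // thread-local fast path, no lock
    //   }
    //
    // TLAB::Alloc() is assumed here; see the TLAB header for the real interface.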

    /**
     * \brief Iterates over all objects allocated by this allocator.
     * @param visitor - function pointer or functor
     */
    template <typename ObjectVisitor>
    void IterateOverObjects(const ObjectVisitor &visitor)
    {
        this->GetSpace()->IterateRegions([&](Region *region) { region->IterateOverObjects(visitor); });
    }

    template <typename ObjectVisitor>
    void IterateOverObjectsInRange(const ObjectVisitor &visitor, void *begin, void *end)
    {
        this->GetSpace()->IterateRegions([&](Region *region) {
            if (region->Intersect(ToUintPtr(begin), ToUintPtr(end))) {
                region->IterateOverObjects([&visitor, begin, end](ObjectHeader *obj) {
                    if (ToUintPtr(begin) <= ToUintPtr(obj) && ToUintPtr(obj) < ToUintPtr(end)) {
                        visitor(obj);
                    }
                });
            }
        });
    }
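
    // Usage sketch (illustrative): any callable taking an ObjectHeader * works
    // as a visitor; the ranged variant additionally filters objects to [begin, end):
    //
    //   allocator.IterateOverObjects([](ObjectHeader *obj) { /* visit every object */ });
    //   allocator.IterateOverObjectsInRange([](ObjectHeader *obj) { /* visit */ },
    //                                       range_begin, range_end);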

    template <bool include_current_region>
    PandaVector<Region *> GetTopGarbageRegions(size_t region_count);

    /**
     * Return a vector of all regions with the specific type.
     * @tparam regions_type - type of the regions to be processed.
     * @return vector of all regions with the \p regions_type type
     */
    template <RegionFlag regions_type>
    PandaVector<Region *> GetAllSpecificRegions();

    /**
     * Iterate over all regions with type \p regions_type_from
     * and move all alive objects to regions with type \p regions_type_to.
     * NOTE: \p regions_type_from and \p regions_type_to can't be equal.
     * @tparam regions_type_from - type of the regions to be processed.
     * @tparam regions_type_to - type of the regions to which all alive objects are moved.
     * @tparam use_marked_bitmap - whether to use the marked bitmap of the regions.
     * @param death_checker - checker that returns the status of each iterated object.
     * @param move_handler - called for every moved object;
     * can be used as a simple visitor if \p use_marked_bitmap is enabled
     */
    template <RegionFlag regions_type_from, RegionFlag regions_type_to, bool use_marked_bitmap = false>
    void CompactAllSpecificRegions(const GCObjectVisitor &death_checker, const ObjectVisitorEx &move_handler);
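
    // Usage sketch (illustrative): a young-generation evacuation could move
    // every live object from eden into old regions; IsAlive() is a hypothetical
    // predicate and the move handler would record forwarding information:
    //
    //   allocator.CompactAllSpecificRegions<RegionFlag::IS_EDEN, RegionFlag::IS_OLD>(
    //       [](ObjectHeader *obj) {
    //           return IsAlive(obj) ? ObjectStatus::ALIVE_OBJECT : ObjectStatus::DEAD_OBJECT;
    //       },
    //       [](ObjectHeader *from, ObjectHeader *to) { /* record forwarding from -> to */ });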

    template <RegionFlag region_type>
    void ClearCurrentRegion()
    {
        ResetCurrentRegion<false, region_type>();
    }

    /**
     * Iterate over the specific regions from the vector
     * and move all alive objects to regions with type \p regions_type_to.
     * @tparam regions_type_from - type of the regions to be processed.
     * @tparam regions_type_to - type of the regions to which all alive objects are moved.
     * @tparam use_marked_bitmap - whether to use the marked bitmap of the regions.
     * @param regions - vector of the regions to be processed.
     * @param death_checker - checker that returns the status of each iterated object.
     * @param move_handler - called for every moved object;
     * can be used as a simple visitor if \p use_marked_bitmap is enabled
     */
    template <RegionFlag regions_type_from, RegionFlag regions_type_to, bool use_marked_bitmap = false>
    void CompactSeveralSpecificRegions(const PandaVector<Region *> &regions, const GCObjectVisitor &death_checker,
                                       const ObjectVisitorEx &move_handler);

    /**
     * Iterate over the specific region
     * and move all alive objects to regions with type \p regions_type_to.
     * @tparam regions_type_from - type of the regions to be processed.
     * @tparam regions_type_to - type of the regions to which all alive objects are moved.
     * @tparam use_marked_bitmap - whether to use the marked bitmap of the regions.
     * @param region - the region to be processed.
     * @param death_checker - checker that returns the status of each iterated object.
     * @param move_handler - called for every moved object;
     * can be used as a simple visitor if \p use_marked_bitmap is enabled
     */
    template <RegionFlag regions_type_from, RegionFlag regions_type_to, bool use_marked_bitmap = false>
    void CompactSpecificRegion(Region *region, const GCObjectVisitor &death_checker,
                               const ObjectVisitorEx &move_handler);

    template <bool use_marked_bitmap = false>
    void PromoteYoungRegion(Region *region, const GCObjectVisitor &death_checker,
                            const ObjectVisitor &alive_objects_handler);

    /**
     * Reset all regions with type \p regions_type.
     * @tparam regions_type - type of the regions to be processed.
     */
    template <RegionFlag regions_type>
    void ResetAllSpecificRegions();

    /**
     * Reset the regions from the vector.
     * @tparam regions_type - type of the regions to be processed.
     * @param regions - vector of the regions to be processed.
     */
    template <RegionFlag regions_type>
    void ResetSeveralSpecificRegions(const PandaVector<Region *> &regions);

    void VisitAndRemoveAllPools([[maybe_unused]] const MemVisitor &mem_visitor)
    {
        this->ClearRegionsPool();
    }

    static constexpr size_t GetMaxRegularObjectSize()
    {
        return REGION_SIZE - AlignUp(sizeof(Region), DEFAULT_ALIGNMENT_IN_BYTES);
    }

    bool ContainObject(const ObjectHeader *object) const
    {
        return this->GetSpace()->ContainObject(object);
    }

    bool IsLive(const ObjectHeader *object) const
    {
        return this->GetSpace()->IsLive(object);
    }

    static constexpr AllocatorType GetAllocatorType()
    {
        return AllocatorType::REGION_ALLOCATOR;
    }

private:
    template <bool atomic = true, RegionFlag region_type>
    Region *GetCurrentRegion()
    {
        Region **cur_region = GetCurrentRegionPointerUnsafe<region_type>();
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (atomic) {
            // Atomic with relaxed order reason: data race with cur_region with no synchronization or ordering
            // constraints imposed on other reads or writes
            return reinterpret_cast<std::atomic<Region *> *>(cur_region)->load(std::memory_order_relaxed);
            // NOLINTNEXTLINE(readability-misleading-indentation)
        }
        return *cur_region;
    }

    template <bool atomic = true, RegionFlag region_type>
    void SetCurrentRegion(Region *region)
    {
        Region **cur_region = GetCurrentRegionPointerUnsafe<region_type>();
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (atomic) {
            // Atomic with relaxed order reason: data race with cur_region with no synchronization or ordering
            // constraints imposed on other reads or writes
            reinterpret_cast<std::atomic<Region *> *>(cur_region)->store(region, std::memory_order_relaxed);
            // NOLINTNEXTLINE(readability-misleading-indentation)
        } else {
            *cur_region = region;
        }
    }

    template <RegionFlag region_type>
    Region **GetCurrentRegionPointerUnsafe()
    {
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (region_type == RegionFlag::IS_EDEN) {
            return &eden_current_region_;
        }
        UNREACHABLE();
        return nullptr;
    }

    template <bool atomic = true, RegionFlag region_type>
    void ResetCurrentRegion()
    {
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (region_type == RegionFlag::IS_EDEN) {
            SetCurrentRegion<atomic, region_type>(&full_region_);
            return;
        }
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (region_type == RegionFlag::IS_OLD) {
            // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
            if constexpr (atomic) {
                os::memory::LockHolder lock(*GetQueueLock<region_type>());
                GetRegionQueuePointer<region_type>()->clear();
                return;
            }
            GetRegionQueuePointer<region_type>()->clear();
            return;
        }
        UNREACHABLE();
    }

    template <bool atomic = true, RegionFlag region_type>
    bool IsInCurrentRegion(Region *region)
    {
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (region_type == RegionFlag::IS_EDEN) {
            return GetCurrentRegion<atomic, region_type>() == region;
        }
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (region_type == RegionFlag::IS_OLD) {
            // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
            if constexpr (atomic) {
                os::memory::LockHolder lock(*GetQueueLock<region_type>());
                for (auto i : *GetRegionQueuePointer<region_type>()) {
                    if (i == region) {
                        return true;
                    }
                }
                return false;
            }
            for (auto i : *GetRegionQueuePointer<region_type>()) {
                if (i == region) {
                    return true;
                }
            }
            return false;
        }
        UNREACHABLE();
        return false;
    }

    template <bool atomic = true, RegionFlag region_type>
    Region *PopFromRegionQueue()
    {
        PandaVector<Region *> *region_queue = GetRegionQueuePointer<region_type>();
        Region *region = nullptr;
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (atomic) {
            os::memory::LockHolder lock(*GetQueueLock<region_type>());
            if (!region_queue->empty()) {
                region = region_queue->back();
                region_queue->pop_back();
            }
            return region;
            // NOLINTNEXTLINE(readability-misleading-indentation)
        }
        if (!region_queue->empty()) {
            region = region_queue->back();
            region_queue->pop_back();
        }
        return region;
    }

    template <bool atomic = true, RegionFlag region_type>
    void PushToRegionQueue(Region *region)
    {
        PandaVector<Region *> *region_queue = GetRegionQueuePointer<region_type>();
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (atomic) {
            os::memory::LockHolder lock(*GetQueueLock<region_type>());
            region_queue->push_back(region);
            return;
            // NOLINTNEXTLINE(readability-misleading-indentation)
        }
        region_queue->push_back(region);
    }

    template <RegionFlag region_type>
    os::memory::Mutex *GetQueueLock()
    {
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (region_type == RegionFlag::IS_OLD) {
            return &old_queue_lock;
        }
        UNREACHABLE();
        return nullptr;
    }

    template <RegionFlag region_type>
    PandaVector<Region *> *GetRegionQueuePointer()
    {
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (region_type == RegionFlag::IS_OLD) {
            return &old_region_queue_;
        }
        UNREACHABLE();
        return nullptr;
    }

    template <RegionFlag region_type>
    void *AllocRegular(size_t align_size);

    Region full_region_;
    Region *eden_current_region_;
    os::memory::Mutex old_queue_lock;
    PandaVector<Region *> old_region_queue_;
    // To store partially used regions that can be reused later.
    panda::PandaMultiMap<size_t, Region *, std::greater<size_t>> retained_tlabs_;
    friend class test::RegionAllocatorTest;
};

/**
 * \brief A region-based allocator for non-movable objects, backed by an underlying ObjectAllocator.
 */
template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
class RegionNonmovableAllocator final : public RegionAllocatorBase<LockConfigT> {
public:
    static constexpr size_t REGION_SIZE = DEFAULT_REGION_SIZE;

    NO_MOVE_SEMANTIC(RegionNonmovableAllocator);
    NO_COPY_SEMANTIC(RegionNonmovableAllocator);

    explicit RegionNonmovableAllocator(MemStatsType *mem_stats, GenerationalSpaces *spaces, SpaceType space_type,
                                       size_t init_space_size = 0, bool extend = true);
    explicit RegionNonmovableAllocator(MemStatsType *mem_stats, GenerationalSpaces *spaces, SpaceType space_type,
                                       RegionPool *shared_region_pool);

    ~RegionNonmovableAllocator() override = default;

    void *Alloc(size_t size, Alignment align = DEFAULT_ALIGNMENT);

    void Free(void *mem);

    void Collect(const GCObjectVisitor &death_checker);
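
    // Usage sketch (illustrative): unlike the bump-pointer RegionAllocator,
    // whose Free() is a no-op, this allocator delegates to the wrapped
    // ObjectAllocator, so freed memory really is returned for reuse:
    //
    //   void *mem = nonmovable.Alloc(size);
    //   ...
    //   nonmovable.Free(mem); // slot goes back to the underlying allocator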

    template <typename ObjectVisitor>
    void IterateOverObjects(const ObjectVisitor &obj_visitor)
    {
        object_allocator_.IterateOverObjects(obj_visitor);
    }

    template <typename MemVisitor>
    void IterateOverObjectsInRange(const MemVisitor &mem_visitor, void *begin, void *end)
    {
        object_allocator_.IterateOverObjectsInRange(mem_visitor, begin, end);
    }

    void VisitAndRemoveAllPools([[maybe_unused]] const MemVisitor &mem_visitor)
    {
        object_allocator_.VisitAndRemoveAllPools([this](void *mem, [[maybe_unused]] size_t size) {
            auto *region = AddrToRegion(mem);
            ASSERT(ToUintPtr(mem) + size == region->End());
            this->GetSpace()->FreeRegion(region);
        });
    }

    void VisitAndRemoveFreeRegions(const RegionsVisitor &region_visitor);

    static constexpr size_t GetMaxSize()
    {
        // TODO(yxr) : get accurate max payload size in a freelist pool
        return std::min(ObjectAllocator::GetMaxSize(), static_cast<size_t>(REGION_SIZE - 1_KB));
    }

    bool ContainObject(const ObjectHeader *object) const
    {
        return object_allocator_.ContainObject(object);
    }

    bool IsLive(const ObjectHeader *object) const
    {
        ASSERT(this->GetRegion(object)->GetLiveBitmap() != nullptr);
        return this->GetRegion(object)->GetLiveBitmap()->AtomicTest(const_cast<ObjectHeader *>(object));
    }

private:
    void *NewRegionAndRetryAlloc(size_t object_size, Alignment align);

    mutable ObjectAllocator object_allocator_;
};

/**
 * \brief A region-based humongous allocator.
 */
template <typename AllocConfigT, typename LockConfigT = RegionAllocatorLockConfig::CommonLock>
class RegionHumongousAllocator final : public RegionAllocatorBase<LockConfigT> {
public:
    static constexpr size_t REGION_SIZE = DEFAULT_REGION_SIZE;

    NO_MOVE_SEMANTIC(RegionHumongousAllocator);
    NO_COPY_SEMANTIC(RegionHumongousAllocator);

    /**
     * \brief Create a new humongous region allocator.
     * @param mem_stats - memory statistics
     * @param spaces - generational spaces
     * @param space_type - space type
     */
    explicit RegionHumongousAllocator(MemStatsType *mem_stats, GenerationalSpaces *spaces, SpaceType space_type);

    ~RegionHumongousAllocator() override = default;

    template <bool update_memstats = true>
    void *Alloc(size_t size, Alignment align = DEFAULT_ALIGNMENT);

    template <typename T>
    T *AllocArray(size_t arr_length)
    {
        return static_cast<T *>(Alloc(sizeof(T) * arr_length));
    }

    void Free([[maybe_unused]] void *mem) {}
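
    // Usage sketch (illustrative): each humongous allocation is served by its
    // own dedicated region, so Free() is a no-op and memory is reclaimed
    // region-by-region via CollectAndRemoveFreeRegions() below:
    //
    //   void *big = humongous.Alloc(2U * REGION_SIZE); // one object, one large region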

    void CollectAndRemoveFreeRegions(const RegionsVisitor &region_visitor, const GCObjectVisitor &death_checker);

    /**
     * \brief Iterates over all objects allocated by this allocator.
     * @param visitor - function pointer or functor
     */
    template <typename ObjectVisitor>
    void IterateOverObjects(const ObjectVisitor &visitor)
    {
        this->GetSpace()->IterateRegions([&](Region *region) { region->IterateOverObjects(visitor); });
    }

    template <typename ObjectVisitor>
    void IterateOverObjectsInRange(const ObjectVisitor &visitor, void *begin, void *end)
    {
        this->GetSpace()->IterateRegions([&](Region *region) {
            if (region->Intersect(ToUintPtr(begin), ToUintPtr(end))) {
                region->IterateOverObjects([&visitor, begin, end](ObjectHeader *obj) {
                    if (ToUintPtr(begin) <= ToUintPtr(obj) && ToUintPtr(obj) < ToUintPtr(end)) {
                        visitor(obj);
                    }
                });
            }
        });
    }

    void VisitAndRemoveAllPools([[maybe_unused]] const MemVisitor &mem_visitor)
    {
        this->ClearRegionsPool();
    }

    bool ContainObject(const ObjectHeader *object) const
    {
        return this->GetSpace()->template ContainObject<true>(object);
    }

    bool IsLive(const ObjectHeader *object) const
    {
        return this->GetSpace()->template IsLive<true>(object);
    }

private:
    void ResetRegion(Region *region);
    void Collect(Region *region, const GCObjectVisitor &death_checker);

    // If we change this constant, we will increase fragmentation dramatically
    static_assert(REGION_SIZE / PANDA_POOL_ALIGNMENT_IN_BYTES == 1);
    friend class test::RegionAllocatorTest;
};

} // namespace panda::mem

#endif // PANDA_RUNTIME_MEM_REGION_ALLOCATOR_H