/**
 * Copyright (c) 2024-2025 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "runtime/include/mem/allocator-inl.h"
#include "runtime/mem/gc/g1/g1-allocator.h"
#include "runtime/mem/freelist_allocator-inl.h"
#include "runtime/mem/humongous_obj_allocator-inl.h"
#include "runtime/mem/pygote_space_allocator-inl.h"
#include "runtime/mem/rem_set-inl.h"
#include "runtime/include/panda_vm.h"

namespace ark::mem {

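// Sets up the three region-based spaces used by G1: movable regular objects,
// non-movable objects, and humongous (oversized) objects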
template <MTModeT MT_MODE>
ObjectAllocatorG1<MT_MODE>::ObjectAllocatorG1(MemStatsType *memStats, [[maybe_unused]] bool createPygoteSpaceAllocator)
    : ObjectAllocatorGenBase(memStats, GCCollectMode::GC_ALL, false)
{
    size_t reservedTenuredRegionsCount = Runtime::GetOptions().GetG1NumberOfTenuredRegionsAtMixedCollection();
    objectAllocator_ = MakePandaUnique<ObjectAllocator>(memStats, &heapSpaces_, SpaceType::SPACE_TYPE_OBJECT, 0, true,
                                                        reservedTenuredRegionsCount);
    nonmovableAllocator_ =
        MakePandaUnique<NonMovableAllocator>(memStats, &heapSpaces_, SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT);
    humongousObjectAllocator_ =
        MakePandaUnique<HumongousObjectAllocator>(memStats, &heapSpaces_, SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT);
    memStats_ = memStats;
}

template <MTModeT MT_MODE>
size_t ObjectAllocatorG1<MT_MODE>::GetRegularObjectMaxSize()
{
    return ObjectAllocator::GetMaxRegularObjectSize();
}

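// In G1 the large-object limit coincides with the regular-object limit;
// anything larger is allocated in the humongous-object space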
template <MTModeT MT_MODE>
size_t ObjectAllocatorG1<MT_MODE>::GetLargeObjectMaxSize()
{
    return ObjectAllocator::GetMaxRegularObjectSize();
}

template <MTModeT MT_MODE>
bool ObjectAllocatorG1<MT_MODE>::IsObjectInYoungSpace(const ObjectHeader *obj)
{
    Region *regWithObj = ObjectToRegion(obj);
    ASSERT(regWithObj != nullptr);
    return regWithObj->IsYoung();
}

template <MTModeT MT_MODE>
bool ObjectAllocatorG1<MT_MODE>::IsIntersectedWithYoung(const MemRange &memRange)
{
    auto youngMemRanges = GetYoungSpaceMemRanges();
    for (const auto &youngMemRange : youngMemRanges) {
        if (youngMemRange.IsIntersect(memRange)) {
            return true;
        }
    }
    return false;
}

template <MTModeT MT_MODE>
bool ObjectAllocatorG1<MT_MODE>::HasYoungSpace()
{
    return true;
}

template <MTModeT MT_MODE>
const std::vector<MemRange> &ObjectAllocatorG1<MT_MODE>::GetYoungSpaceMemRanges()
{
    return GetYoungRanges();
}

template <MTModeT MT_MODE>
std::vector<MarkBitmap *> &ObjectAllocatorG1<MT_MODE>::GetYoungSpaceBitmaps()
{
    return GetYoungBitmaps();
}

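// Creates a thread-local allocation buffer: single-threaded VMs get a whole
// region, while multi-threaded modes get a buffer of the requested size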
template <MTModeT MT_MODE>
TLAB *ObjectAllocatorG1<MT_MODE>::CreateNewTLAB([[maybe_unused]] size_t tlabSize)
{
    TLAB *newTlab = nullptr;
    if constexpr (MT_MODE == MT_MODE_SINGLE) {
        // For single-threaded VMs, allocate a whole region for the TLAB
        newTlab = objectAllocator_->CreateRegionSizeTLAB();
    } else {
        newTlab = objectAllocator_->CreateTLAB(tlabSize);
    }
    if (newTlab != nullptr) {
        ASAN_UNPOISON_MEMORY_REGION(newTlab->GetStartAddr(), newTlab->GetSize());
        MemoryInitialize(newTlab->GetStartAddr(), newTlab->GetSize());
        ASAN_POISON_MEMORY_REGION(newTlab->GetStartAddr(), newTlab->GetSize());
    }
    return newTlab;
}

template <MTModeT MT_MODE>
size_t ObjectAllocatorG1<MT_MODE>::GetTLABMaxAllocSize()
{
    if constexpr (MT_MODE == MT_MODE_SINGLE) {
        // For single-threaded VMs we can allocate objects of up to the region size in TLABs.
        return GetYoungAllocMaxSize();
    } else {
        if (Runtime::GetOptions().IsAdaptiveTlabSize()) {
            return Runtime::GetOptions().GetMaxTlabSize();
        }
        return Runtime::GetOptions().GetInitTlabSize();
    }
}

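// Dispatches object iteration for a card's memory range to the allocator that
// owns it, based on the space type of the range's start address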
template <MTModeT MT_MODE>
void ObjectAllocatorG1<MT_MODE>::IterateOverObjectsInRange(MemRange memRange, const ObjectVisitor &objectVisitor)
{
    // We need to ensure that the memory range related to a card is located in exactly one allocator
    auto spaceType = PoolManager::GetMmapMemPool()->GetSpaceTypeForAddr(ToVoidPtr(memRange.GetStartAddress()));
    switch (spaceType) {
        case SpaceType::SPACE_TYPE_OBJECT:
            objectAllocator_->IterateOverObjectsInRange(objectVisitor, ToVoidPtr(memRange.GetStartAddress()),
                                                        ToVoidPtr(memRange.GetEndAddress()));
            break;
        case SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT: {
            if (pygoteSpaceAllocator_ != nullptr) {
                pygoteSpaceAllocator_->IterateOverObjectsInRange(objectVisitor, ToVoidPtr(memRange.GetStartAddress()),
                                                                 ToVoidPtr(memRange.GetEndAddress()));
            }
            auto region = AddrToRegion(ToVoidPtr(memRange.GetStartAddress()));
            region->GetLiveBitmap()->IterateOverMarkedChunkInRange(
                ToVoidPtr(memRange.GetStartAddress()), ToVoidPtr(memRange.GetEndAddress()),
                [&objectVisitor](void *mem) { objectVisitor(reinterpret_cast<ObjectHeader *>(mem)); });
            break;
        }
        case SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT:
            humongousObjectAllocator_->IterateOverObjectsInRange(objectVisitor, ToVoidPtr(memRange.GetStartAddress()),
                                                                 ToVoidPtr(memRange.GetEndAddress()));
            break;
        default:
            // If we reach this line, we may have an issue with multi-VM CardTable iteration
            UNREACHABLE();
            break;
    }
}

// Maybe ObjectAllocatorGen and ObjectAllocatorNoGen should have an inheritance relationship
template <MTModeT MT_MODE>
bool ObjectAllocatorG1<MT_MODE>::ContainObject(const ObjectHeader *obj) const
{
    if (pygoteSpaceAllocator_ != nullptr && pygoteSpaceAllocator_->ContainObject(obj)) {
        return true;
    }
    if (objectAllocator_->ContainObject(obj)) {
        return true;
    }
    if (nonmovableAllocator_->ContainObject(obj)) {
        return true;
    }
    if (humongousObjectAllocator_->ContainObject(obj)) {
        return true;
    }

    return false;
}

template <MTModeT MT_MODE>
bool ObjectAllocatorG1<MT_MODE>::IsLive(const ObjectHeader *obj)
{
    if (pygoteSpaceAllocator_ != nullptr && pygoteSpaceAllocator_->ContainObject(obj)) {
        return pygoteSpaceAllocator_->IsLive(obj);
    }
    if (objectAllocator_->ContainObject(obj)) {
        return objectAllocator_->IsLive(obj);
    }
    if (nonmovableAllocator_->ContainObject(obj)) {
        return nonmovableAllocator_->IsLive(obj);
    }
    if (humongousObjectAllocator_->ContainObject(obj)) {
        return humongousObjectAllocator_->IsLive(obj);
    }
    return false;
}

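// Objects up to the young allocation limit go to the movable region space;
// larger allocations fall through to the humongous-object allocator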
template <MTModeT MT_MODE>
void *ObjectAllocatorG1<MT_MODE>::Allocate(size_t size, Alignment align, [[maybe_unused]] ark::ManagedThread *thread,
                                           ObjMemInitPolicy objInit, [[maybe_unused]] bool pinned)
{
    void *mem = nullptr;
    size_t alignedSize = AlignUp(size, GetAlignmentInBytes(align));
    if (LIKELY(alignedSize <= GetYoungAllocMaxSize())) {
        mem = objectAllocator_->Alloc(size, align, pinned);
    } else {
        mem = humongousObjectAllocator_->Alloc(size, DEFAULT_ALIGNMENT);
        // Humongous allocations have their memory initialized by default
        return mem;
    }
    if (objInit == ObjMemInitPolicy::REQUIRE_INIT) {
        ObjectMemoryInit(mem, size);
    }
    return mem;
}

template <MTModeT MT_MODE>
void *ObjectAllocatorG1<MT_MODE>::AllocateNonMovable(size_t size, Alignment align,
                                                     [[maybe_unused]] ark::ManagedThread *thread,
                                                     ObjMemInitPolicy objInit)
{
    void *mem = nullptr;
    // Before the pygote fork, allocate small non-movable objects in the pygote space
    if (UNLIKELY(IsPygoteAllocEnabled() && pygoteSpaceAllocator_->CanAllocNonMovable(size, align))) {
        mem = pygoteSpaceAllocator_->Alloc(size, align);
    } else {
        size_t alignedSize = AlignUp(size, GetAlignmentInBytes(align));
        if (alignedSize <= ObjectAllocator::GetMaxRegularObjectSize()) {
            // NOTE(dtrubenkov): check if we don't need to handle OOM
            mem = nonmovableAllocator_->Alloc(alignedSize, align);
        } else {
            // Humongous objects are non-movable
            mem = humongousObjectAllocator_->Alloc(alignedSize, align);
            // Humongous allocations have their memory initialized by default
            return mem;
        }
    }
    if (objInit == ObjMemInitPolicy::REQUIRE_INIT) {
        ObjectMemoryInit(mem, size);
    }
    return mem;
}

template <MTModeT MT_MODE>
Alignment ObjectAllocatorG1<MT_MODE>::CalculateAllocatorAlignment(size_t align)
{
    ASSERT(GetPurpose() == AllocatorPurpose::ALLOCATOR_PURPOSE_OBJECT);
    return GetAlignment(align);
}

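// G1 has no direct tenured-allocation entry points; these must never be called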
template <MTModeT MT_MODE>
void *ObjectAllocatorG1<MT_MODE>::AllocateTenured([[maybe_unused]] size_t size)
{
    UNREACHABLE();
    return nullptr;
}

template <MTModeT MT_MODE>
void *ObjectAllocatorG1<MT_MODE>::AllocateTenuredWithoutLocks([[maybe_unused]] size_t size)
{
    UNREACHABLE();
    return nullptr;
}

template <MTModeT MT_MODE>
void ObjectAllocatorG1<MT_MODE>::VisitAndRemoveAllPools(const MemVisitor &memVisitor)
{
    if (pygoteSpaceAllocator_ != nullptr) {
        pygoteSpaceAllocator_->VisitAndRemoveAllPools(memVisitor);
    }
    objectAllocator_->VisitAndRemoveAllPools(memVisitor);
    nonmovableAllocator_->VisitAndRemoveAllPools(memVisitor);
    humongousObjectAllocator_->VisitAndRemoveAllPools(memVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorG1<MT_MODE>::VisitAndRemoveFreePools(const MemVisitor &memVisitor)
{
    if (pygoteSpaceAllocator_ != nullptr) {
        pygoteSpaceAllocator_->VisitAndRemoveFreePools(memVisitor);
    }
}

template <MTModeT MT_MODE>
void ObjectAllocatorG1<MT_MODE>::IterateOverYoungObjects(const ObjectVisitor &objectVisitor)
{
    auto youngRegions = objectAllocator_->template GetAllSpecificRegions<RegionFlag::IS_EDEN>();
    for (auto r : youngRegions) {
        r->template IterateOverObjects(objectVisitor);
    }
}

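// Young-generation capacity expressed in whole regions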
template <MTModeT MT_MODE>
size_t ObjectAllocatorG1<MT_MODE>::GetMaxYoungRegionsCount()
{
    return GetHeapSpace()->GetMaxYoungSize() / REGION_SIZE;
}

template <MTModeT MT_MODE>
PandaVector<Region *> ObjectAllocatorG1<MT_MODE>::GetYoungRegions()
{
    return objectAllocator_->template GetAllSpecificRegions<RegionFlag::IS_EDEN>();
}

template <MTModeT MT_MODE>
PandaVector<Region *> ObjectAllocatorG1<MT_MODE>::GetMovableRegions()
{
    return objectAllocator_->GetAllRegions();
}

template <MTModeT MT_MODE>
PandaVector<Region *> ObjectAllocatorG1<MT_MODE>::GetAllRegions()
{
    PandaVector<Region *> regions = objectAllocator_->GetAllRegions();
    PandaVector<Region *> nonMovableRegions = nonmovableAllocator_->GetAllRegions();
    PandaVector<Region *> humongousRegions = humongousObjectAllocator_->GetAllRegions();
    regions.insert(regions.end(), nonMovableRegions.begin(), nonMovableRegions.end());
    regions.insert(regions.end(), humongousRegions.begin(), humongousRegions.end());
    return regions;
}

template <MTModeT MT_MODE>
PandaVector<Region *> ObjectAllocatorG1<MT_MODE>::GetNonRegularRegions()
{
    PandaVector<Region *> regions = nonmovableAllocator_->GetAllRegions();
    PandaVector<Region *> humongousRegions = humongousObjectAllocator_->GetAllRegions();
    regions.insert(regions.end(), humongousRegions.begin(), humongousRegions.end());
    return regions;
}

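// Sweeps the non-movable and humongous spaces and releases fully freed regions
// through the region visitor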
template <MTModeT MT_MODE>
void ObjectAllocatorG1<MT_MODE>::CollectNonRegularRegions(const RegionsVisitor &regionVisitor,
                                                          const GCObjectVisitor &gcObjectVisitor)
{
    nonmovableAllocator_->Collect(gcObjectVisitor);
    nonmovableAllocator_->VisitAndRemoveFreeRegions(regionVisitor);
    humongousObjectAllocator_->CollectAndRemoveFreeRegions(regionVisitor, gcObjectVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorG1<MT_MODE>::IterateOverTenuredObjects(const ObjectVisitor &objectVisitor)
{
    if (pygoteSpaceAllocator_ != nullptr) {
        pygoteSpaceAllocator_->IterateOverObjects(objectVisitor);
    }
    objectAllocator_->IterateOverObjects(objectVisitor);
    nonmovableAllocator_->IterateOverObjects(objectVisitor);
    IterateOverHumongousObjects(objectVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorG1<MT_MODE>::IterateOverHumongousObjects(const ObjectVisitor &objectVisitor)
{
    humongousObjectAllocator_->IterateOverObjects(objectVisitor);
}

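// Prefers the live bitmap when the region has one; otherwise walks every object
// in the region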
static inline void IterateOverObjectsInRegion(Region *region, const ObjectVisitor &objectVisitor)
{
    if (region->GetLiveBitmap() != nullptr) {
        region->GetLiveBitmap()->IterateOverMarkedChunks(
            [&objectVisitor](void *mem) { objectVisitor(static_cast<ObjectHeader *>(mem)); });
    } else {
        region->IterateOverObjects(objectVisitor);
    }
}

template <MTModeT MT_MODE>
void ObjectAllocatorG1<MT_MODE>::IterateOverObjects(const ObjectVisitor &objectVisitor)
{
    if (pygoteSpaceAllocator_ != nullptr) {
        pygoteSpaceAllocator_->IterateOverObjects(objectVisitor);
    }
    for (Region *region : objectAllocator_->GetAllRegions()) {
        IterateOverObjectsInRegion(region, objectVisitor);
    }
    for (Region *region : nonmovableAllocator_->GetAllRegions()) {
        IterateOverObjectsInRegion(region, objectVisitor);
    }
    for (Region *region : humongousObjectAllocator_->GetAllRegions()) {
        IterateOverObjectsInRegion(region, objectVisitor);
    }
}

template <MTModeT MT_MODE>
void ObjectAllocatorG1<MT_MODE>::IterateRegularSizeObjects(const ObjectVisitor &objectVisitor)
{
    objectAllocator_->IterateOverObjects(objectVisitor);
    nonmovableAllocator_->IterateOverObjects(objectVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorG1<MT_MODE>::IterateNonRegularSizeObjects(const ObjectVisitor &objectVisitor)
{
    if (pygoteSpaceAllocator_ != nullptr) {
        pygoteSpaceAllocator_->IterateOverObjects(objectVisitor);
    }
    humongousObjectAllocator_->IterateOverObjects(objectVisitor);
}

template <MTModeT MT_MODE>
void ObjectAllocatorG1<MT_MODE>::FreeObjectsMovedToPygoteSpace()
{
    // Clear because we have moved all objects in it to the pygote space
    // NOTE(dtrubenkov): FIX clean object_allocator_
    objectAllocator_.reset(new (std::nothrow) ObjectAllocator(memStats_, &heapSpaces_));
    ASSERT(objectAllocator_.get() != nullptr);
}

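// Collects TLAB metrics, clears every thread's TLAB, and resets all eden regions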
template <MTModeT MT_MODE>
void ObjectAllocatorG1<MT_MODE>::ResetYoungAllocator()
{
    auto callback = [](ManagedThread *thread) {
        thread->CollectTLABMetrics();
        if (Runtime::GetOptions().IsAdaptiveTlabSize()) {
            thread->GetWeightedTlabAverage()->ComputeNewSumAndResetSamples();
        }
        // Here we should not collect the current TLAB's fill statistics for the adaptive size,
        // since it may not be completely filled before resetting
        thread->ClearTLAB();
        return true;
    };
    Thread::GetCurrent()->GetVM()->GetThreadManager()->EnumerateThreads(callback);
    objectAllocator_->ResetAllSpecificRegions<RegionFlag::IS_EDEN>();
}

template <MTModeT MT_MODE>
bool ObjectAllocatorG1<MT_MODE>::IsObjectInNonMovableSpace(const ObjectHeader *obj)
{
    return nonmovableAllocator_->ContainObject(obj);
}

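// Caches the address ranges and mark bitmaps of the current eden regions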
template <MTModeT MT_MODE>
void ObjectAllocatorG1<MT_MODE>::UpdateSpaceData()
{
    ASSERT(GetYoungRanges().empty());
    ASSERT(GetYoungBitmaps().empty());
    for (auto r : objectAllocator_->template GetAllSpecificRegions<RegionFlag::IS_EDEN>()) {
        GetYoungRanges().emplace_back(r->Begin(), r->End());
        GetYoungBitmaps().push_back(r->GetMarkBitmap());
    }
}

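// Young compaction evacuates live eden objects into old regions; tenured
// compaction moves live objects between old regions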
template <MTModeT MT_MODE>
void ObjectAllocatorG1<MT_MODE>::CompactYoungRegions(const GCObjectVisitor &deathChecker,
                                                     const ObjectVisitorEx &moveChecker)
{
    objectAllocator_->template CompactAllSpecificRegions<RegionFlag::IS_EDEN, RegionFlag::IS_OLD>(deathChecker,
                                                                                                  moveChecker);
}

template <MTModeT MT_MODE>
void ObjectAllocatorG1<MT_MODE>::CompactTenuredRegions(const PandaVector<Region *> &regions,
                                                       const GCObjectVisitor &deathChecker,
                                                       const ObjectVisitorEx &moveChecker)
{
    objectAllocator_->template CompactSeveralSpecificRegions<RegionFlag::IS_OLD, RegionFlag::IS_OLD>(
        regions, deathChecker, moveChecker);
}

template <MTModeT MT_MODE>
void ObjectAllocatorG1<MT_MODE>::PinObject(ObjectHeader *object)
{
    if (objectAllocator_->ContainObject(object)) {
        objectAllocator_->PinObject(object);
    }
}

template <MTModeT MT_MODE>
void ObjectAllocatorG1<MT_MODE>::UnpinObject(ObjectHeader *object)
{
    if (objectAllocator_->ContainObject(object)) {
        objectAllocator_->UnpinObject(object);
    }
}

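// Explicit instantiations for all supported multithreading modes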
template class ObjectAllocatorG1<MT_MODE_SINGLE>;
template class ObjectAllocatorG1<MT_MODE_MULTI>;
template class ObjectAllocatorG1<MT_MODE_TASK>;

}  // namespace ark::mem