• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (c) 2025 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #ifndef COMMON_COMPONENTS_HEAP_ALLOC_BUFFER_H
17 #define COMMON_COMPONENTS_HEAP_ALLOC_BUFFER_H
18 
19 #include <functional>
20 
21 #include "common_components/mutator/thread_local.h"
22 #include "common_components/heap/allocator/region_list.h"
23 #include "common_components/common/mark_work_stack.h"
24 
25 namespace common {
26 
// Selects which thread-local allocation region (TLAB) a templated
// AllocationBuffer operation targets.
enum class AllocBufferType: uint8_t {
    YOUNG = 0, // for young space
    OLD,       // for old space
    TO         // for to space, valid only during GC copy/fix phase and will become old-space later
};
32 
33 // thread-local data structure
34 class AllocationBuffer {
35 public:
AllocationBuffer()36     AllocationBuffer() : tlRawPointerRegions_("thread-local raw-pointer regions") {}
37     ~AllocationBuffer();
38     void Init();
39     static AllocationBuffer* GetOrCreateAllocBuffer();
40     static AllocationBuffer* GetAllocBuffer();
41     HeapAddress ToSpaceAllocate(size_t size);
42     HeapAddress Allocate(size_t size, AllocType allocType);
43 
44     template<AllocBufferType type = AllocBufferType::YOUNG>
GetRegion()45     RegionDesc* GetRegion()
46     {
47         if constexpr (type == AllocBufferType::YOUNG) {
48             return tlRegion_;
49         } else if constexpr (type == AllocBufferType::OLD) {
50             return tlOldRegion_;
51         } else if constexpr (type == AllocBufferType::TO) {
52             return tlToRegion_;
53         }
54     }
55 
56     template <AllocBufferType type = AllocBufferType::YOUNG>
SetRegion(RegionDesc * newRegion)57     void SetRegion(RegionDesc* newRegion)
58     {
59         if constexpr (type == AllocBufferType::YOUNG) {
60             tlRegion_ = newRegion;
61         } else if constexpr (type == AllocBufferType::OLD) {
62             tlOldRegion_ = newRegion;
63         } else if constexpr (type == AllocBufferType::TO) {
64             tlToRegion_ = newRegion;
65         }
66     }
67 
GetPreparedRegion()68     RegionDesc* GetPreparedRegion() { return preparedRegion_.load(std::memory_order_acquire); }
69 
70     template <AllocBufferType type>
ClearRegion()71     inline void ClearRegion()
72     {
73         if constexpr (type == AllocBufferType::YOUNG) {
74             tlRegion_ = RegionDesc::NullRegion();
75         } else if constexpr (type == AllocBufferType::OLD) {
76             tlOldRegion_ = RegionDesc::NullRegion();
77         } else if constexpr (type == AllocBufferType::TO) {
78             tlToRegion_ = RegionDesc::NullRegion();
79         }
80     }
81 
ClearRegions()82     inline void ClearRegions()
83     {
84         ClearRegion<AllocBufferType::YOUNG>();
85         ClearRegion<AllocBufferType::OLD>();
86         ClearRegion<AllocBufferType::TO>();
87     }
88 
89     void ClearThreadLocalRegion();
90     void Unregister();
91 
SetPreparedRegion(RegionDesc * newPreparedRegion)92     bool SetPreparedRegion(RegionDesc* newPreparedRegion)
93     {
94         RegionDesc* expect = nullptr;
95         return preparedRegion_.compare_exchange_strong(expect, newPreparedRegion, std::memory_order_release,
96             std::memory_order_relaxed);
97     }
98 
99     // record stack roots in allocBuffer so that mutator can concurrently enumerate roots without lock.
PushRoot(BaseObject * root)100     void PushRoot(BaseObject* root) { stackRoots_.emplace_back(root); }
101 
102     // record stack roots in allocBuffer so that mutator can concurrently enumerate roots without lock.
PushRoot(uint64_t * root)103     void PushRoot(uint64_t* root) { taggedObjStackRoots_.emplace_back(root); }
104 
105     // move the stack roots to other container so that other threads can visit them.
106     template <class Functor>
MarkStack(Functor consumer)107     inline void MarkStack(Functor consumer)
108     {
109         if (taggedObjStackRoots_.empty()) {
110             return;
111         }
112         for (uint64_t* obj : taggedObjStackRoots_) {
113             consumer(reinterpret_cast<BaseObject*>(obj));
114         }
115         stackRoots_.clear();
116     }
117 
118     template<AllocBufferType allocType>
FastAllocateInTlab(size_t size)119     HeapAddress FastAllocateInTlab(size_t size)
120     {
121         if constexpr (allocType == AllocBufferType::YOUNG) {
122             if (LIKELY_CC(tlRegion_ != RegionDesc::NullRegion())) {
123                 return tlRegion_->Alloc(size);
124             }
125         } else if constexpr (allocType == AllocBufferType::OLD) {
126             if (LIKELY_CC(tlOldRegion_ != RegionDesc::NullRegion())) {
127                 return tlOldRegion_->Alloc(size);
128             }
129         }
130         return 0;
131     }
132 
133     // Allocation buffer is thread local, but held in multiple mutators per thread.
134     // RefCount records how many mutators holds this allocbuffer.
IncreaseRefCount()135     void IncreaseRefCount()
136     {
137         refCount_++;
138     }
139 
DecreaseRefCount()140     bool DecreaseRefCount()
141     {
142         return --refCount_ <= 0;
143     }
144 
GetTLRegionOffset()145     static constexpr size_t GetTLRegionOffset()
146     {
147         return offsetof(AllocationBuffer, tlRegion_);
148     }
149 
GetTLOldRegionOffset()150     static constexpr size_t GetTLOldRegionOffset()
151     {
152         return offsetof(AllocationBuffer, tlOldRegion_);
153     }
154 
155 private:
156     // slow path
157     HeapAddress TryAllocateOnce(size_t totalSize, AllocType allocType);
158     HeapAddress AllocateImpl(size_t totalSize, AllocType allocType);
159     HeapAddress AllocateRawPointerObject(size_t totalSize);
160 
161     // tlRegion in AllocBuffer is a shortcut for fast allocation.
162     // we should handle failure in RegionManager
163     RegionDesc* tlRegion_ = RegionDesc::NullRegion();     // managed by young-space
164     RegionDesc* tlOldRegion_ = RegionDesc::NullRegion();  // managed by old-space
165     // only used in ToSpaceAllocate for GC copy
166     RegionDesc* tlToRegion_ = RegionDesc::NullRegion();   // managed by to-space
167 
168     std::atomic<RegionDesc*> preparedRegion_ = { nullptr };
169     // allocate objects which are exposed to runtime thus can not be moved.
170     // allocation context is responsible to notify collector when these objects are safe to be collected.
171     RegionList tlRawPointerRegions_;
172     int64_t refCount_ { 0 };
173     // Record stack roots in concurrent enum phase, waiting for GC to merge these roots
174     std::list<BaseObject*> stackRoots_;
175 
176     std::list<uint64_t*> taggedObjStackRoots_;
177 };
178 
179 static_assert(AllocationBuffer::GetTLRegionOffset() == 0);
180 } // namespace common
181 
182 #endif // COMMON_COMPONENTS_HEAP_ALLOC_BUFFER_H
183