• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (c) 2025 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #include "common_components/heap/heap.h"
17 
18 #include "common_components/heap/ark_collector/idle_barrier.h"
19 #include "common_components/heap/ark_collector/enum_barrier.h"
20 #include "common_components/heap/ark_collector/marking_barrier.h"
21 #include "common_components/heap/ark_collector/remark_barrier.h"
22 #include "common_components/heap/ark_collector/post_marking_barrier.h"
23 #include "common_components/heap/ark_collector/preforward_barrier.h"
24 #include "common_components/heap/ark_collector/copy_barrier.h"
25 #include "common_components/heap/collector/collector_proxy.h"
26 #include "common_components/heap/collector/collector_resources.h"
27 #include "common_components/mutator/mutator_manager.h"
28 
29 #if defined(_WIN64)
30 #include <windows.h>
31 #include <psapi.h>
32 #undef ERROR
33 #endif
34 #if defined(__APPLE__)
35 #include <mach/mach.h>
36 #endif
37 namespace common {
// Heap re-exports the region unit geometry without including region internals;
// these asserts keep the two sets of constants in lock-step.
static_assert(Heap::NORMAL_UNIT_SIZE == RegionDesc::UNIT_SIZE);
static_assert(Heap::NORMAL_UNIT_HEADER_SIZE == RegionDesc::UNIT_HEADER_SIZE);
static_assert(Heap::NORMAL_UNIT_AVAILABLE_SIZE == RegionDesc::UNIT_AVAILABLE_SIZE);
41 
// Storage for Heap's static fast-path globals. currentBarrierPtr_ is pointed
// at the HeapImpl-owned atomic barrier slot by the HeapImpl constructor;
// stwBarrierPtr_ likewise refers to the HeapImpl-owned STW barrier.
std::atomic<Barrier*>* Heap::currentBarrierPtr_ = nullptr;
Barrier* Heap::stwBarrierPtr_ = nullptr;
HeapAddress Heap::heapStartAddr_ = 0;
HeapAddress Heap::heapCurrentEnd_ = 0;
46 
// Concrete Heap singleton: owns the allocator (the actual heap space), the
// collector proxy and its resources, and one barrier instance per GC phase.
// The active barrier is published through the atomic currentBarrier_ slot and
// swapped by InstallBarrier() as the GC phase changes.
class HeapImpl : public Heap {
public:
    // NOTE(review): members are constructed in declaration order (theSpace_,
    // collectorResources_, collectorProxy_, ...), so collectorResources_ is
    // handed a reference to collectorProxy_ before the proxy is constructed.
    // That is only safe if CollectorResources merely stores the reference
    // without dereferencing it during construction — confirm against
    // collector_resources.h.
    HeapImpl()
        : theSpace_(Allocator::CreateAllocator()), collectorResources_(collectorProxy_),
          collectorProxy_(*theSpace_, collectorResources_), stwBarrier_(collectorProxy_),
        idleBarrier_(collectorProxy_), enumBarrier_(collectorProxy_), markingBarrier_(collectorProxy_),
        remarkBarrier_(collectorProxy_), postMarkingBarrier_(collectorProxy_), preforwardBarrier_(collectorProxy_),
        copyBarrier_(collectorProxy_)
    {
        // Mutators start on the stop-the-world barrier until a GC phase
        // installs a specific one; publish the atomic slot to the Heap base.
        currentBarrier_.store(&stwBarrier_, std::memory_order_relaxed);
        stwBarrierPtr_ = &stwBarrier_;
        Heap::currentBarrierPtr_ = &currentBarrier_;
        RunType::InitRunTypeMap();
    }

    ~HeapImpl() override = default;
    void Init(const RuntimeParam& param) override;
    void Fini() override;
    void StartRuntimeThreads() override;
    void StopRuntimeThreads() override;

    // An object survives the current cycle if it is either marked live or
    // resurrected (reachable only via finalization).
    bool IsSurvivedObject(const BaseObject* obj) const override
    {
        return RegionSpace::IsMarkedObject(obj) || RegionSpace::IsResurrectedObject(obj);
    }

    bool IsGcStarted() const override { return collectorResources_.IsGcStarted(); }

    // Blocks the caller until the in-flight GC (if any) completes.
    void WaitForGCFinish() override { return collectorResources_.WaitForGCFinish(); }

    void MarkGCStart() override { return collectorResources_.MarkGCStart(); }
    void MarkGCFinish() override { return collectorResources_.MarkGCFinish(); }

    bool IsGCEnabled() const override { return isGCEnabled_.load(); }

    void EnableGC(bool val) override { return isGCEnabled_.store(val); }

    GCReason GetGCReason() override { return gcReason_; }

    void SetGCReason(GCReason reason) override { gcReason_ = reason; }

    // True when addr lies in a region recently allocated (young) space.
    // NOTE(review): addr is not validated here; presumably callers guarantee
    // it points into the heap — confirm.
    bool InRecentSpace(const void *addr) override
    {
        RegionDesc *region = RegionDesc::GetRegionDescAt(reinterpret_cast<HeapAddress>(addr));
        return region->IsInRecentSpace();
    }
    bool GetForceThrowOOM() const override { return isForceThrowOOM_; };
    void SetForceThrowOOM(bool val) override { isForceThrowOOM_ = val; };

    HeapAddress Allocate(size_t size, AllocType allocType, bool allowGC = true) override;

    GCPhase GetGCPhase() const override;
    void SetGCPhase(const GCPhase phase) override;
    Collector& GetCollector() override;
    Allocator& GetAllocator() override;

    size_t GetMaxCapacity() const override;
    size_t GetCurrentCapacity() const override;
    size_t GetUsedPageSize() const override;
    size_t GetAllocatedSize() const override;
    size_t GetSurvivedSize() const override;
    size_t GetRemainHeapSize() const override;
    size_t GetAccumulatedAllocateSize() const override;
    size_t GetAccumulatedFreeSize() const override;
    HeapAddress GetStartAddress() const override;
    HeapAddress GetSpaceEndAddress() const override;
    void VisitStaticRoots(const RefFieldVisitor& visitor) override;
    bool ForEachObject(const std::function<void(BaseObject*)>&, bool) override;
    void InstallBarrier(const GCPhase phase) override;
    FinalizerProcessor& GetFinalizerProcessor() override;
    CollectorResources& GetCollectorResources() override;
    void RegisterAllocBuffer(AllocationBuffer& buffer) override;
    void UnregisterAllocBuffer(AllocationBuffer& buffer) override;
    void StopGCWork() override;
    void TryHeuristicGC() override;
    void TryIdleGC() override;
    void NotifyNativeAllocation(size_t bytes) override;
    void NotifyNativeFree(size_t bytes) override;
    void NotifyNativeReset(size_t oldBytes, size_t newBytes) override;
    size_t GetNotifiedNativeSize() const override;
    void SetNativeHeapThreshold(size_t newThreshold) override;
    size_t GetNativeHeapThreshold() const override;
    void ChangeGCParams(bool isBackground) override;
    void RecordAliveSizeAfterLastGC(size_t aliveBytes) override;
    bool CheckAndTriggerHintGC(MemoryReduceDegree degree) override;
    void NotifyHighSensitive(bool isStart) override;
    void SetRecordHeapObjectSizeBeforeSensitive(size_t objSize) override;
    AppSensitiveStatus GetSensitiveStatus() override;
    StartupStatus GetStartupStatus() override;

private:
    // allocator is actually a subspace in heap
    Allocator* theSpace_;

    CollectorResources collectorResources_;

    // collector is closely related to barrier. but we do not put barrier inside collector because even without
    // collector (i.e. no-gc), allocator and barrier (interface to access heap) is still needed.
    CollectorProxy collectorProxy_;

    // One barrier instance per GC phase; InstallBarrier() selects among them.
    Barrier stwBarrier_;
    IdleBarrier idleBarrier_;
    EnumBarrier enumBarrier_;
    MarkingBarrier markingBarrier_;
    RemarkBarrier remarkBarrier_;
    PostMarkingBarrier postMarkingBarrier_;
    PreforwardBarrier preforwardBarrier_;
    CopyBarrier copyBarrier_;
    // The barrier currently in effect; read via Heap::currentBarrierPtr_.
    std::atomic<Barrier*> currentBarrier_ = nullptr;
    HeuristicGCPolicy heuristicGCPolicy_;
    // manage gc roots entry
    StaticRootTable staticRootTable_;

    std::atomic<bool> isGCEnabled_ = { true };

    GCReason gcReason_ = GCReason::GC_REASON_INVALID;
    bool isForceThrowOOM_ = { false };
}; // end class HeapImpl
165 
166 static ImmortalWrapper<HeapImpl> g_heapInstance;
167 
Allocate(size_t size,AllocType allocType,bool allowGC)168 HeapAddress HeapImpl::Allocate(size_t size, AllocType allocType, bool allowGC)
169 {
170     if (allowGC) {
171         return theSpace_->Allocate(size, allocType);
172     } else {
173         return theSpace_->AllocateNoGC(size, allocType);
174     }
175 }
176 
ForEachObject(const std::function<void (BaseObject *)> & visitor,bool safe)177 bool HeapImpl::ForEachObject(const std::function<void(BaseObject*)>& visitor, bool safe)
178 {
179     {
180         ScopedEnterSaferegion enterSaferegion(false);
181         this->GetCollectorResources().WaitForGCFinish();
182     }
183     // Expect no gc in ForEachObj, dfx tools and oom gc should be considered.
184     return theSpace_->ForEachObject(visitor, safe);
185 }
186 
// Initializes the heap from runtime parameters: the space first, then the GC
// enable flag, the collector proxy/resources, and the heuristic GC policy.
void HeapImpl::Init(const RuntimeParam& param)
{
    if (theSpace_ == nullptr) {
        // Hack impl, since HeapImpl is Immortal, this may happen in multi UT case
        // (a previous Fini() deleted the space); placement-new reconstructs
        // this object in place to restore a usable state.
        new (this) HeapImpl();
    }
    theSpace_->Init(param);
    Heap::GetHeap().EnableGC(param.gcParam.enableGC);
    collectorProxy_.Init(param);
    collectorResources_.Init();
    heuristicGCPolicy_.Init();
}
199 
Fini()200 void HeapImpl::Fini()
201 {
202     collectorResources_.Fini();
203     collectorProxy_.Fini();
204     if (theSpace_ != nullptr) {
205         delete theSpace_;
206         theSpace_ = nullptr;
207     }
208 }
209 
// Starts the GC worker threads owned by the collector resources.
void HeapImpl::StartRuntimeThreads()
{
    collectorResources_.StartRuntimeThreads();
}

// Stops the GC worker threads started by StartRuntimeThreads().
void HeapImpl::StopRuntimeThreads()
{
    collectorResources_.StopRuntimeThreads();
}
219 
// Lets the heuristic policy decide whether to trigger a GC now.
void HeapImpl::TryHeuristicGC()
{
    heuristicGCPolicy_.TryHeuristicGC();
}

// Lets the heuristic policy trigger a GC during application idle time.
void HeapImpl::TryIdleGC()
{
    heuristicGCPolicy_.TryIdleGC();
}

// Accounts bytes allocated by native (off-heap) code toward GC pressure.
void HeapImpl::NotifyNativeAllocation(size_t bytes)
{
    heuristicGCPolicy_.NotifyNativeAllocation(bytes);
}

// Accounts bytes released by native code.
void HeapImpl::NotifyNativeFree(size_t bytes)
{
    heuristicGCPolicy_.NotifyNativeFree(bytes);
}

// Re-accounts a resized native buffer: retire the old size, record the new.
void HeapImpl::NotifyNativeReset(size_t oldBytes, size_t newBytes)
{
    heuristicGCPolicy_.NotifyNativeFree(oldBytes);
    heuristicGCPolicy_.NotifyNativeAllocation(newBytes);
}
245 
// Returns the native allocation total currently tracked by the GC policy.
size_t HeapImpl::GetNotifiedNativeSize() const
{
    return heuristicGCPolicy_.GetNotifiedNativeSize();
}

// Sets the tracked-native-memory threshold used by the GC policy.
void HeapImpl::SetNativeHeapThreshold(size_t newThreshold)
{
    heuristicGCPolicy_.SetNativeHeapThreshold(newThreshold);
}

// Returns the current native-memory threshold.
size_t HeapImpl::GetNativeHeapThreshold() const
{
    return heuristicGCPolicy_.GetNativeHeapThreshold();
}
260 
// Switches GC tuning between foreground and background profiles.
void HeapImpl::ChangeGCParams(bool isBackground)
{
    heuristicGCPolicy_.ChangeGCParams(isBackground);
}

// Records the live heap size measured after the last GC for future heuristics.
void HeapImpl::RecordAliveSizeAfterLastGC(size_t aliveBytes)
{
    heuristicGCPolicy_.RecordAliveSizeAfterLastGC(aliveBytes);
}

// Triggers a hint GC for the given memory-reduce degree if the policy agrees;
// returns whether a GC was triggered.
bool HeapImpl::CheckAndTriggerHintGC(MemoryReduceDegree degree)
{
    return heuristicGCPolicy_.CheckAndTriggerHintGC(degree);
}

// Notifies the policy that an app-sensitive period starts (true) or ends.
void HeapImpl::NotifyHighSensitive(bool isStart)
{
    heuristicGCPolicy_.NotifyHighSensitive(isStart);
}

// Records the heap object size observed before entering sensitive status;
// skipped entirely when not currently in sensitive status.
void HeapImpl::SetRecordHeapObjectSizeBeforeSensitive(size_t objSize)
{
    if (heuristicGCPolicy_.InSensitiveStatus()) {
        heuristicGCPolicy_.SetRecordHeapObjectSizeBeforeSensitive(objSize);
    }
}

// Returns the app sensitive status tracked by the policy.
AppSensitiveStatus HeapImpl::GetSensitiveStatus()
{
    return heuristicGCPolicy_.GetSensitiveStatus();
}

// Returns the app startup status tracked by the policy.
StartupStatus HeapImpl::GetStartupStatus()
{
    return heuristicGCPolicy_.GetStartupStatus();
}
297 
// Returns the collector currently selected by the proxy.
Collector& HeapImpl::GetCollector() { return collectorProxy_.GetCurrentCollector(); }

// Returns the allocator (the heap space); valid between Init() and Fini().
Allocator& HeapImpl::GetAllocator() { return *theSpace_; }
301 
InstallBarrier(const GCPhase phase)302 void HeapImpl::InstallBarrier(const GCPhase phase)
303 {
304     if (phase == GCPhase::GC_PHASE_ENUM) {
305         currentBarrier_.store(&enumBarrier_, std::memory_order_relaxed);
306     } else if (phase == GCPhase::GC_PHASE_MARK) {
307         currentBarrier_.store(&markingBarrier_, std::memory_order_relaxed);
308     } else if (phase == GCPhase::GC_PHASE_PRECOPY) {
309         currentBarrier_.store(&preforwardBarrier_, std::memory_order_relaxed);
310     } else if (phase == GCPhase::GC_PHASE_COPY || phase == GCPhase::GC_PHASE_FIX) {
311         currentBarrier_.store(&copyBarrier_, std::memory_order_relaxed);
312     } else if (phase == GCPhase::GC_PHASE_IDLE) {
313         currentBarrier_.store(&idleBarrier_, std::memory_order_relaxed);
314     } else if (phase == GCPhase::GC_PHASE_POST_MARK) {
315         currentBarrier_.store(&postMarkingBarrier_, std::memory_order_relaxed);
316     } else if (phase == GCPhase::GC_PHASE_FINAL_MARK ||
317                phase == GCPhase::GC_PHASE_REMARK_SATB) {
318         currentBarrier_ = &remarkBarrier_;
319     }
320     DLOG(GCPHASE, "install barrier for gc phase %u", phase);
321 }
322 
// Current GC phase, as tracked by the collector proxy.
GCPhase HeapImpl::GetGCPhase() const { return collectorProxy_.GetGCPhase(); }

// Propagates a GC phase change to the collector proxy.
void HeapImpl::SetGCPhase(const GCPhase phase) { collectorProxy_.SetGCPhase(phase); }

// Capacity/usage queries, all delegated to the space.
size_t HeapImpl::GetMaxCapacity() const { return theSpace_->GetMaxCapacity(); }

size_t HeapImpl::GetCurrentCapacity() const { return theSpace_->GetCurrentCapacity(); }

size_t HeapImpl::GetUsedPageSize() const { return theSpace_->GetUsedPageSize(); }

size_t HeapImpl::GetAllocatedSize() const { return theSpace_->GetAllocatedBytes(); }

// Headroom left before the heap hits its maximum capacity.
size_t HeapImpl::GetRemainHeapSize() const { return theSpace_->GetMaxCapacity() - theSpace_->GetAllocatedBytes(); }

size_t HeapImpl::GetSurvivedSize() const { return theSpace_->GetSurvivedSize(); }
338 
// Total bytes ever allocated = bytes already freed by GC (from GC stats)
// plus bytes currently live in the space.
size_t HeapImpl::GetAccumulatedAllocateSize() const
{
    return collectorResources_.GetGCStats().GetAccumulatedFreeSize() + theSpace_->GetAllocatedBytes();
}

// Total bytes freed by GC over the process lifetime.
size_t HeapImpl::GetAccumulatedFreeSize() const { return collectorResources_.GetGCStats().GetAccumulatedFreeSize(); }

// Address range of the heap space.
HeapAddress HeapImpl::GetStartAddress() const { return theSpace_->GetSpaceStartAddress(); }

HeapAddress HeapImpl::GetSpaceEndAddress() const { return theSpace_->GetSpaceEndAddress(); }
349 
GetHeap()350 Heap& Heap::GetHeap() { return *g_heapInstance; }
351 
// Applies the visitor to every registered static GC root.
void HeapImpl::VisitStaticRoots(const RefFieldVisitor& visitor) { staticRootTable_.VisitRoots(visitor); }

FinalizerProcessor& HeapImpl::GetFinalizerProcessor() { return collectorResources_.GetFinalizerProcessor(); }

CollectorResources& HeapImpl::GetCollectorResources() { return collectorResources_; }

// Stops all GC work managed by the collector resources.
void HeapImpl::StopGCWork() { collectorResources_.StopGCWork(); }

// (Un)registers a thread-local allocation buffer with the allocator.
void HeapImpl::RegisterAllocBuffer(AllocationBuffer& buffer) { GetAllocator().RegisterAllocBuffer(buffer); }
void HeapImpl::UnregisterAllocBuffer(AllocationBuffer& buffer) { GetAllocator().UnregisterAllocBuffer(buffer); }
362 } // namespace common
363