• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (c) 2025 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #ifndef COMMON_COMPONENTS_HEAP_HEAP_H
17 #define COMMON_COMPONENTS_HEAP_HEAP_H
18 
#include <atomic>
#include <cstddef>
#include <cstdint>
#include <cstdlib>
#include <functional>

#include "common_components/base/immortal_wrapper.h"
#include "common_components/common/base_object.h"
#include "common_components/common/type_def.h"
#include "common_components/heap/barrier/barrier.h"
#include "common_components/heap/collector/collector.h"
#include "common_components/heap/collector/heuristic_gc_policy.h"
#include "common_interfaces/base/runtime_param.h"
#include "common_interfaces/base_runtime.h"
#include "common_interfaces/profiler/heap_profiler_listener.h"
31 
32 namespace common {
33 class Allocator;
34 class AllocationBuffer;
35 class FinalizerProcessor;
36 class CollectorResources;
37 using MemoryReduceDegree = common::MemoryReduceDegree;
38 
39 class Heap {
40 public:
41     // These need to keep same with that in `RegionDesc`
42     static constexpr size_t NORMAL_UNIT_SIZE = 256 * 1024;
43     static constexpr size_t NORMAL_UNIT_HEADER_SIZE = AlignUp<size_t>(2 * sizeof(void *) + sizeof(uint8_t), 8);
44     static constexpr size_t NORMAL_UNIT_AVAILABLE_SIZE = NORMAL_UNIT_SIZE - NORMAL_UNIT_HEADER_SIZE;
45 
GetNormalRegionSize()46     static constexpr size_t GetNormalRegionSize()
47     {
48         return NORMAL_UNIT_SIZE;
49     }
50 
GetNormalRegionHeaderSize()51     static constexpr size_t GetNormalRegionHeaderSize()
52     {
53         return NORMAL_UNIT_HEADER_SIZE;
54     }
55 
GetNormalRegionAvailableSize()56     static constexpr size_t GetNormalRegionAvailableSize()
57     {
58         return NORMAL_UNIT_AVAILABLE_SIZE;
59     }
60 
throwOOM()61     static void throwOOM()
62     {
63         // Maybe we need to add heapdump logic here
64         HeapProfilerListener::GetInstance().OnOutOfMemoryEventCb();
65         LOG_COMMON(FATAL) << "Out of Memory, abort.";
66         UNREACHABLE_CC();
67     }
68     static Heap& GetHeap();
GetBarrier()69     static Barrier& GetBarrier() { return *currentBarrierPtr_->load(std::memory_order_relaxed); }
70 
71     // concurrent gc uses barrier to access heap.
UseBarrier()72     static bool UseBarrier() { return currentBarrierPtr_->load(std::memory_order_relaxed) != stwBarrierPtr_; }
73 
74     // should be removed after HeapParam is supported
75     virtual void Init(const RuntimeParam& param) = 0;
76     virtual void Fini() = 0;
77 
78     virtual void StartRuntimeThreads() = 0;
79     virtual void StopRuntimeThreads() = 0;
80 
81     virtual bool IsSurvivedObject(const BaseObject*) const = 0;
IsGarbage(const BaseObject * obj)82     bool IsGarbage(const BaseObject* obj) const { return !IsSurvivedObject(obj); }
83 
84     virtual bool IsGcStarted() const = 0;
85     virtual void WaitForGCFinish() = 0;
86     virtual void MarkGCStart() = 0;
87     virtual void MarkGCFinish() = 0;
88 
89     virtual bool IsGCEnabled() const = 0;
90     virtual void EnableGC(bool val) = 0;
91 
92     virtual HeapAddress Allocate(size_t size, AllocType allocType, bool allowGC) = 0;
93 
94     virtual Collector& GetCollector() = 0;
95     virtual Allocator& GetAllocator() = 0;
96     virtual void TryHeuristicGC() = 0;
97     virtual void TryIdleGC() = 0;
98     virtual void NotifyNativeAllocation(size_t bytes) = 0;
99     virtual void NotifyNativeFree(size_t bytes) = 0;
100     virtual void NotifyNativeReset(size_t oldBytes, size_t newBytes) = 0;
101     virtual size_t GetNotifiedNativeSize() const = 0;
102     virtual void SetNativeHeapThreshold(size_t newThreshold) = 0;
103     virtual size_t GetNativeHeapThreshold() const = 0;
104     virtual void ChangeGCParams(bool isBackground) = 0;
105     virtual void RecordAliveSizeAfterLastGC(size_t aliveBytes) = 0;
106     virtual bool CheckAndTriggerHintGC(MemoryReduceDegree degree) = 0;
107     virtual void NotifyHighSensitive(bool isStart) = 0;
108     virtual void SetRecordHeapObjectSizeBeforeSensitive(size_t objSize) = 0;
109     virtual AppSensitiveStatus GetSensitiveStatus() = 0;
110     virtual StartupStatus GetStartupStatus() = 0;
111     /* to avoid misunderstanding, variant types of heap size are defined as followed:
112      * |------------------------------ max capacity ---------------------------------|
113      * |------------------------------ current capacity ------------------------|
114      * |------------------------------ committed size -----------------------|
115      * |------------------------------ used size -------------------------|
116      * |------------------------------ allocated size -------------|
117      * |------------------------------ net size ------------|
118      * so that inequality size <= capacity <= max capacity always holds.
119      */
120     virtual size_t GetMaxCapacity() const = 0;
121 
122     // or current capacity: a continuous address space to help heap management such as GC.
123     virtual size_t GetCurrentCapacity() const = 0;
124 
125     // already used by allocator, including memory block cached for speeding up allocation.
126     // we measure it in OS page granularity because physical memory is occupied by page.
127     virtual size_t GetUsedPageSize() const = 0;
128 
129     // total memory allocated for each allocation request, including memory fragment for alignment or padding.
130     virtual size_t GetAllocatedSize() const = 0;
131 
132     virtual size_t GetSurvivedSize() const = 0;
133 
134     virtual size_t GetRemainHeapSize() const = 0;
135 
136     virtual size_t GetAccumulatedAllocateSize() const = 0;
137     virtual size_t GetAccumulatedFreeSize() const = 0;
138 
139     virtual HeapAddress GetStartAddress() const = 0;
140     virtual HeapAddress GetSpaceEndAddress() const = 0;
141 
142     // IsHeapAddress is a range-based check, used to quickly identify heap address,
143     // assuming non-heap address never falls into this address range.
IsHeapAddress(HeapAddress addr)144     static bool IsHeapAddress(HeapAddress addr) { return (addr >= heapStartAddr_) && (addr < heapCurrentEnd_); }
145 
IsTaggedObject(HeapAddress addr)146     static bool IsTaggedObject(HeapAddress addr)
147     {
148         // relies on the definition of ArkTs
149         static constexpr uint64_t TAG_BITS_SHIFT = 48;
150         static constexpr uint64_t TAG_MARK = 0xFFFFULL << TAG_BITS_SHIFT;
151         static constexpr uint64_t TAG_SPECIAL = 0x02ULL;
152         static constexpr uint64_t TAG_BOOLEAN = 0x04ULL;
153         static constexpr uint64_t TAG_HEAP_OBJECT_MASK = TAG_MARK | TAG_SPECIAL | TAG_BOOLEAN;
154 
155         if ((addr & TAG_HEAP_OBJECT_MASK) == 0) {
156             return true;
157         }
158 
159         return false;
160     }
161 
162     static void MarkJitFortMemInstalled(void *thread, void *obj);
163 
IsHeapAddress(const void * addr)164     static bool IsHeapAddress(const void* addr) { return IsHeapAddress(reinterpret_cast<HeapAddress>(addr)); }
165 
166     virtual void InstallBarrier(const GCPhase) = 0;
167 
168     virtual GCPhase GetGCPhase() const = 0;
169 
170     virtual void SetGCPhase(const GCPhase phase) = 0;
171 
172     virtual bool ForEachObject(const std::function<void(BaseObject*)>&, bool safe) = 0;
173 
174     virtual void VisitStaticRoots(const RefFieldVisitor& visitor) = 0;
175 
176     virtual FinalizerProcessor& GetFinalizerProcessor() = 0;
177 
178     virtual CollectorResources& GetCollectorResources() = 0;
179 
180     virtual void RegisterAllocBuffer(AllocationBuffer& buffer) = 0;
181 
182     virtual void UnregisterAllocBuffer(AllocationBuffer& buffer) = 0;
183 
184     virtual void StopGCWork() = 0;
185 
186     virtual GCReason GetGCReason() = 0;
187 
188     virtual void SetGCReason(GCReason reason) = 0;
189 
190     virtual bool InRecentSpace(const void *addr) = 0;
191     virtual bool GetForceThrowOOM() const = 0;
192     virtual void SetForceThrowOOM(bool val) = 0;
193 
OnHeapCreated(HeapAddress startAddr)194     static void OnHeapCreated(HeapAddress startAddr)
195     {
196         heapStartAddr_ = startAddr;
197         heapCurrentEnd_ = 0;
198     }
199 
OnHeapExtended(HeapAddress newEnd)200     static void OnHeapExtended(HeapAddress newEnd) { heapCurrentEnd_ = newEnd; }
201 
~Heap()202     virtual ~Heap() {}
203     static std::atomic<Barrier*>* currentBarrierPtr_; // record ptr for fast access
204     static Barrier* stwBarrierPtr_;      // record nonGC barrier
205     static HeapAddress heapStartAddr_;
206     static HeapAddress heapCurrentEnd_;
207 }; // class Heap
208 } // namespace common
209 #endif
210