/*
 * Copyright (c) 2021 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ECMASCRIPT_MEM_HEAP_INL_H
#define ECMASCRIPT_MEM_HEAP_INL_H

#include "ecmascript/mem/heap.h"

#include "ecmascript/base/block_hook_scope.h"
#include "ecmascript/dfx/hprof/heap_tracker.h"
#include "ecmascript/ecma_vm.h"
#include "ecmascript/mem/allocator-inl.h"
#include "ecmascript/mem/concurrent_sweeper.h"
#include "ecmascript/mem/linear_space.h"
#include "ecmascript/mem/mem_controller.h"
#include "ecmascript/mem/sparse_space.h"
#include "ecmascript/mem/tagged_object.h"
#include "ecmascript/mem/barriers-inl.h"
#include "ecmascript/mem/mem_map_allocator.h"

namespace panda::ecmascript {
using BlockHookScope = base::BlockHookScope;
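// On a failed allocation, this macro temporarily raises the space's out-of-memory
// overshoot budget, retries the allocation once, and throws an OutOfMemoryError so the
// failure surfaces as a JS exception instead of a crash.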
#define CHECK_OBJ_AND_THROW_OOM_ERROR(object, size, space, message)                                         \
    if (UNLIKELY((object) == nullptr)) {                                                                    \
        size_t oomOvershootSize = GetEcmaVM()->GetEcmaParamConfiguration().GetOutOfMemoryOvershootSize();   \
        (space)->IncreaseOutOfMemoryOvershootSize(oomOvershootSize);                                        \
        object = reinterpret_cast<TaggedObject *>((space)->Allocate(size));                                 \
        ThrowOutOfMemoryError(size, message);                                                               \
    }

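// Region enumeration helpers: each variant forwards the callback to the regions owned
// by the spaces it covers.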
template<class Callback>
void Heap::EnumerateOldSpaceRegions(const Callback &cb, Region *region) const
{
    oldSpace_->EnumerateRegions(cb, region);
    appSpawnSpace_->EnumerateRegions(cb);
    nonMovableSpace_->EnumerateRegions(cb);
    hugeObjectSpace_->EnumerateRegions(cb);
    machineCodeSpace_->EnumerateRegions(cb);
}

template<class Callback>
void Heap::EnumerateSnapshotSpaceRegions(const Callback &cb) const
{
    snapshotSpace_->EnumerateRegions(cb);
}

template<class Callback>
void Heap::EnumerateNonNewSpaceRegions(const Callback &cb) const
{
    oldSpace_->EnumerateRegions(cb);
    oldSpace_->EnumerateCollectRegionSet(cb);
    appSpawnSpace_->EnumerateRegions(cb);
    snapshotSpace_->EnumerateRegions(cb);
    nonMovableSpace_->EnumerateRegions(cb);
    hugeObjectSpace_->EnumerateRegions(cb);
    machineCodeSpace_->EnumerateRegions(cb);
}

template<class Callback>
void Heap::EnumerateNonNewSpaceRegionsWithRecord(const Callback &cb) const
{
    oldSpace_->EnumerateRegionsWithRecord(cb);
    snapshotSpace_->EnumerateRegionsWithRecord(cb);
    nonMovableSpace_->EnumerateRegionsWithRecord(cb);
    hugeObjectSpace_->EnumerateRegionsWithRecord(cb);
    machineCodeSpace_->EnumerateRegionsWithRecord(cb);
}

template<class Callback>
void Heap::EnumerateNewSpaceRegions(const Callback &cb) const
{
    activeSemiSpace_->EnumerateRegions(cb);
}

template<class Callback>
void Heap::EnumerateNonMovableRegions(const Callback &cb) const
{
    snapshotSpace_->EnumerateRegions(cb);
    appSpawnSpace_->EnumerateRegions(cb);
    nonMovableSpace_->EnumerateRegions(cb);
    hugeObjectSpace_->EnumerateRegions(cb);
    machineCodeSpace_->EnumerateRegions(cb);
}

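// Example (hypothetical caller): count the regions currently owned by every space.
//   size_t regionCount = 0;
//   heap->EnumerateRegions([&regionCount](Region *) { ++regionCount; });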
template<class Callback>
void Heap::EnumerateRegions(const Callback &cb) const
{
    activeSemiSpace_->EnumerateRegions(cb);
    oldSpace_->EnumerateRegions(cb);
    oldSpace_->EnumerateCollectRegionSet(cb);
    appSpawnSpace_->EnumerateRegions(cb);
    snapshotSpace_->EnumerateRegions(cb);
    nonMovableSpace_->EnumerateRegions(cb);
    hugeObjectSpace_->EnumerateRegions(cb);
    machineCodeSpace_->EnumerateRegions(cb);
}

template<class Callback>
void Heap::IterateOverObjects(const Callback &cb) const
{
    activeSemiSpace_->IterateOverObjects(cb);
    oldSpace_->IterateOverObjects(cb);
    appSpawnSpace_->IterateOverMarkedObjects(cb);
    nonMovableSpace_->IterateOverObjects(cb);
    hugeObjectSpace_->IterateOverObjects(cb);
}

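// Young-generation allocation: the requested size is aligned to the object alignment,
// oversized requests are redirected to the huge-object space, and a failed allocation
// triggers up to two GCs before the OOM fallback in CHECK_OBJ_AND_THROW_OOM_ERROR.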
TaggedObject *Heap::AllocateYoungOrHugeObject(JSHClass *hclass)
{
    size_t size = hclass->GetObjectSize();
    return AllocateYoungOrHugeObject(hclass, size);
}

TaggedObject *Heap::AllocateYoungOrHugeObject(size_t size)
{
    size = AlignUp(size, static_cast<size_t>(MemAlignment::MEM_ALIGN_OBJECT));
    if (size > MAX_REGULAR_HEAP_OBJECT_SIZE) {
        return AllocateHugeObject(size);
    }

    auto object = reinterpret_cast<TaggedObject *>(activeSemiSpace_->Allocate(size));
    if (object == nullptr) {
        CollectGarbage(SelectGCType(), GCReason::ALLOCATION_LIMIT);
        object = reinterpret_cast<TaggedObject *>(activeSemiSpace_->Allocate(size));
        if (object == nullptr) {
            CollectGarbage(SelectGCType(), GCReason::ALLOCATION_FAILED);
            object = reinterpret_cast<TaggedObject *>(activeSemiSpace_->Allocate(size));
            CHECK_OBJ_AND_THROW_OOM_ERROR(object, size, activeSemiSpace_, "Heap::AllocateYoungOrHugeObject");
        }
    }
    return object;
}

TaggedObject *Heap::AllocateYoungOrHugeObject(JSHClass *hclass, size_t size)
{
    auto object = AllocateYoungOrHugeObject(size);
    object->SetClass(hclass);
    OnAllocateEvent(reinterpret_cast<TaggedObject*>(object), size);
    return object;
}

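// Synchronized variants used when GC worker tasks allocate into or reorganize the
// young and old spaces concurrently with each other.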
uintptr_t Heap::AllocateYoungSync(size_t size)
{
    return activeSemiSpace_->AllocateSync(size);
}

bool Heap::MoveYoungRegionSync(Region *region)
{
    return activeSemiSpace_->SwapRegion(region, inactiveSemiSpace_);
}

void Heap::MergeToOldSpaceSync(LocalSpace *localSpace)
{
    oldSpace_->Merge(localSpace);
}

TaggedObject *Heap::TryAllocateYoungGeneration(JSHClass *hclass, size_t size)
{
    size = AlignUp(size, static_cast<size_t>(MemAlignment::MEM_ALIGN_OBJECT));
    if (size > MAX_REGULAR_HEAP_OBJECT_SIZE) {
        return nullptr;
    }
    auto object = reinterpret_cast<TaggedObject *>(activeSemiSpace_->Allocate(size));
    if (object != nullptr) {
        object->SetClass(hclass);
    }
    return object;
}

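// Old, read-only and non-movable allocation share the same pattern: align the size,
// redirect oversized requests to the huge-object space, and fall back to
// CHECK_OBJ_AND_THROW_OOM_ERROR when the target space cannot satisfy the request.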
TaggedObject *Heap::AllocateOldOrHugeObject(JSHClass *hclass)
{
    size_t size = hclass->GetObjectSize();
    return AllocateOldOrHugeObject(hclass, size);
}

TaggedObject *Heap::AllocateOldOrHugeObject(JSHClass *hclass, size_t size)
{
    size = AlignUp(size, static_cast<size_t>(MemAlignment::MEM_ALIGN_OBJECT));
    if (size > MAX_REGULAR_HEAP_OBJECT_SIZE) {
        return AllocateHugeObject(hclass, size);
    }
    auto object = reinterpret_cast<TaggedObject *>(oldSpace_->Allocate(size));
    CHECK_OBJ_AND_THROW_OOM_ERROR(object, size, oldSpace_, "Heap::AllocateOldOrHugeObject");
    object->SetClass(hclass);
    OnAllocateEvent(reinterpret_cast<TaggedObject*>(object), size);
    return object;
}

TaggedObject *Heap::AllocateReadOnlyOrHugeObject(JSHClass *hclass)
{
    size_t size = hclass->GetObjectSize();
    return AllocateReadOnlyOrHugeObject(hclass, size);
}

TaggedObject *Heap::AllocateReadOnlyOrHugeObject(JSHClass *hclass, size_t size)
{
    size = AlignUp(size, static_cast<size_t>(MemAlignment::MEM_ALIGN_OBJECT));
    if (size > MAX_REGULAR_HEAP_OBJECT_SIZE) {
        return AllocateHugeObject(hclass, size);
    }
    auto object = reinterpret_cast<TaggedObject *>(readOnlySpace_->Allocate(size));
    CHECK_OBJ_AND_THROW_OOM_ERROR(object, size, readOnlySpace_, "Heap::AllocateReadOnlyOrHugeObject");
    object->SetClass(hclass);
    OnAllocateEvent(reinterpret_cast<TaggedObject*>(object), size);
    return object;
}

TaggedObject *Heap::AllocateNonMovableOrHugeObject(JSHClass *hclass)
{
    size_t size = hclass->GetObjectSize();
    return AllocateNonMovableOrHugeObject(hclass, size);
}

TaggedObject *Heap::AllocateNonMovableOrHugeObject(JSHClass *hclass, size_t size)
{
    size = AlignUp(size, static_cast<size_t>(MemAlignment::MEM_ALIGN_OBJECT));
    if (size > MAX_REGULAR_HEAP_OBJECT_SIZE) {
        return AllocateHugeObject(hclass, size);
    }
    auto object = reinterpret_cast<TaggedObject *>(nonMovableSpace_->Allocate(size));
    CHECK_OBJ_AND_THROW_OOM_ERROR(object, size, nonMovableSpace_, "Heap::AllocateNonMovableOrHugeObject");
    object->SetClass(hclass);
    OnAllocateEvent(reinterpret_cast<TaggedObject*>(object), size);
    return object;
}

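// Bootstrap allocation for hclass objects: the class pointer is stored directly in the
// header word, and an allocation failure here is fatal rather than a recoverable
// JS exception.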
TaggedObject *Heap::AllocateClassClass(JSHClass *hclass, size_t size)
{
    size = AlignUp(size, static_cast<size_t>(MemAlignment::MEM_ALIGN_OBJECT));
    auto object = reinterpret_cast<TaggedObject *>(nonMovableSpace_->Allocate(size));
    if (UNLIKELY(object == nullptr)) {
        LOG_ECMA_MEM(FATAL) << "Heap::AllocateClassClass can not allocate any space";
        UNREACHABLE();
    }
    *reinterpret_cast<MarkWordType *>(ToUintPtr(object)) = reinterpret_cast<MarkWordType>(hclass);
    OnAllocateEvent(reinterpret_cast<TaggedObject*>(object), size);
    return object;
}

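// Huge-object allocation: check whether an old GC should run before expanding, retry
// after an old GC on failure, and finally raise the OOM overshoot budget for one last
// attempt, throwing an OutOfMemoryError (or aborting if even that attempt fails).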
TaggedObject *Heap::AllocateHugeObject(size_t size)
{
    // Check whether it is necessary to trigger Old GC before expanding to avoid OOM risk.
    CheckAndTriggerOldGC(size);

    auto *object = reinterpret_cast<TaggedObject *>(hugeObjectSpace_->Allocate(size, thread_));
    if (UNLIKELY(object == nullptr)) {
        CollectGarbage(TriggerGCType::OLD_GC, GCReason::ALLOCATION_LIMIT);
        object = reinterpret_cast<TaggedObject *>(hugeObjectSpace_->Allocate(size, thread_));
        if (UNLIKELY(object == nullptr)) {
            // If huge-object allocation still fails, temporarily increase the space size to avoid a VM crash.
            size_t oomOvershootSize = GetEcmaVM()->GetEcmaParamConfiguration().GetOutOfMemoryOvershootSize();
            oldSpace_->IncreaseOutOfMemoryOvershootSize(oomOvershootSize);
            object = reinterpret_cast<TaggedObject *>(hugeObjectSpace_->Allocate(size, thread_));
            if (UNLIKELY(object == nullptr)) {
                FatalOutOfMemoryError(size, "Heap::AllocateHugeObject");
            }
            ThrowOutOfMemoryError(size, "Heap::AllocateHugeObject");
        }
    }
    return object;
}

TaggedObject *Heap::AllocateHugeObject(JSHClass *hclass, size_t size)
{
    // Check whether it is necessary to trigger Old GC before expanding to avoid OOM risk.
    CheckAndTriggerOldGC(size);
    auto object = AllocateHugeObject(size);
    object->SetClass(hclass);
    OnAllocateEvent(reinterpret_cast<TaggedObject*>(object), size);
    return object;
}

TaggedObject *Heap::AllocateMachineCodeObject(JSHClass *hclass, size_t size)
{
    size = AlignUp(size, static_cast<size_t>(MemAlignment::MEM_ALIGN_OBJECT));
    auto object = reinterpret_cast<TaggedObject *>(machineCodeSpace_->Allocate(size));
    CHECK_OBJ_AND_THROW_OOM_ERROR(object, size, machineCodeSpace_, "Heap::AllocateMachineCodeObject");
    object->SetClass(hclass);
    OnAllocateEvent(reinterpret_cast<TaggedObject*>(object), size);
    return object;
}

uintptr_t Heap::AllocateSnapshotSpace(size_t size)
{
    size = AlignUp(size, static_cast<size_t>(MemAlignment::MEM_ALIGN_OBJECT));
    uintptr_t object = snapshotSpace_->Allocate(size);
    if (UNLIKELY(object == 0)) {
        FatalOutOfMemoryError(size, "Heap::AllocateSnapshotSpaceObject");
    }
    return object;
}

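// Swap the active and inactive semispaces and redirect the JS thread's inline
// allocation window (top/end) to the new active space.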
void Heap::SwapNewSpace()
{
    activeSemiSpace_->Stop();
    inactiveSemiSpace_->Restart();

    SemiSpace *newSpace = inactiveSemiSpace_;
    inactiveSemiSpace_ = activeSemiSpace_;
    activeSemiSpace_ = newSpace;
#ifdef ECMASCRIPT_SUPPORT_HEAPSAMPLING
    activeSemiSpace_->SwapAllocationCounter(inactiveSemiSpace_);
#endif
    auto topAddress = activeSemiSpace_->GetAllocationTopAddress();
    auto endAddress = activeSemiSpace_->GetAllocationEndAddress();
    thread_->ReSetNewSpaceAllocationAddress(topAddress, endAddress);
}

void Heap::SwapOldSpace()
{
    compressSpace_->SetInitialCapacity(oldSpace_->GetInitialCapacity());
    auto *oldSpace = compressSpace_;
    compressSpace_ = oldSpace_;
    oldSpace_ = oldSpace;
#ifdef ECMASCRIPT_SUPPORT_HEAPSAMPLING
    oldSpace_->SwapAllocationCounter(compressSpace_);
#endif
}

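// Post-GC cleanup: clear per-region GC metadata, release the regions that are no longer
// needed, and signal any thread waiting for the clear task to finish.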
void Heap::ReclaimRegions(TriggerGCType gcType)
{
    activeSemiSpace_->EnumerateRegionsWithRecord([] (Region *region) {
        region->ClearMarkGCBitset();
        region->ClearCrossRegionRSet();
        region->ResetAliveObject();
        region->ClearGCFlag(RegionGCFlags::IN_NEW_TO_NEW_SET);
    });
    if (gcType == TriggerGCType::FULL_GC) {
        compressSpace_->Reset();
    } else if (gcType == TriggerGCType::OLD_GC) {
        oldSpace_->ReclaimCSet();
    }
    inactiveSemiSpace_->ReclaimRegions();

    sweeper_->WaitAllTaskFinished();
    EnumerateNonNewSpaceRegionsWithRecord([] (Region *region) {
        region->ClearMarkGCBitset();
        region->ClearCrossRegionRSet();
    });
    if (!clearTaskFinished_) {
        os::memory::LockHolder holder(waitClearTaskFinishedMutex_);
        clearTaskFinished_ = true;
        waitClearTaskFinishedCV_.SignalAll();
    }
}

// Only call this on the JS thread.
void Heap::ClearSlotsRange(Region *current, uintptr_t freeStart, uintptr_t freeEnd)
{
    current->AtomicClearSweepingRSetInRange(freeStart, freeEnd);
    current->ClearOldToNewRSetInRange(freeStart, freeEnd);
    current->AtomicClearCrossRegionRSetInRange(freeStart, freeEnd);
}

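// Heap statistics: committed memory and live object size aggregated over all spaces.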
size_t Heap::GetCommittedSize() const
{
    size_t result = activeSemiSpace_->GetCommittedSize() +
                    oldSpace_->GetCommittedSize() +
                    hugeObjectSpace_->GetCommittedSize() +
                    nonMovableSpace_->GetCommittedSize() +
                    machineCodeSpace_->GetCommittedSize() +
                    snapshotSpace_->GetCommittedSize();
    return result;
}

size_t Heap::GetHeapObjectSize() const
{
    size_t result = activeSemiSpace_->GetHeapObjectSize() +
                    oldSpace_->GetHeapObjectSize() +
                    hugeObjectSpace_->GetHeapObjectSize() +
                    nonMovableSpace_->GetHeapObjectSize() +
                    machineCodeSpace_->GetCommittedSize() +
                    snapshotSpace_->GetHeapObjectSize();
    return result;
}

uint32_t Heap::GetHeapObjectCount() const
{
    uint32_t count = 0;
    sweeper_->EnsureAllTaskFinished();
    this->IterateOverObjects([&count]([[maybe_unused]] TaggedObject *obj) {
        ++count;
    });
    return count;
}

void Heap::InitializeIdleStatusControl(std::function<void(bool)> callback)
{
    notifyIdleStatusCallback = callback;
    if (callback != nullptr) {
        OPTIONAL_LOG(ecmaVm_, INFO) << "Received idle status control callback";
        enableIdleGC_ = ecmaVm_->GetJSOptions().EnableIdleGC();
    }
}
}  // namespace panda::ecmascript

#endif  // ECMASCRIPT_MEM_HEAP_INL_H