/*
 * Copyright (c) 2021 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ECMASCRIPT_MEM_HEAP_INL_H
#define ECMASCRIPT_MEM_HEAP_INL_H

#include "ecmascript/mem/heap.h"

#include "ecmascript/dfx/hprof/heap_tracker.h"
#include "ecmascript/ecma_vm.h"
#include "ecmascript/mem/allocator-inl.h"
#include "ecmascript/mem/concurrent_sweeper.h"
#include "ecmascript/mem/linear_space.h"
#include "ecmascript/mem/mem_controller.h"
#include "ecmascript/mem/sparse_space.h"
#include "ecmascript/mem/tagged_object.h"
#include "ecmascript/mem/barriers-inl.h"
#include "ecmascript/mem/mem_map_allocator.h"

namespace panda::ecmascript {
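// On allocation failure this macro temporarily raises the space limit by the configured
// out-of-memory overshoot, optionally dumps a heap snapshot, retries the allocation once,
// and then throws an OutOfMemory error tagged with the caller-supplied message.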
#define CHECK_OBJ_AND_THROW_OOM_ERROR(object, size, space, message)                                         \
    if (UNLIKELY((object) == nullptr)) {                                                                    \
        EcmaVM *vm = GetEcmaVM();                                                                           \
        size_t oomOvershootSize = vm->GetEcmaParamConfiguration().GetOutOfMemoryOvershootSize();            \
        (space)->IncreaseOutOfMemoryOvershootSize(oomOvershootSize);                                        \
        if ((space)->IsOOMDumpSpace()) {                                                                    \
            DumpHeapSnapshotBeforeOOM();                                                                    \
        }                                                                                                   \
        StatisticHeapDetail();                                                                              \
        (object) = reinterpret_cast<TaggedObject *>((space)->Allocate(size));                               \
        ThrowOutOfMemoryError(size, message);                                                               \
    }

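// Region and object enumeration helpers: each forwards the callback to the spaces that make up
// the requested category.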
template<class Callback>
void Heap::EnumerateOldSpaceRegions(const Callback &cb, Region *region) const
{
    oldSpace_->EnumerateRegions(cb, region);
    appSpawnSpace_->EnumerateRegions(cb);
    nonMovableSpace_->EnumerateRegions(cb);
    hugeObjectSpace_->EnumerateRegions(cb);
    machineCodeSpace_->EnumerateRegions(cb);
    hugeMachineCodeSpace_->EnumerateRegions(cb);
}

template<class Callback>
void Heap::EnumerateSnapshotSpaceRegions(const Callback &cb) const
{
    snapshotSpace_->EnumerateRegions(cb);
}

template<class Callback>
void Heap::EnumerateNonNewSpaceRegions(const Callback &cb) const
{
    oldSpace_->EnumerateRegions(cb);
    oldSpace_->EnumerateCollectRegionSet(cb);
    appSpawnSpace_->EnumerateRegions(cb);
    snapshotSpace_->EnumerateRegions(cb);
    nonMovableSpace_->EnumerateRegions(cb);
    hugeObjectSpace_->EnumerateRegions(cb);
    machineCodeSpace_->EnumerateRegions(cb);
    hugeMachineCodeSpace_->EnumerateRegions(cb);
}

template<class Callback>
void Heap::EnumerateNonNewSpaceRegionsWithRecord(const Callback &cb) const
{
    oldSpace_->EnumerateRegionsWithRecord(cb);
    snapshotSpace_->EnumerateRegionsWithRecord(cb);
    nonMovableSpace_->EnumerateRegionsWithRecord(cb);
    hugeObjectSpace_->EnumerateRegionsWithRecord(cb);
    machineCodeSpace_->EnumerateRegionsWithRecord(cb);
    hugeMachineCodeSpace_->EnumerateRegionsWithRecord(cb);
}

template<class Callback>
void Heap::EnumerateNewSpaceRegions(const Callback &cb) const
{
    activeSemiSpace_->EnumerateRegions(cb);
}

template<class Callback>
void Heap::EnumerateNonMovableRegions(const Callback &cb) const
{
    snapshotSpace_->EnumerateRegions(cb);
    appSpawnSpace_->EnumerateRegions(cb);
    nonMovableSpace_->EnumerateRegions(cb);
    hugeObjectSpace_->EnumerateRegions(cb);
    machineCodeSpace_->EnumerateRegions(cb);
    hugeMachineCodeSpace_->EnumerateRegions(cb);
}

template<class Callback>
void Heap::EnumerateRegions(const Callback &cb) const
{
    activeSemiSpace_->EnumerateRegions(cb);
    oldSpace_->EnumerateRegions(cb);
    oldSpace_->EnumerateCollectRegionSet(cb);
    appSpawnSpace_->EnumerateRegions(cb);
    snapshotSpace_->EnumerateRegions(cb);
    nonMovableSpace_->EnumerateRegions(cb);
    hugeObjectSpace_->EnumerateRegions(cb);
    machineCodeSpace_->EnumerateRegions(cb);
    hugeMachineCodeSpace_->EnumerateRegions(cb);
}

template<class Callback>
void Heap::IterateOverObjects(const Callback &cb) const
{
    activeSemiSpace_->IterateOverObjects(cb);
    oldSpace_->IterateOverObjects(cb);
    readOnlySpace_->IterateOverObjects(cb);
    appSpawnSpace_->IterateOverMarkedObjects(cb);
    nonMovableSpace_->IterateOverObjects(cb);
    hugeObjectSpace_->IterateOverObjects(cb);
    hugeMachineCodeSpace_->IterateOverObjects(cb);
}

TaggedObject *Heap::AllocateYoungOrHugeObject(JSHClass *hclass)
{
    size_t size = hclass->GetObjectSize();
    return AllocateYoungOrHugeObject(hclass, size);
}

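// Young allocation path: objects above MAX_REGULAR_HEAP_OBJECT_SIZE go straight to the huge
// object space; otherwise allocate from the active semispace, retrying after a GC for
// ALLOCATION_LIMIT and then ALLOCATION_FAILED before falling back to the OOM macro.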
TaggedObject *Heap::AllocateYoungOrHugeObject(size_t size)
{
    size = AlignUp(size, static_cast<size_t>(MemAlignment::MEM_ALIGN_OBJECT));
    if (size > MAX_REGULAR_HEAP_OBJECT_SIZE) {
        return AllocateHugeObject(size);
    }

    auto object = reinterpret_cast<TaggedObject *>(activeSemiSpace_->Allocate(size));
    if (object == nullptr) {
        CollectGarbage(SelectGCType(), GCReason::ALLOCATION_LIMIT);
        object = reinterpret_cast<TaggedObject *>(activeSemiSpace_->Allocate(size));
        if (object == nullptr) {
            CollectGarbage(SelectGCType(), GCReason::ALLOCATION_FAILED);
            object = reinterpret_cast<TaggedObject *>(activeSemiSpace_->Allocate(size));
            CHECK_OBJ_AND_THROW_OOM_ERROR(object, size, activeSemiSpace_, "Heap::AllocateYoungOrHugeObject");
        }
    }
    return object;
}

TaggedObject *Heap::AllocateYoungOrHugeObject(JSHClass *hclass, size_t size)
{
    auto object = AllocateYoungOrHugeObject(size);
    object->SetClass(thread_, hclass);
    OnAllocateEvent(reinterpret_cast<TaggedObject*>(object), size);
    return object;
}

uintptr_t Heap::AllocateYoungSync(size_t size)
{
    return activeSemiSpace_->AllocateSync(size);
}

bool Heap::MoveYoungRegionSync(Region *region)
{
    return activeSemiSpace_->SwapRegion(region, inactiveSemiSpace_);
}

void Heap::MergeToOldSpaceSync(LocalSpace *localSpace)
{
    oldSpace_->Merge(localSpace);
}

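// Best-effort young allocation: returns nullptr instead of triggering a GC when the size is
// above the regular-object limit or the active semispace cannot satisfy the request.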
TaggedObject *Heap::TryAllocateYoungGeneration(JSHClass *hclass, size_t size)
{
    size = AlignUp(size, static_cast<size_t>(MemAlignment::MEM_ALIGN_OBJECT));
    if (size > MAX_REGULAR_HEAP_OBJECT_SIZE) {
        return nullptr;
    }
    auto object = reinterpret_cast<TaggedObject *>(activeSemiSpace_->Allocate(size));
    if (object != nullptr) {
        object->SetClass(thread_, hclass);
    }
    return object;
}

TaggedObject *Heap::AllocateOldOrHugeObject(JSHClass *hclass)
{
    size_t size = hclass->GetObjectSize();
    return AllocateOldOrHugeObject(hclass, size);
}

TaggedObject *Heap::AllocateOldOrHugeObject(JSHClass *hclass, size_t size)
{
    size = AlignUp(size, static_cast<size_t>(MemAlignment::MEM_ALIGN_OBJECT));
    if (size > MAX_REGULAR_HEAP_OBJECT_SIZE) {
        return AllocateHugeObject(hclass, size);
    }
    auto object = reinterpret_cast<TaggedObject *>(oldSpace_->Allocate(size));
    CHECK_OBJ_AND_THROW_OOM_ERROR(object, size, oldSpace_, "Heap::AllocateOldOrHugeObject");
    object->SetClass(thread_, hclass);
    OnAllocateEvent(reinterpret_cast<TaggedObject*>(object), size);
    return object;
}

TaggedObject *Heap::AllocateReadOnlyOrHugeObject(JSHClass *hclass)
{
    size_t size = hclass->GetObjectSize();
    return AllocateReadOnlyOrHugeObject(hclass, size);
}

TaggedObject *Heap::AllocateReadOnlyOrHugeObject(JSHClass *hclass, size_t size)
{
    size = AlignUp(size, static_cast<size_t>(MemAlignment::MEM_ALIGN_OBJECT));
    if (size > MAX_REGULAR_HEAP_OBJECT_SIZE) {
        return AllocateHugeObject(hclass, size);
    }
    auto object = reinterpret_cast<TaggedObject *>(readOnlySpace_->Allocate(size));
    CHECK_OBJ_AND_THROW_OOM_ERROR(object, size, readOnlySpace_, "Heap::AllocateReadOnlyOrHugeObject");
    object->SetClass(thread_, hclass);
    OnAllocateEvent(reinterpret_cast<TaggedObject*>(object), size);
    return object;
}

TaggedObject *Heap::AllocateNonMovableOrHugeObject(JSHClass *hclass)
{
    size_t size = hclass->GetObjectSize();
    return AllocateNonMovableOrHugeObject(hclass, size);
}

TaggedObject *Heap::AllocateNonMovableOrHugeObject(JSHClass *hclass, size_t size)
{
    size = AlignUp(size, static_cast<size_t>(MemAlignment::MEM_ALIGN_OBJECT));
    if (size > MAX_REGULAR_HEAP_OBJECT_SIZE) {
        return AllocateHugeObject(hclass, size);
    }
    auto object = reinterpret_cast<TaggedObject *>(nonMovableSpace_->CheckAndAllocate(size));
    CHECK_OBJ_AND_THROW_OOM_ERROR(object, size, nonMovableSpace_, "Heap::AllocateNonMovableOrHugeObject");
    object->SetClass(thread_, hclass);
    OnAllocateEvent(reinterpret_cast<TaggedObject*>(object), size);
    return object;
}

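// Writes the class word into the object header directly instead of going through SetClass,
// presumably because this is used while the initial hclasses are still being bootstrapped.
// Allocation failure here is fatal.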
TaggedObject *Heap::AllocateClassClass(JSHClass *hclass, size_t size)
{
    size = AlignUp(size, static_cast<size_t>(MemAlignment::MEM_ALIGN_OBJECT));
    auto object = reinterpret_cast<TaggedObject *>(nonMovableSpace_->Allocate(size));
    if (UNLIKELY(object == nullptr)) {
        LOG_ECMA_MEM(FATAL) << "Heap::AllocateClassClass can not allocate any space";
        UNREACHABLE();
    }
    *reinterpret_cast<MarkWordType *>(ToUintPtr(object)) = reinterpret_cast<MarkWordType>(hclass);
    OnAllocateEvent(reinterpret_cast<TaggedObject*>(object), size);
    return object;
}

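// Huge object allocation: try the huge object space, trigger an Old GC on failure, then raise
// the old space limit by the OOM overshoot for one last attempt before throwing (and, if the
// allocation still fails, aborting with) an out-of-memory error.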
TaggedObject *Heap::AllocateHugeObject(size_t size)
{
    // Check whether it is necessary to trigger Old GC before expanding to avoid OOM risk.
    CheckAndTriggerOldGC(size);

    auto *object = reinterpret_cast<TaggedObject *>(hugeObjectSpace_->Allocate(size, thread_));
    if (UNLIKELY(object == nullptr)) {
        CollectGarbage(TriggerGCType::OLD_GC, GCReason::ALLOCATION_LIMIT);
        object = reinterpret_cast<TaggedObject *>(hugeObjectSpace_->Allocate(size, thread_));
        if (UNLIKELY(object == nullptr)) {
            // If huge object allocation still fails, temporarily increase the space size to avoid crashing the VM.
            size_t oomOvershootSize = GetEcmaVM()->GetEcmaParamConfiguration().GetOutOfMemoryOvershootSize();
            oldSpace_->IncreaseOutOfMemoryOvershootSize(oomOvershootSize);
            object = reinterpret_cast<TaggedObject *>(hugeObjectSpace_->Allocate(size, thread_));
            DumpHeapSnapshotBeforeOOM();
            StatisticHeapDetail();
            ThrowOutOfMemoryError(size, "Heap::AllocateHugeObject");
            if (UNLIKELY(object == nullptr)) {
                FatalOutOfMemoryError(size, "Heap::AllocateHugeObject");
            }
        }
    }
    return object;
}

TaggedObject *Heap::AllocateHugeObject(JSHClass *hclass, size_t size)
{
    // Check whether it is necessary to trigger Old GC before expanding to avoid OOM risk.
    CheckAndTriggerOldGC(size);
    auto object = AllocateHugeObject(size);
    object->SetClass(thread_, hclass);
    OnAllocateEvent(reinterpret_cast<TaggedObject*>(object), size);
    return object;
}

TaggedObject *Heap::AllocateHugeMachineCodeObject(size_t size)
{
    auto *object = reinterpret_cast<TaggedObject *>(hugeMachineCodeSpace_->Allocate(size, thread_));
    return object;
}

TaggedObject *Heap::AllocateMachineCodeObject(JSHClass *hclass, size_t size)
{
    size = AlignUp(size, static_cast<size_t>(MemAlignment::MEM_ALIGN_OBJECT));
    auto object = (size > MAX_REGULAR_HEAP_OBJECT_SIZE) ?
        reinterpret_cast<TaggedObject *>(AllocateHugeMachineCodeObject(size)) :
        reinterpret_cast<TaggedObject *>(machineCodeSpace_->Allocate(size));
    CHECK_OBJ_AND_THROW_OOM_ERROR(object, size, machineCodeSpace_, "Heap::AllocateMachineCodeObject");
    object->SetClass(thread_, hclass);
    OnAllocateEvent(reinterpret_cast<TaggedObject*>(object), size);
    return object;
}

uintptr_t Heap::AllocateSnapshotSpace(size_t size)
{
    size = AlignUp(size, static_cast<size_t>(MemAlignment::MEM_ALIGN_OBJECT));
    uintptr_t object = snapshotSpace_->Allocate(size);
    if (UNLIKELY(object == 0)) {
        FatalOutOfMemoryError(size, "Heap::AllocateSnapshotSpaceObject");
    }
    return object;
}

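// Flip the active and inactive semispaces and refresh the thread-local allocation
// top/end pointers so new-space allocation continues in the freshly activated space.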
void Heap::SwapNewSpace()
{
    activeSemiSpace_->Stop();
    inactiveSemiSpace_->Restart();

    SemiSpace *newSpace = inactiveSemiSpace_;
    inactiveSemiSpace_ = activeSemiSpace_;
    activeSemiSpace_ = newSpace;
    if (UNLIKELY(ShouldVerifyHeap())) {
        inactiveSemiSpace_->EnumerateRegions([](Region *region) {
            region->SetInactiveSemiSpace();
        });
    }
#ifdef ECMASCRIPT_SUPPORT_HEAPSAMPLING
    activeSemiSpace_->SwapAllocationCounter(inactiveSemiSpace_);
#endif
    auto topAddress = activeSemiSpace_->GetAllocationTopAddress();
    auto endAddress = activeSemiSpace_->GetAllocationEndAddress();
    thread_->ReSetNewSpaceAllocationAddress(topAddress, endAddress);
}

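// Exchange oldSpace_ and compressSpace_, carrying the initial capacity over to the incoming space.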
void Heap::SwapOldSpace()
{
    compressSpace_->SetInitialCapacity(oldSpace_->GetInitialCapacity());
    auto *oldSpace = compressSpace_;
    compressSpace_ = oldSpace_;
    oldSpace_ = oldSpace;
#ifdef ECMASCRIPT_SUPPORT_HEAPSAMPLING
    oldSpace_->SwapAllocationCounter(compressSpace_);
#endif
}

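// Post-GC cleanup: clear per-region GC metadata, reclaim the inactive semispace (plus the
// collect set after an Old GC, or the compress space after a Full GC), and signal that the
// clear task has finished.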
void Heap::ReclaimRegions(TriggerGCType gcType)
{
    activeSemiSpace_->EnumerateRegionsWithRecord([] (Region *region) {
        region->ClearMarkGCBitset();
        region->ClearCrossRegionRSet();
        region->ResetAliveObject();
        region->ClearGCFlag(RegionGCFlags::IN_NEW_TO_NEW_SET);
    });
    size_t cachedSize = inactiveSemiSpace_->GetInitialCapacity();
    if (gcType == TriggerGCType::FULL_GC) {
        compressSpace_->Reset();
        cachedSize = 0;
    } else if (gcType == TriggerGCType::OLD_GC) {
        oldSpace_->ReclaimCSet();
    }

    inactiveSemiSpace_->ReclaimRegions(cachedSize);

    sweeper_->WaitAllTaskFinished();
    EnumerateNonNewSpaceRegionsWithRecord([] (Region *region) {
        region->ClearMarkGCBitset();
        region->ClearCrossRegionRSet();
    });
    if (!clearTaskFinished_) {
        LockHolder holder(waitClearTaskFinishedMutex_);
        clearTaskFinished_ = true;
        waitClearTaskFinishedCV_.SignalAll();
    }
}

// Only call this on the JS thread.
void Heap::ClearSlotsRange(Region *current, uintptr_t freeStart, uintptr_t freeEnd)
{
    current->AtomicClearSweepingRSetInRange(freeStart, freeEnd);
    current->ClearOldToNewRSetInRange(freeStart, freeEnd);
    current->AtomicClearCrossRegionRSetInRange(freeStart, freeEnd);
}

size_t Heap::GetCommittedSize() const
{
    size_t result = activeSemiSpace_->GetCommittedSize() +
                    oldSpace_->GetCommittedSize() +
                    hugeObjectSpace_->GetCommittedSize() +
                    nonMovableSpace_->GetCommittedSize() +
                    machineCodeSpace_->GetCommittedSize() +
                    hugeMachineCodeSpace_->GetCommittedSize() +
                    snapshotSpace_->GetCommittedSize();
    return result;
}

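// Note: the machine code spaces contribute their committed size here rather than a live-object size.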
size_t Heap::GetHeapObjectSize() const
{
    size_t result = activeSemiSpace_->GetHeapObjectSize() +
                    oldSpace_->GetHeapObjectSize() +
                    hugeObjectSpace_->GetHeapObjectSize() +
                    nonMovableSpace_->GetHeapObjectSize() +
                    machineCodeSpace_->GetCommittedSize() +
                    hugeMachineCodeSpace_->GetCommittedSize() +
                    snapshotSpace_->GetHeapObjectSize();
    return result;
}

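// Waits for concurrent sweeping to finish, then counts live objects by walking every space.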
uint32_t Heap::GetHeapObjectCount() const
{
    uint32_t count = 0;
    sweeper_->EnsureAllTaskFinished();
    this->IterateOverObjects([&count]([[maybe_unused]] TaggedObject *obj) {
        ++count;
    });
    return count;
}

void Heap::InitializeIdleStatusControl(std::function<void(bool)> callback)
{
    notifyIdleStatusCallback = callback;
    if (callback != nullptr) {
        OPTIONAL_LOG(ecmaVm_, INFO) << "Received idle status control callback";
        enableIdleGC_ = ecmaVm_->GetJSOptions().EnableIdleGC();
    }
}
}  // namespace panda::ecmascript

#endif  // ECMASCRIPT_MEM_HEAP_INL_H